idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
43,000
def _load_yaml ( self , config_file ) : try : with open ( config_file ) as f : return yaml . safe_load ( f . read ( ) ) except yaml . YAMLError as exc : logger . error ( "Could not parse config as YAML: {}" , str ( exc ) ) if hasattr ( exc , 'problem_mark' ) : mark = exc . problem_mark logger . error ( "Error position: (%s:%s)" % ( mark . line + 1 , mark . column + 1 ) ) except IOError as e : logger . debug ( "Could not open config file: {}" , str ( e ) ) return None
Load config from yaml file or string
43,001
def satosa_logging(logger, level, message, state, **kwargs):
    """Log *message* at *level*, prefixed with the session id from *state*.

    Uses "UNKNOWN" when no state is available; generates and stores a
    fresh UUID urn in the state when no session id exists yet.
    """
    session_id = "UNKNOWN"
    if state is not None:
        try:
            session_id = state[LOGGER_STATE_KEY]
        except KeyError:
            session_id = uuid4().urn
            state[LOGGER_STATE_KEY] = session_id
    logger.log(level, "[{id}] {msg}".format(id=session_id, msg=message), **kwargs)
Adds a session ID to the message .
43,002
def process ( self , context , internal_response ) : consent_state = context . state [ STATE_KEY ] internal_response . attributes = self . _filter_attributes ( internal_response . attributes , consent_state [ "filter" ] ) id_hash = self . _get_consent_id ( internal_response . requester , internal_response . subject_id , internal_response . attributes ) try : consent_attributes = self . _verify_consent ( id_hash ) except requests . exceptions . ConnectionError as e : satosa_logging ( logger , logging . ERROR , "Consent service is not reachable, no consent given." , context . state ) internal_response . attributes = { } return self . _end_consent ( context , internal_response ) if consent_attributes is not None : satosa_logging ( logger , logging . DEBUG , "Previous consent was given" , context . state ) internal_response . attributes = self . _filter_attributes ( internal_response . attributes , consent_attributes ) return self . _end_consent ( context , internal_response ) return self . _approve_new_consent ( context , internal_response , id_hash )
Manage consent and attribute filtering
43,003
def _get_consent_id ( self , requester , user_id , filtered_attr ) : filtered_attr_key_list = sorted ( filtered_attr . keys ( ) ) hash_str = "" for key in filtered_attr_key_list : _hash_value = "" . join ( sorted ( filtered_attr [ key ] ) ) hash_str += key + _hash_value id_string = "%s%s%s" % ( requester , user_id , hash_str ) return urlsafe_b64encode ( hashlib . sha512 ( id_string . encode ( "utf-8" ) ) . hexdigest ( ) . encode ( "utf-8" ) ) . decode ( "utf-8" )
Get a hashed id based on the requester, the user id, and the filtered attributes.
43,004
def _consent_registration ( self , consent_args ) : jws = JWS ( json . dumps ( consent_args ) , alg = self . signing_key . alg ) . sign_compact ( [ self . signing_key ] ) request = "{}/creq/{}" . format ( self . api_url , jws ) res = requests . get ( request ) if res . status_code != 200 : raise UnexpectedResponseError ( "Consent service error: %s %s" , res . status_code , res . text ) return res . text
Register a request at the consent service
43,005
def _verify_consent ( self , consent_id ) : request = "{}/verify/{}" . format ( self . api_url , consent_id ) res = requests . get ( request ) if res . status_code == 200 : return json . loads ( res . text ) return None
Connects to the consent service using the REST api and checks if the user has given consent
43,006
def _end_consent ( self , context , internal_response ) : del context . state [ STATE_KEY ] return super ( ) . process ( context , internal_response )
Clear the state for consent and end the consent step
43,007
def constructPrimaryIdentifier ( self , data , ordered_identifier_candidates ) : logprefix = PrimaryIdentifier . logprefix context = self . context attributes = data . attributes satosa_logging ( logger , logging . DEBUG , "{} Input attributes {}" . format ( logprefix , attributes ) , context . state ) value = None for candidate in ordered_identifier_candidates : satosa_logging ( logger , logging . DEBUG , "{} Considering candidate {}" . format ( logprefix , candidate ) , context . state ) values = [ attributes . get ( attribute_name , [ None ] ) [ 0 ] for attribute_name in candidate [ 'attribute_names' ] ] satosa_logging ( logger , logging . DEBUG , "{} Found candidate values {}" . format ( logprefix , values ) , context . state ) if 'name_id' in candidate [ 'attribute_names' ] : candidate_nameid_value = None candidate_nameid_value = None candidate_name_id_format = candidate . get ( 'name_id_format' ) name_id_value = data . subject_id name_id_format = data . subject_type if ( name_id_value and candidate_name_id_format and candidate_name_id_format == name_id_format ) : satosa_logging ( logger , logging . DEBUG , "{} IdP asserted NameID {}" . format ( logprefix , name_id_value ) , context . state ) candidate_nameid_value = name_id_value if candidate_nameid_value not in values : satosa_logging ( logger , logging . DEBUG , "{} Added NameID {} to candidate values" . format ( logprefix , candidate_nameid_value ) , context . state ) values . append ( candidate_nameid_value ) else : satosa_logging ( logger , logging . WARN , "{} NameID {} value also asserted as attribute value" . format ( logprefix , candidate_nameid_value ) , context . state ) if None in values : satosa_logging ( logger , logging . DEBUG , "{} Candidate is missing value so skipping" . format ( logprefix ) , context . state ) continue if 'add_scope' in candidate : if candidate [ 'add_scope' ] == 'issuer_entityid' : scope = data . auth_info . 
issuer else : scope = candidate [ 'add_scope' ] satosa_logging ( logger , logging . DEBUG , "{} Added scope {} to values" . format ( logprefix , scope ) , context . state ) values . append ( scope ) value = '' . join ( values ) break return value
Construct and return a primary identifier value from the data asserted by the IdP using the ordered list of candidates from the configuration .
43,008
def state_to_cookie ( state , name , path , encryption_key ) : cookie_data = "" if state . delete else state . urlstate ( encryption_key ) max_age = 0 if state . delete else STATE_COOKIE_MAX_AGE satosa_logging ( logger , logging . DEBUG , "Saving state as cookie, secure: %s, max-age: %s, path: %s" % ( STATE_COOKIE_SECURE , STATE_COOKIE_MAX_AGE , path ) , state ) cookie = SimpleCookie ( ) cookie [ name ] = cookie_data cookie [ name ] [ "secure" ] = STATE_COOKIE_SECURE cookie [ name ] [ "path" ] = path cookie [ name ] [ "max-age" ] = max_age return cookie
Saves a state to a cookie
43,009
def cookie_to_state ( cookie_str , name , encryption_key ) : try : cookie = SimpleCookie ( cookie_str ) state = State ( cookie [ name ] . value , encryption_key ) except KeyError as e : msg_tmpl = 'No cookie named {name} in {data}' msg = msg_tmpl . format ( name = name , data = cookie_str ) logger . exception ( msg ) raise SATOSAStateError ( msg ) from e except ValueError as e : msg_tmpl = 'Failed to process {name} from {data}' msg = msg_tmpl . format ( name = name , data = cookie_str ) logger . exception ( msg ) raise SATOSAStateError ( msg ) from e else : msg_tmpl = 'Loading state from cookie {data}' msg = msg_tmpl . format ( data = cookie_str ) satosa_logging ( logger , logging . DEBUG , msg , state ) return state
Loads a state from a cookie
43,010
def encrypt ( self , raw ) : raw = self . _pad ( raw ) iv = Random . new ( ) . read ( AES . block_size ) cipher = AES . new ( self . key , AES . MODE_CBC , iv ) return base64 . urlsafe_b64encode ( iv + cipher . encrypt ( raw ) )
Encrypts the parameter raw.
43,011
def _pad ( self , b ) : return b + ( self . bs - len ( b ) % self . bs ) * chr ( self . bs - len ( b ) % self . bs ) . encode ( "UTF-8" )
Pads the parameter to the correct length for the encryption algorithm.
43,012
def urlstate ( self , encryption_key ) : lzma = LZMACompressor ( ) urlstate_data = json . dumps ( self . _state_dict ) urlstate_data = lzma . compress ( urlstate_data . encode ( "UTF-8" ) ) urlstate_data += lzma . flush ( ) urlstate_data = _AESCipher ( encryption_key ) . encrypt ( urlstate_data ) lzma = LZMACompressor ( ) urlstate_data = lzma . compress ( urlstate_data ) urlstate_data += lzma . flush ( ) urlstate_data = base64 . urlsafe_b64encode ( urlstate_data ) return urlstate_data . decode ( "utf-8" )
Will return a url safe representation of the state .
43,013
def copy ( self ) : state_copy = State ( ) state_copy . _state_dict = copy . deepcopy ( self . _state_dict ) return state_copy
Returns a deepcopy of the state
43,014
def saml_name_id_format_to_hash_type ( name_format ) : msg = "saml_name_id_format_to_hash_type is deprecated and will be removed." _warnings . warn ( msg , DeprecationWarning ) name_id_format_to_hash_type = { NAMEID_FORMAT_TRANSIENT : UserIdHashType . transient , NAMEID_FORMAT_PERSISTENT : UserIdHashType . persistent , NAMEID_FORMAT_EMAILADDRESS : UserIdHashType . emailaddress , NAMEID_FORMAT_UNSPECIFIED : UserIdHashType . unspecified , } return name_id_format_to_hash_type . get ( name_format , UserIdHashType . transient )
Translate pySAML2 name format to satosa format
43,015
def hash_type_to_saml_name_id_format ( hash_type ) : msg = "hash_type_to_saml_name_id_format is deprecated and will be removed." _warnings . warn ( msg , DeprecationWarning ) hash_type_to_name_id_format = { UserIdHashType . transient : NAMEID_FORMAT_TRANSIENT , UserIdHashType . persistent : NAMEID_FORMAT_PERSISTENT , UserIdHashType . emailaddress : NAMEID_FORMAT_EMAILADDRESS , UserIdHashType . unspecified : NAMEID_FORMAT_UNSPECIFIED , } return hash_type_to_name_id_format . get ( hash_type , NAMEID_FORMAT_PERSISTENT )
Translate satosa format to pySAML2 name format
43,016
def save_state ( internal_request , state ) : state_data = { "hash_type" : internal_request . user_id_hash_type } state [ UserIdHasher . STATE_KEY ] = state_data
Saves all necessary information needed by the UserIdHasher
43,017
def hash_id ( salt , user_id , requester , state ) : hash_type_to_format = { NAMEID_FORMAT_TRANSIENT : "{id}{req}{time}" , NAMEID_FORMAT_PERSISTENT : "{id}{req}" , "pairwise" : "{id}{req}" , "public" : "{id}" , NAMEID_FORMAT_EMAILADDRESS : "{id}" , NAMEID_FORMAT_UNSPECIFIED : "{id}" , } format_args = { "id" : user_id , "req" : requester , "time" : datetime . datetime . utcnow ( ) . timestamp ( ) , } hash_type = UserIdHasher . hash_type ( state ) try : fmt = hash_type_to_format [ hash_type ] except KeyError as e : raise ValueError ( "Unknown hash type: {}" . format ( hash_type ) ) from e else : user_id = fmt . format ( ** format_args ) hasher = ( ( lambda salt , value : value ) if hash_type in [ NAMEID_FORMAT_EMAILADDRESS , NAMEID_FORMAT_UNSPECIFIED ] else util . hash_data ) return hasher ( salt , user_id )
Sets a user id to the internal_response in the format specified by the internal response
43,018
def load_backends ( config , callback , internal_attributes ) : backend_modules = _load_plugins ( config . get ( "CUSTOM_PLUGIN_MODULE_PATHS" ) , config [ "BACKEND_MODULES" ] , backend_filter , config [ "BASE" ] , internal_attributes , callback ) logger . info ( "Setup backends: %s" % [ backend . name for backend in backend_modules ] ) return backend_modules
Load all backend modules specified in the config
43,019
def load_frontends ( config , callback , internal_attributes ) : frontend_modules = _load_plugins ( config . get ( "CUSTOM_PLUGIN_MODULE_PATHS" ) , config [ "FRONTEND_MODULES" ] , frontend_filter , config [ "BASE" ] , internal_attributes , callback ) logger . info ( "Setup frontends: %s" % [ frontend . name for frontend in frontend_modules ] ) return frontend_modules
Load all frontend modules specified in the config
43,020
def _micro_service_filter ( cls ) : is_microservice_module = issubclass ( cls , MicroService ) is_correct_subclass = cls != MicroService and cls != ResponseMicroService and cls != RequestMicroService return is_microservice_module and is_correct_subclass
Matches only classes that are subclasses of MicroService, excluding MicroService itself and the direct ResponseMicroService and RequestMicroService base classes.
43,021
def _load_plugins ( plugin_paths , plugins , plugin_filter , base_url , internal_attributes , callback ) : loaded_plugin_modules = [ ] with prepend_to_import_path ( plugin_paths ) : for plugin_config in plugins : try : module_class = _load_endpoint_module ( plugin_config , plugin_filter ) except SATOSAConfigurationError as e : raise SATOSAConfigurationError ( "Configuration error in {}" . format ( json . dumps ( plugin_config ) ) ) from e if module_class : module_config = _replace_variables_in_plugin_module_config ( plugin_config [ "config" ] , base_url , plugin_config [ "name" ] ) instance = module_class ( callback , internal_attributes , module_config , base_url , plugin_config [ "name" ] ) loaded_plugin_modules . append ( instance ) return loaded_plugin_modules
Loads endpoint plugins
43,022
def create_and_write_saml_metadata ( proxy_conf , key , cert , dir , valid , split_frontend_metadata = False , split_backend_metadata = False ) : satosa_config = SATOSAConfig ( proxy_conf ) secc = _get_security_context ( key , cert ) frontend_entities , backend_entities = create_entity_descriptors ( satosa_config ) output = [ ] if frontend_entities : if split_frontend_metadata : output . extend ( _create_split_entity_descriptors ( frontend_entities , secc , valid ) ) else : output . extend ( _create_merged_entities_descriptors ( frontend_entities , secc , valid , "frontend.xml" ) ) if backend_entities : if split_backend_metadata : output . extend ( _create_split_entity_descriptors ( backend_entities , secc , valid ) ) else : output . extend ( _create_merged_entities_descriptors ( backend_entities , secc , valid , "backend.xml" ) ) for metadata , filename in output : path = os . path . join ( dir , filename ) print ( "Writing metadata to '{}'" . format ( path ) ) with open ( path , "w" ) as f : f . write ( metadata )
Generates SAML metadata for the given PROXY_CONF signed with the given KEY and associated CERT .
43,023
def _handle_al_response ( self , context ) : saved_state = context . state [ self . name ] internal_response = InternalData . from_dict ( saved_state ) status_code , message = self . _get_uuid ( context , internal_response . auth_info . issuer , internal_response . attributes [ 'issuer_user_id' ] ) if status_code == 200 : satosa_logging ( logger , logging . INFO , "issuer/id pair is linked in AL service" , context . state ) internal_response . subject_id = message if self . id_to_attr : internal_response . attributes [ self . id_to_attr ] = [ message ] del context . state [ self . name ] return super ( ) . process ( context , internal_response ) else : satosa_logging ( logger , logging . INFO , "User selected to not link their identity in AL service" , context . state ) del context . state [ self . name ] return super ( ) . process ( context , internal_response )
Endpoint for handling account linking service response . When getting here user might have approved or rejected linking their account
43,024
def process ( self , context , internal_response ) : status_code , message = self . _get_uuid ( context , internal_response . auth_info . issuer , internal_response . subject_id ) data = { "issuer" : internal_response . auth_info . issuer , "redirect_endpoint" : "%s/account_linking%s" % ( self . base_url , self . endpoint ) } internal_response . attributes [ 'issuer_user_id' ] = internal_response . subject_id if status_code == 200 : satosa_logging ( logger , logging . INFO , "issuer/id pair is linked in AL service" , context . state ) internal_response . subject_id = message data [ 'user_id' ] = message if self . id_to_attr : internal_response . attributes [ self . id_to_attr ] = [ message ] else : satosa_logging ( logger , logging . INFO , "issuer/id pair is not linked in AL service. Got a ticket" , context . state ) data [ 'ticket' ] = message jws = JWS ( json . dumps ( data ) , alg = self . signing_key . alg ) . sign_compact ( [ self . signing_key ] ) context . state [ self . name ] = internal_response . to_dict ( ) return Redirect ( "%s/%s" % ( self . redirect_url , jws ) )
Manage account linking and recovery
43,025
def backend_routing ( self , context ) : satosa_logging ( logger , logging . DEBUG , "Routing to backend: %s " % context . target_backend , context . state ) backend = self . backends [ context . target_backend ] [ "instance" ] context . state [ STATE_KEY ] = context . target_frontend return backend
Returns the targeted backend and an updated state
43,026
def frontend_routing ( self , context ) : target_frontend = context . state [ STATE_KEY ] satosa_logging ( logger , logging . DEBUG , "Routing to frontend: %s " % target_frontend , context . state ) context . target_frontend = target_frontend frontend = self . frontends [ context . target_frontend ] [ "instance" ] return frontend
Returns the targeted frontend and original state
43,027
def endpoint_routing ( self , context ) : if context . path is None : satosa_logging ( logger , logging . DEBUG , "Context did not contain a path!" , context . state ) raise SATOSABadContextError ( "Context did not contain any path" ) satosa_logging ( logger , logging . DEBUG , "Routing path: %s" % context . path , context . state ) path_split = context . path . split ( "/" ) backend = path_split [ 0 ] if backend in self . backends : context . target_backend = backend else : satosa_logging ( logger , logging . DEBUG , "Unknown backend %s" % backend , context . state ) try : name , frontend_endpoint = self . _find_registered_endpoint ( context , self . frontends ) except ModuleRouter . UnknownEndpoint as e : pass else : context . target_frontend = name return frontend_endpoint try : name , micro_service_endpoint = self . _find_registered_endpoint ( context , self . micro_services ) except ModuleRouter . UnknownEndpoint as e : pass else : context . target_micro_service = name return micro_service_endpoint if backend in self . backends : backend_endpoint = self . _find_registered_backend_endpoint ( context ) if backend_endpoint : return backend_endpoint raise SATOSANoBoundEndpointError ( "'{}' not bound to any function" . format ( context . path ) )
Finds and returns the endpoint function bound to the path
43,028
def _auth_req_callback_func ( self , context , internal_request ) : state = context . state state [ STATE_KEY ] = { "requester" : internal_request . requester } try : state_dict = context . state [ consent . STATE_KEY ] except KeyError : state_dict = context . state [ consent . STATE_KEY ] = { } finally : state_dict . update ( { "filter" : internal_request . attributes or [ ] , "requester_name" : internal_request . requester_name , } ) satosa_logging ( logger , logging . INFO , "Requesting provider: {}" . format ( internal_request . requester ) , state ) if self . request_micro_services : return self . request_micro_services [ 0 ] . process ( context , internal_request ) return self . _auth_req_finish ( context , internal_request )
This function is called by a frontend module when an authorization request has been processed .
43,029
def _auth_resp_callback_func ( self , context , internal_response ) : context . request = None internal_response . requester = context . state [ STATE_KEY ] [ "requester" ] if "user_id_from_attrs" in self . config [ "INTERNAL_ATTRIBUTES" ] : subject_id = [ "" . join ( internal_response . attributes [ attr ] ) for attr in self . config [ "INTERNAL_ATTRIBUTES" ] [ "user_id_from_attrs" ] ] internal_response . subject_id = "" . join ( subject_id ) if self . response_micro_services : return self . response_micro_services [ 0 ] . process ( context , internal_response ) return self . _auth_resp_finish ( context , internal_response )
This function is called by a backend module when the authorization is complete .
43,030
def _handle_satosa_authentication_error ( self , error ) : context = Context ( ) context . state = error . state frontend = self . module_router . frontend_routing ( context ) return frontend . handle_backend_error ( error )
Sends a response to the requester about the error
43,031
def _load_state ( self , context ) : try : state = cookie_to_state ( context . cookie , self . config [ "COOKIE_STATE_NAME" ] , self . config [ "STATE_ENCRYPTION_KEY" ] ) except SATOSAStateError as e : msg_tmpl = 'Failed to decrypt state {state} with {error}' msg = msg_tmpl . format ( state = context . cookie , error = str ( e ) ) satosa_logging ( logger , logging . WARNING , msg , None ) state = State ( ) finally : context . state = state
Load state from cookie to the context
43,032
def _save_state ( self , resp , context ) : cookie = state_to_cookie ( context . state , self . config [ "COOKIE_STATE_NAME" ] , "/" , self . config [ "STATE_ENCRYPTION_KEY" ] ) resp . headers . append ( tuple ( cookie . output ( ) . split ( ": " , 1 ) ) )
Saves a state from context to cookie
43,033
def run ( self , context ) : try : self . _load_state ( context ) spec = self . module_router . endpoint_routing ( context ) resp = self . _run_bound_endpoint ( context , spec ) self . _save_state ( resp , context ) except SATOSANoBoundEndpointError : raise except SATOSAError : satosa_logging ( logger , logging . ERROR , "Uncaught SATOSA error " , context . state , exc_info = True ) raise except UnknownSystemEntity as err : satosa_logging ( logger , logging . ERROR , "configuration error: unknown system entity " + str ( err ) , context . state , exc_info = False ) raise except Exception as err : satosa_logging ( logger , logging . ERROR , "Uncaught exception" , context . state , exc_info = True ) raise SATOSAUnknownError ( "Unknown error" ) from err return resp
Runs the satosa proxy with the given context .
43,034
def disco_query ( self ) : return_url = self . sp . config . getattr ( "endpoints" , "sp" ) [ "discovery_response" ] [ 0 ] [ 0 ] loc = self . sp . create_discovery_service_request ( self . discosrv , self . sp . config . entityid , ** { "return" : return_url } ) return SeeOther ( loc )
Makes a request to the discovery server
43,035
def authn_request ( self , context , entity_id ) : if self . idp_blacklist_file : with open ( self . idp_blacklist_file ) as blacklist_file : blacklist_array = json . load ( blacklist_file ) [ 'blacklist' ] if entity_id in blacklist_array : satosa_logging ( logger , logging . DEBUG , "IdP with EntityID {} is blacklisted" . format ( entity_id ) , context . state , exc_info = False ) raise SATOSAAuthenticationError ( context . state , "Selected IdP is blacklisted for this backend" ) kwargs = { } authn_context = self . construct_requested_authn_context ( entity_id ) if authn_context : kwargs [ 'requested_authn_context' ] = authn_context try : binding , destination = self . sp . pick_binding ( "single_sign_on_service" , None , "idpsso" , entity_id = entity_id ) satosa_logging ( logger , logging . DEBUG , "binding: %s, destination: %s" % ( binding , destination ) , context . state ) acs_endp , response_binding = self . sp . config . getattr ( "endpoints" , "sp" ) [ "assertion_consumer_service" ] [ 0 ] req_id , req = self . sp . create_authn_request ( destination , binding = response_binding , ** kwargs ) relay_state = util . rndstr ( ) ht_args = self . sp . apply_binding ( binding , "%s" % req , destination , relay_state = relay_state ) satosa_logging ( logger , logging . DEBUG , "ht_args: %s" % ht_args , context . state ) except Exception as exc : satosa_logging ( logger , logging . DEBUG , "Failed to construct the AuthnRequest for state" , context . state , exc_info = True ) raise SATOSAAuthenticationError ( context . state , "Failed to construct the AuthnRequest" ) from exc if self . sp . config . getattr ( 'allow_unsolicited' , 'sp' ) is False : if req_id in self . outstanding_queries : errmsg = "Request with duplicate id {}" . format ( req_id ) satosa_logging ( logger , logging . DEBUG , errmsg , context . state ) raise SATOSAAuthenticationError ( context . state , errmsg ) self . outstanding_queries [ req_id ] = req context . state [ self . 
name ] = { "relay_state" : relay_state } return make_saml_response ( binding , ht_args )
Do an authorization request on idp with given entity id . This is the start of the authorization .
43,036
def disco_response ( self , context ) : info = context . request state = context . state try : entity_id = info [ "entityID" ] except KeyError as err : satosa_logging ( logger , logging . DEBUG , "No IDP chosen for state" , state , exc_info = True ) raise SATOSAAuthenticationError ( state , "No IDP chosen" ) from err return self . authn_request ( context , entity_id )
Endpoint for the discovery server response
43,037
def _translate_response ( self , response , state ) : if self . encryption_keys : response . parse_assertion ( self . encryption_keys ) authn_info = response . authn_info ( ) [ 0 ] auth_class_ref = authn_info [ 0 ] timestamp = response . assertion . authn_statement [ 0 ] . authn_instant issuer = response . response . issuer . text auth_info = AuthenticationInformation ( auth_class_ref , timestamp , issuer , ) subject = response . get_subject ( ) name_id = subject . text if subject else None name_id_format = subject . format if subject else None attributes = self . converter . to_internal ( self . attribute_profile , response . ava , ) internal_resp = InternalData ( auth_info = auth_info , attributes = attributes , subject_type = name_id_format , subject_id = name_id , ) satosa_logging ( logger , logging . DEBUG , "backend received attributes:\n%s" % json . dumps ( response . ava , indent = 4 ) , state ) return internal_resp
Translates a saml authorization response to an internal response
43,038
def to_internal_filter ( self , attribute_profile , external_attribute_names ) : try : profile_mapping = self . to_internal_attributes [ attribute_profile ] except KeyError : logger . warn ( "no attribute mapping found for the given attribute profile '%s'" , attribute_profile ) return [ ] internal_attribute_names = set ( ) for external_attribute_name in external_attribute_names : try : internal_attribute_name = profile_mapping [ external_attribute_name ] internal_attribute_names . add ( internal_attribute_name ) except KeyError : pass return list ( internal_attribute_names )
Converts attribute names from external type to internal
43,039
def to_internal ( self , attribute_profile , external_dict ) : internal_dict = { } for internal_attribute_name , mapping in self . from_internal_attributes . items ( ) : if attribute_profile not in mapping : logger . debug ( "no attribute mapping found for internal attribute '%s' the attribute profile '%s'" % ( internal_attribute_name , attribute_profile ) ) continue external_attribute_name = mapping [ attribute_profile ] attribute_values = self . _collate_attribute_values_by_priority_order ( external_attribute_name , external_dict ) if attribute_values : logger . debug ( "backend attribute '%s' mapped to %s" % ( external_attribute_name , internal_attribute_name ) ) internal_dict [ internal_attribute_name ] = attribute_values else : logger . debug ( "skipped backend attribute '%s': no value found" , external_attribute_name ) internal_dict = self . _handle_template_attributes ( attribute_profile , internal_dict ) return internal_dict
Converts the external data from type to internal
43,040
def from_internal ( self , attribute_profile , internal_dict ) : external_dict = { } for internal_attribute_name in internal_dict : try : attribute_mapping = self . from_internal_attributes [ internal_attribute_name ] except KeyError : logger . debug ( "no attribute mapping found for the internal attribute '%s'" , internal_attribute_name ) continue if attribute_profile not in attribute_mapping : logger . debug ( "no mapping found for '%s' in attribute profile '%s'" % ( internal_attribute_name , attribute_profile ) ) continue external_attribute_names = self . from_internal_attributes [ internal_attribute_name ] [ attribute_profile ] external_attribute_name = external_attribute_names [ 0 ] logger . debug ( "frontend attribute %s mapped from %s" % ( external_attribute_name , internal_attribute_name ) ) if self . separator in external_attribute_name : nested_attribute_names = external_attribute_name . split ( self . separator ) nested_dict = self . _create_nested_attribute_value ( nested_attribute_names [ 1 : ] , internal_dict [ internal_attribute_name ] ) external_dict [ nested_attribute_names [ 0 ] ] = nested_dict else : external_dict [ external_attribute_name ] = internal_dict [ internal_attribute_name ] return external_dict
Converts the internal data to type
43,041
def _serialize ( self , version , serialize_format , context , raise_if_incomplete = False ) : values = context . copy ( ) for k in version : values [ k ] = version [ k ] try : serialized = serialize_format . format ( ** values ) except KeyError as e : missing_key = getattr ( e , "message" , e . args [ 0 ] ) raise MissingValueForSerializationException ( "Did not find key {} in {} when serializing version number" . format ( repr ( missing_key ) , repr ( version ) ) ) keys_needing_representation = set ( ) found_required = False for k in self . order ( ) : v = values [ k ] if not isinstance ( v , VersionPart ) : continue if not v . is_optional ( ) : found_required = True keys_needing_representation . add ( k ) elif not found_required : keys_needing_representation . add ( k ) required_by_format = set ( self . _labels_for_format ( serialize_format ) ) if raise_if_incomplete : if not ( keys_needing_representation <= required_by_format ) : raise IncompleteVersionRepresentationException ( "Could not represent '{}' in format '{}'" . format ( "', '" . join ( keys_needing_representation ^ required_by_format ) , serialize_format , ) ) return serialized
Attempts to serialize a version with the given serialization format .
43,042
def value_text ( self ) : search = self . _selected . get ( ) for item in self . _rbuttons : if item . value == search : return item . text return ""
Sets or returns the option selected in a ButtonGroup by its text value .
43,043
def append ( self , option ) : self . _options . append ( self . _parse_option ( option ) ) self . _refresh_options ( ) self . resize ( self . _width , self . _height )
Appends a new option to the end of the ButtonGroup .
43,044
def insert ( self , index , option ) : self . _options . insert ( index , self . _parse_option ( option ) ) self . _refresh_options ( ) self . resize ( self . _width , self . _height )
Insert a new option in the ButtonGroup at index .
43,045
def remove ( self , option ) : for existing_option in self . _options : if existing_option [ 1 ] == option : self . _options . remove ( existing_option ) self . _refresh_options ( ) return True return False
Removes the first matching option from the ButtonGroup.
43,046
def update_command ( self , command , args = None ) : if command is None : self . _command = lambda : None else : if args is None : self . _command = command else : self . _command = utils . with_args ( command , * args )
Updates the callback command which is called when the ButtonGroup changes .
43,047
def insert ( self , index , option ) : option = str ( option ) self . _options . insert ( index , option ) if len ( self . _options ) == 1 : self . value = option self . _refresh_options ( )
Insert a new option in the Combo at index .
43,048
def remove ( self , option ) : if option in self . _options : if len ( self . _options ) == 1 : self . clear ( ) else : self . _options . remove ( option ) self . _refresh_options ( ) if option == self . value : self . _set_option ( self . _options [ 0 ] ) return True else : return False
Removes the first matching option from the Combo.
43,049
def clear ( self ) : self . _options = [ ] self . _combo_menu . tk . delete ( 0 , END ) self . _selected . set ( "" )
Clears all the options in a Combo
43,050
def _set_option ( self , value ) : if len ( self . _options ) > 0 : if value in self . _options : self . _selected . set ( value ) return True else : return False else : return False
Sets a single option in the Combo returning True if it was able too .
43,051
def _set_option_by_index ( self , index ) : if index < len ( self . _options ) : self . _selected . set ( self . _options [ index ] ) return True else : return False
Sets a single option in the Combo by its index returning True if it was able too .
43,052
def after ( self , time , function , args = [ ] ) : callback_id = self . tk . after ( time , self . _call_wrapper , time , function , * args ) self . _callback [ function ] = [ callback_id , False ]
Call function after time milliseconds .
43,053
def repeat ( self , time , function , args = [ ] ) : callback_id = self . tk . after ( time , self . _call_wrapper , time , function , * args ) self . _callback [ function ] = [ callback_id , True ]
Repeat function every time milliseconds .
43,054
def cancel ( self , function ) : if function in self . _callback . keys ( ) : callback_id = self . _callback [ function ] [ 0 ] self . tk . after_cancel ( callback_id ) self . _callback . pop ( function ) else : utils . error_format ( "Could not cancel function - it doesnt exist, it may have already run" )
Cancel the scheduled function calls .
43,055
def _call_wrapper ( self , time , function , * args ) : function ( * args ) if function in self . _callback . keys ( ) : repeat = self . _callback [ function ] [ 1 ] if repeat : callback_id = self . tk . after ( time , self . _call_wrapper , time , function , * args ) self . _callback [ function ] [ 0 ] = callback_id else : self . _callback . pop ( function )
Fired by tk . after gets the callback and either executes the function and cancels or repeats
43,056
def rebind ( self , tks ) : self . _tks = tks for tk in self . _tks : tk . unbind_all ( self . _tk_event ) func_id = tk . bind ( self . _tk_event , self . _event_callback ) self . _func_ids . append ( func_id )
Rebinds the tk event only used if a widget has been destroyed and recreated .
43,057
def rebind_events ( self , * tks ) : for ref in self . _refs : self . _refs [ ref ] . rebind ( tks )
Rebinds all the tk events only used if a tk widget has been destroyed and recreated .
43,058
def set_border ( self , thickness , color = "black" ) : self . _set_tk_config ( "highlightthickness" , thickness ) self . _set_tk_config ( "highlightbackground" , utils . convert_color ( color ) )
Sets the border thickness and color .
43,059
def hide ( self ) : self . tk . withdraw ( ) self . _visible = False if self . _modal : self . tk . grab_release ( )
Hide the window .
43,060
def show ( self , wait = False ) : self . tk . deiconify ( ) self . _visible = True self . _modal = wait if self . _modal : self . tk . grab_set ( )
Show the window .
43,061
def destroy ( self ) : if self == App . _main_app : App . _main_app = None self . tk . destroy ( )
Destroy and close the App .
43,062
def line ( self , x1 , y1 , x2 , y2 , color = "black" , width = 1 ) : return self . tk . create_line ( x1 , y1 , x2 , y2 , width = width , fill = "" if color is None else utils . convert_color ( color ) )
Draws a line between 2 points
43,063
def oval ( self , x1 , y1 , x2 , y2 , color = "black" , outline = False , outline_color = "black" ) : return self . tk . create_oval ( x1 , y1 , x2 , y2 , outline = utils . convert_color ( outline_color ) if outline else "" , width = int ( outline ) , fill = "" if color is None else utils . convert_color ( color ) )
Draws an oval between 2 points
43,064
def rectangle ( self , x1 , y1 , x2 , y2 , color = "black" , outline = False , outline_color = "black" ) : return self . tk . create_rectangle ( x1 , y1 , x2 , y2 , outline = utils . convert_color ( outline_color ) if outline else "" , width = int ( outline ) , fill = "" if color is None else utils . convert_color ( color ) )
Draws a rectangle between 2 points
43,065
def polygon ( self , * coords , color = "black" , outline = False , outline_color = "black" ) : return self . tk . create_polygon ( * coords , outline = utils . convert_color ( outline_color ) if outline else "" , width = int ( outline ) , fill = "" if color is None else utils . convert_color ( color ) )
Draws a polygon from an list of co - ordinates
43,066
def triangle ( self , x1 , y1 , x2 , y2 , x3 , y3 , color = "black" , outline = False , outline_color = "black" ) : return self . polygon ( x1 , y1 , x2 , y2 , x3 , y3 , color = color , outline = outline , outline_color = outline_color )
Draws a triangle between 3 points
43,067
def _get_tk_config ( self , key , default = False ) : if default : return self . _tk_defaults [ key ] else : return self . tk [ key ]
Gets the config from the widget s tk object .
43,068
def _set_tk_config ( self , keys , value ) : if isinstance ( keys , str ) : keys = [ keys ] for key in keys : if key in self . tk . keys ( ) : if value is None : self . tk [ key ] = self . _tk_defaults [ key ] else : self . tk [ key ] = value
Gets the config from the widget s tk object
43,069
def destroy ( self ) : if self . master is not None : self . master . _remove_child ( self ) self . tk . destroy ( )
Destroy the tk widget .
43,070
def add_tk_widget ( self , tk_widget , grid = None , align = None , visible = True , enabled = None , width = None , height = None ) : return Widget ( self , tk_widget , "tk widget" , grid , align , visible , enabled , width , height )
Adds a tk widget into a guizero container .
43,071
def display_widgets ( self ) : for child in self . children : if child . displayable : if self . layout != "grid" : child . tk . pack_forget ( ) else : child . tk . grid_forget ( ) if child . visible : if self . layout != "grid" : self . _pack_widget ( child ) else : self . _grid_widget ( child )
Displays all the widgets associated with this Container .
43,072
def disable ( self ) : self . _enabled = False for child in self . children : if isinstance ( child , ( Container , Widget ) ) : child . disable ( )
Disable all the widgets in this container
43,073
def enable ( self ) : self . _enabled = True for child in self . children : if isinstance ( child , ( Container , Widget ) ) : child . enable ( )
Enable all the widgets in this container
43,074
def exit_full_screen ( self ) : self . tk . attributes ( "-fullscreen" , False ) self . _full_screen = False self . events . remove_event ( "<FullScreen.Escape>" )
Change from full screen to windowed mode and remove key binding
43,075
def _set_propagation ( self , width , height ) : if width is None : width = 0 if height is None : height = 0 propagate_function = self . tk . pack_propagate if self . layout == "grid" : propagate_function = self . tk . grid_propagate propagate_value = True if isinstance ( width , int ) : if width > 0 : propagate_value = False if isinstance ( height , int ) : if height > 0 : propagate_value = False if isinstance ( width , int ) and isinstance ( height , int ) : if ( width == 0 and height > 0 ) or ( height == 0 and width > 0 ) : utils . error_format ( "You must specify a width and a height for {}" . format ( self . description ) ) propagate_function ( propagate_value )
Set the propagation value of the tk widget dependent on the width and height
43,076
def load ( self , addr , ty ) : rdt = self . irsb_c . load ( addr . rdt , ty ) return VexValue ( self . irsb_c , rdt )
Load a value from memory into a VEX temporary register .
43,077
def constant ( self , val , ty ) : if isinstance ( val , VexValue ) and not isinstance ( val , IRExpr ) : raise Exception ( 'Constant cannot be made from VexValue or IRExpr' ) rdt = self . irsb_c . mkconst ( val , ty ) return VexValue ( self . irsb_c , rdt )
Creates a constant as a VexValue
43,078
def put ( self , val , reg ) : offset = self . lookup_register ( self . irsb_c . irsb . arch , reg ) self . irsb_c . put ( val . rdt , offset )
Puts a value from a VEX temporary register into a machine register . This is how the results of operations done to registers get committed to the machine s state .
43,079
def put_conditional ( self , cond , valiftrue , valiffalse , reg ) : val = self . irsb_c . ite ( cond . rdt , valiftrue . rdt , valiffalse . rdt ) offset = self . lookup_register ( self . irsb_c . irsb . arch , reg ) self . irsb_c . put ( val , offset )
Like put except it checks a condition to decide what to put in the destination register .
43,080
def store ( self , val , addr ) : self . irsb_c . store ( addr . rdt , val . rdt )
Store a VexValue in memory at the specified loaction .
43,081
def jump ( self , condition , to_addr , jumpkind = JumpKind . Boring , ip_offset = None ) : to_addr_ty = None if isinstance ( to_addr , VexValue ) : to_addr_rdt = to_addr . rdt to_addr_ty = to_addr . ty elif isinstance ( to_addr , int ) : to_addr_ty = vex_int_class ( self . irsb_c . irsb . arch . bits ) . type to_addr = self . constant ( to_addr , to_addr_ty ) to_addr_rdt = to_addr . rdt elif isinstance ( to_addr , RdTmp ) : to_addr_ty = vex_int_class ( self . irsb_c . irsb . arch . bits ) . type to_addr_rdt = to_addr else : raise ValueError ( "Jump destination has unknown type: " + repr ( type ( to_addr ) ) ) if not condition : self . irsb_c . irsb . jumpkind = jumpkind self . irsb_c . irsb . next = to_addr_rdt else : if ip_offset is None : ip_offset = self . arch . ip_offset assert ip_offset is not None negated_condition_rdt = self . ite ( condition , self . constant ( 0 , condition . ty ) , self . constant ( 1 , condition . ty ) ) direct_exit_target = self . constant ( self . addr + ( self . bitwidth // 8 ) , to_addr_ty ) self . irsb_c . add_exit ( negated_condition_rdt , direct_exit_target . rdt , jumpkind , ip_offset ) self . irsb_c . irsb . jumpkind = jumpkind self . irsb_c . irsb . next = to_addr_rdt
Jump to a specified destination under the specified condition . Used for branches jumps calls returns etc .
43,082
def register ( lifter , arch_name ) : if issubclass ( lifter , Lifter ) : l . debug ( "Registering lifter %s for architecture %s." , lifter . __name__ , arch_name ) lifters [ arch_name ] . append ( lifter ) if issubclass ( lifter , Postprocessor ) : l . debug ( "Registering postprocessor %s for architecture %s." , lifter . __name__ , arch_name ) postprocessors [ arch_name ] . append ( lifter )
Registers a Lifter or Postprocessor to be used by pyvex . Lifters are are given priority based on the order in which they are registered . Postprocessors will be run in registration order .
43,083
def child_expressions ( self ) : expressions = [ ] for k in self . __slots__ : v = getattr ( self , k ) if isinstance ( v , IRExpr ) : expressions . append ( v ) expressions . extend ( v . child_expressions ) return expressions
A list of all of the expressions that this expression ends up evaluating .
43,084
def constants ( self ) : constants = [ ] for k in self . __slots__ : v = getattr ( self , k ) if isinstance ( v , IRExpr ) : constants . extend ( v . constants ) elif isinstance ( v , IRConst ) : constants . append ( v ) return constants
A list of all of the constants that this expression ends up using .
43,085
def expressions ( self ) : for s in self . statements : for expr_ in s . expressions : yield expr_ yield self . next
Return an iterator of all expressions contained in the IRSB .
43,086
def instructions ( self ) : if self . _instructions is None : if self . statements is None : self . _instructions = 0 else : self . _instructions = len ( [ s for s in self . statements if type ( s ) is stmt . IMark ] ) return self . _instructions
The number of instructions in this block
43,087
def instruction_addresses ( self ) : if self . _instruction_addresses is None : if self . statements is None : self . _instruction_addresses = [ ] else : self . _instruction_addresses = [ ( s . addr + s . delta ) for s in self . statements if type ( s ) is stmt . IMark ] return self . _instruction_addresses
Addresses of instructions in this block .
43,088
def size ( self ) : if self . _size is None : self . _size = sum ( s . len for s in self . statements if type ( s ) is stmt . IMark ) return self . _size
The size of this block in bytes
43,089
def operations ( self ) : ops = [ ] for e in self . expressions : if hasattr ( e , 'op' ) : ops . append ( e . op ) return ops
A list of all operations done by the IRSB as libVEX enum names
43,090
def constant_jump_targets ( self ) : exits = set ( ) if self . exit_statements : for _ , _ , stmt_ in self . exit_statements : exits . add ( stmt_ . dst . value ) default_target = self . default_exit_target if default_target is not None : exits . add ( default_target ) return exits
A set of the static jump targets of the basic block .
43,091
def constant_jump_targets_and_jumpkinds ( self ) : exits = dict ( ) if self . exit_statements : for _ , _ , stmt_ in self . exit_statements : exits [ stmt_ . dst . value ] = stmt_ . jumpkind default_target = self . default_exit_target if default_target is not None : exits [ default_target ] = self . jumpkind return exits
A dict of the static jump targets of the basic block to their jumpkind .
43,092
def _pp_str ( self ) : sa = [ ] sa . append ( "IRSB {" ) if self . statements is not None : sa . append ( " %s" % self . tyenv ) sa . append ( "" ) if self . statements is not None : for i , s in enumerate ( self . statements ) : if isinstance ( s , stmt . Put ) : stmt_str = s . __str__ ( reg_name = self . arch . translate_register_name ( s . offset , s . data . result_size ( self . tyenv ) // 8 ) ) elif isinstance ( s , stmt . WrTmp ) and isinstance ( s . data , expr . Get ) : stmt_str = s . __str__ ( reg_name = self . arch . translate_register_name ( s . data . offset , s . data . result_size ( self . tyenv ) // 8 ) ) elif isinstance ( s , stmt . Exit ) : stmt_str = s . __str__ ( reg_name = self . arch . translate_register_name ( s . offsIP , self . arch . bits // 8 ) ) else : stmt_str = s . __str__ ( ) sa . append ( " %02d | %s" % ( i , stmt_str ) ) else : sa . append ( " Statements are omitted." ) sa . append ( " NEXT: PUT(%s) = %s; %s" % ( self . arch . translate_register_name ( self . offsIP ) , self . next , self . jumpkind ) ) sa . append ( "}" ) return '\n' . join ( sa )
Return the pretty - printed IRSB .
43,093
def _is_defaultexit_direct_jump ( self ) : if not ( self . jumpkind == 'Ijk_InvalICache' or self . jumpkind == 'Ijk_Boring' or self . jumpkind == 'Ijk_Call' ) : return False target = self . default_exit_target return target is not None
Checks if the default of this IRSB a direct jump or not .
43,094
def lookup ( self , tmp ) : if tmp < 0 or tmp > self . types_used : l . debug ( "Invalid temporary number %d" , tmp ) raise IndexError ( tmp ) return self . types [ tmp ]
Return the type of temporary variable tmp as an enum string
43,095
def _lift ( self , data , bytes_offset = None , max_bytes = None , max_inst = None , opt_level = 1 , traceflags = None , allow_arch_optimizations = None , strict_block_end = None , skip_stmts = False , collect_data_refs = False ) : irsb = IRSB . empty_block ( self . arch , self . addr ) self . data = data self . bytes_offset = bytes_offset self . opt_level = opt_level self . traceflags = traceflags self . allow_arch_optimizations = allow_arch_optimizations self . strict_block_end = strict_block_end self . collect_data_refs = collect_data_refs self . max_inst = max_inst self . max_bytes = max_bytes self . skip_stmts = skip_stmts self . irsb = irsb self . lift ( ) return self . irsb
Wrapper around the lift method on Lifters . Should not be overridden in child classes .
43,096
def exp_backoff ( attempt , cap = 3600 , base = 300 ) : max_attempts = math . log ( cap / base , 2 ) if attempt <= max_attempts : return base * 2 ** attempt return cap
Exponential backoff time
43,097
def get_proxy ( self , proxy_address ) : if not proxy_address : return None hostport = extract_proxy_hostport ( proxy_address ) return self . proxies_by_hostport . get ( hostport , None )
Return complete proxy name associated with a hostport of a given proxy_address . If proxy_address is unkonwn or empty return None .
43,098
def mark_dead ( self , proxy , _time = None ) : if proxy not in self . proxies : logger . warn ( "Proxy <%s> was not found in proxies list" % proxy ) return if proxy in self . good : logger . debug ( "GOOD proxy became DEAD: <%s>" % proxy ) else : logger . debug ( "Proxy <%s> is DEAD" % proxy ) self . unchecked . discard ( proxy ) self . good . discard ( proxy ) self . dead . add ( proxy ) now = _time or time . time ( ) state = self . proxies [ proxy ] state . backoff_time = self . backoff ( state . failed_attempts ) state . next_check = now + state . backoff_time state . failed_attempts += 1
Mark a proxy as dead
43,099
def mark_good ( self , proxy ) : if proxy not in self . proxies : logger . warn ( "Proxy <%s> was not found in proxies list" % proxy ) return if proxy not in self . good : logger . debug ( "Proxy <%s> is GOOD" % proxy ) self . unchecked . discard ( proxy ) self . dead . discard ( proxy ) self . good . add ( proxy ) self . proxies [ proxy ] . failed_attempts = 0
Mark a proxy as good