idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
230,900
def put_file_url(self, commit, path, url, recursive=False):
    """Put a file using the content found at a URL.

    The URL is sent to the server, which performs the request.

    :param commit: the commit the file is written into
    :param path: destination path of the file
    :param url: URL the server should fetch
    :param recursive: whether the server should fetch recursively
    """
    request = proto.PutFileRequest(
        file=proto.File(commit=commit_from(commit), path=path),
        url=url,
        recursive=recursive,
    )
    # PutFile is a streaming RPC, so wrap the single request in an iterator.
    self.stub.PutFile(iter([request]), metadata=self.metadata)
Puts a file using the content found at a URL . The URL is sent to the server which performs the request .
76
24
230,901
def get_file(self, commit, path, offset_bytes=0, size_bytes=0, extract_value=True):
    """Return an iterator over the contents of a file at a specific Commit.

    :param commit: the commit to read from
    :param path: path of the file
    :param offset_bytes: byte offset to start reading at
    :param size_bytes: number of bytes to read (0 means until EOF)
    :param extract_value: when True, wrap the response stream so only the
        raw values are yielded
    """
    request = proto.GetFileRequest(
        file=proto.File(commit=commit_from(commit), path=path),
        offset_bytes=offset_bytes,
        size_bytes=size_bytes,
    )
    response = self.stub.GetFile(request, metadata=self.metadata)
    if not extract_value:
        return response
    return ExtractValueIterator(response)
Returns an iterator over the contents of a file at a specific Commit.
105
15
230,902
def get_files(self, commit, paths, recursive=False):
    """Return the contents of several files at a specific Commit.

    Directories in ``paths`` are expanded via ``list_file``; the result
    maps each plain-file path to its concatenated byte content.
    """
    infos = []
    for candidate in paths:
        info = self.inspect_file(commit, candidate)
        if info.file_type == proto.FILE:
            infos.append(info)
        else:
            # Expand directories into their (optionally recursive) listing.
            infos.extend(self.list_file(commit, candidate, recursive=recursive))
    return {
        info.file.path: b''.join(self.get_file(commit, info.file.path))
        for info in infos
        if info.file_type == proto.FILE
    }
Returns the contents of a list of files at a specific Commit as a dictionary of file paths to data .
146
21
230,903
def inspect_file(self, commit, path):
    """Return info about a specific file at the given commit."""
    request = proto.InspectFileRequest(
        file=proto.File(commit=commit_from(commit), path=path))
    return self.stub.InspectFile(request, metadata=self.metadata)
Returns info about a specific file .
59
7
230,904
def list_file(self, commit, path, recursive=False):
    """List the files in a directory.

    :param recursive: when True, descend into subdirectories and return
        only plain files.
    """
    request = proto.ListFileRequest(
        file=proto.File(commit=commit_from(commit), path=path))
    response = self.stub.ListFile(request, metadata=self.metadata)
    infos = response.file_info
    if not recursive:
        return list(infos)
    # Plain files first, then every subdirectory's (recursive) listing.
    collected = [info for info in infos if info.file_type == proto.FILE]
    for info in infos:
        if info.file_type == proto.DIR:
            collected += self.list_file(commit, info.file.path, recursive)
    return collected
Lists the files in a directory .
157
8
230,905
def delete_file(self, commit, path):
    """Delete a file from a Commit.

    DeleteFile leaves a tombstone in the Commit; the file remains intact
    in the Commit's parent.
    """
    request = proto.DeleteFileRequest(
        file=proto.File(commit=commit_from(commit), path=path))
    self.stub.DeleteFile(request, metadata=self.metadata)
Deletes a file from a Commit. DeleteFile leaves a tombstone in the Commit; assuming the file isn't written to later, attempting to get the file from the finished commit will result in a not-found error. The file will of course remain intact in the Commit's parent.
55
54
230,906
def handle_authn_request(self, context, binding_in):
    """Entry point bound to the starting endpoint of the authentication.

    Delegates to the internal handler with this frontend's IdP instance.
    """
    response = self._handle_authn_request(context, binding_in, self.idp)
    return response
This method is bound to the starting endpoint of the authentication .
39
12
230,907
def _create_state_data(self, context, resp_args, relay_state):
    """Return a dict containing the state needed in the response flow.

    ``name_id_policy`` is serialized to a string so the state stays
    JSON-friendly.
    """
    policy = resp_args.get("name_id_policy")
    if policy is not None:
        resp_args["name_id_policy"] = policy.to_string().decode("utf-8")
    return {"resp_args": resp_args, "relay_state": relay_state}
Returns a dict containing the state needed in the response flow .
113
12
230,908
def _handle_authn_request(self, context, binding_in, idp):
    """Handle an incoming SAML authentication request.

    Parses the request, stores response state, chooses a NameID format,
    and hands an InternalData object to the auth request callback.

    :param context: current request context
    :param binding_in: SAML binding the request arrived on
    :param idp: pysaml2 IdP server instance
    :return: result of the auth request callback, or a ServiceError
        response when the requesting entity is unknown
    """
    req_info = idp.parse_authn_request(context.request["SAMLRequest"], binding_in)
    authn_req = req_info.message
    satosa_logging(logger, logging.DEBUG, "%s" % authn_req, context.state)

    try:
        resp_args = idp.response_args(authn_req)
    except SAMLError as e:
        satosa_logging(logger, logging.ERROR,
                       "Could not find necessary info about entity: %s" % e,
                       context.state)
        return ServiceError("Incorrect request from requester: %s" % e)

    requester = resp_args["sp_entity_id"]
    # Bug fix: reuse resp_args instead of calling idp.response_args()
    # a second time for the same request.
    context.state[self.name] = self._create_state_data(
        context, resp_args, context.request.get("RelayState"))

    subject = authn_req.subject
    name_id_value = subject.name_id.text if subject else None

    # Candidate NameID formats, in priority order below.
    nameid_formats = {
        "from_policy": authn_req.name_id_policy and authn_req.name_id_policy.format,
        "from_response": subject and subject.name_id and subject.name_id.format,
        "from_metadata": (
            idp.metadata[requester]
            .get("spsso_descriptor", [{}])[0]
            .get("name_id_format", [{}])[0]
            .get("text")
        ),
        "default": NAMEID_FORMAT_TRANSIENT,
    }
    name_id_format = (
        nameid_formats["from_policy"]
        or (nameid_formats["from_response"] != NAMEID_FORMAT_UNSPECIFIED
            and nameid_formats["from_response"])
        or nameid_formats["from_metadata"]
        or nameid_formats["from_response"]
        or nameid_formats["default"]
    )

    requester_name = self._get_sp_display_name(idp, requester)
    internal_req = InternalData(
        subject_id=name_id_value,
        subject_type=name_id_format,
        requester=requester,
        requester_name=requester_name,
    )

    idp_policy = idp.config.getattr("policy", "idp")
    if idp_policy:
        internal_req.attributes = self._get_approved_attributes(
            idp, idp_policy, requester, context.state)

    return self.auth_req_callback_func(context, internal_req)
See doc for handle_authn_request method .
622
11
230,909
def _get_approved_attributes(self, idp, idp_policy, sp_entity_id, state):
    """Return a list of approved internal attributes for the given SP."""
    name_format = idp_policy.get_name_form(sp_entity_id)
    converters = idp.config.attribute_converters
    idp_policy.acs = converters

    attribute_filter = []
    for converter in converters:
        if converter.name_format != name_format:
            continue
        # Start from every attribute the converter knows, then let the
        # policy restrict the set for this SP.
        everything = {value: None for value in converter._fro.values()}
        restricted = idp_policy.restrict(everything, sp_entity_id, idp.metadata)
        attribute_filter = list(restricted.keys())
        break

    attribute_filter = self.converter.to_internal_filter(
        self.attribute_profile, attribute_filter)
    satosa_logging(logger, logging.DEBUG, "Filter: %s" % attribute_filter, state)
    return attribute_filter
Returns a list of approved attributes
217
6
230,910
def _build_idp_config_endpoints(self, config, providers):
    """Build the final frontend module config.

    Adds, for every provider, a URL-to-handler mapping for each endpoint
    category to the IdP service config.
    """
    # NOTE(review): one shared list is extended across categories and the
    # (growing) list is assigned per category — preserved as-is.
    idp_endpoints = []
    for endp_category in self.endpoints:
        for func, endpoint in self.endpoints[endp_category].items():
            for provider in providers:
                url = "{base}/{provider}/{endpoint}".format(
                    base=self.base_url, provider=provider, endpoint=endpoint)
                idp_endpoints.append((url, func))
        config["service"]["idp"]["endpoints"][endp_category] = idp_endpoints
    return config
Builds the final frontend module config
154
8
230,911
def _load_endpoints_to_config(self, provider, target_entity_id, config=None):
    """Load approved endpoints into (a deep copy of) the IdP config.

    :param provider: provider name placed in each endpoint URL
    :param target_entity_id: target entity id placed in each endpoint URL
    :param config: optional base config; defaults to self.idp_config
    :return: the updated copy of the config
    """
    idp_conf = copy.deepcopy(config or self.idp_config)
    for service, bindings in self.endpoints.items():
        idp_conf["service"]["idp"]["endpoints"][service] = [
            ("{base}/{provider}/{target_id}/{path}".format(
                base=self.base_url,
                provider=provider,
                target_id=target_entity_id,
                path=path),
             binding)
            for binding, path in bindings.items()
        ]
    return idp_conf
Loads approved endpoints to the config .
176
9
230,912
def _load_idp_dynamic_entity_id(self, state):
    """Load an IdP server with the entity id saved in state.

    The configured entity id gets the target entity id appended so each
    target is served by a distinct virtual IdP.
    """
    dynamic_config = copy.deepcopy(self.idp_config)
    dynamic_config["entityid"] = "{}/{}".format(
        self.idp_config["entityid"], state[self.name]["target_entity_id"])
    idp_config = IdPConfig().load(dynamic_config, metadata_construction=False)
    return Server(config=idp_config)
Loads an idp server with the entity id saved in state
133
13
230,913
def _get_co_name_from_path(self, context):
    """Return the CO name, URL-decoded, from the request path.

    The CO name is the second component of the path (index 1 after
    splitting on '/').
    """
    encoded_name = context.path.split("/")[1]
    return unquote_plus(encoded_name)
The CO name is URL encoded and obtained from the request path for a request coming into one of the standard binding endpoints . For example the HTTP - Redirect binding request path will have the format
60
39
230,914
def _get_co_name(self, context):
    """Obtain the CO name from state, falling back to the request path.

    The name is saved in the request state on earlier passes; when not
    present there, it is parsed from the current request path.
    """
    try:
        co_name = context.state[self.name][self.KEY_CO_NAME]
    except KeyError:
        co_name = self._get_co_name_from_path(context)
        logger.debug("Found CO {} from request path".format(co_name))
    else:
        logger.debug("Found CO {} from state".format(co_name))
    return co_name
Obtain the CO name previously saved in the request state or if not set use the request path obtained from the current context to determine the target CO .
102
30
230,915
def _add_endpoints_to_config(self, config, co_name, backend_name):
    """Map SAML bindings to CO specific URL endpoints in the IdP config.

    :param config: IdP config to modify in place (and return)
    :param co_name: CO name, URL-encoded into each endpoint URL
    :param backend_name: target backend name in each endpoint URL
    """
    for service, bindings in self.endpoints.items():
        endpoint_mappings = []
        for binding, path in bindings.items():
            url = "{base}/{backend}/{co_name}/{path}".format(
                base=self.base_url,
                backend=backend_name,
                co_name=quote_plus(co_name),
                path=path)
            endpoint_mappings.append((url, binding))
        # Overwrite the IdP config with the CO specific mappings between
        # SAML binding and URL endpoints.
        config["service"]["idp"]["endpoints"][service] = endpoint_mappings
    return config
Use the request path from the context to determine the target backend then construct mappings from bindings to endpoints for the virtual IdP for the CO .
180
30
230,916
def _add_entity_id(self, config, co_name):
    """Construct the entity ID for the CO virtual IdP.

    The URL-encoded CO name is appended to the base entity id.
    """
    config['entityid'] = "{}/{}".format(config['entityid'], quote_plus(co_name))
    return config
Use the CO name to construct the entity ID for the virtual IdP for the CO .
74
18
230,917
def _overlay_for_saml_metadata(self, config, co_name):
    """Overlay organization and contact person details for the CO.

    Copies those sections from the frontend configuration onto the IdP
    configuration so SAML metadata generation reflects the CO.
    """
    # Locate the CO entry; assumes co_name is present in the config
    # (otherwise the last entry is used, as in the original).
    for co in self.config[self.KEY_CO]:
        if co[self.KEY_ENCODEABLE_NAME] == co_name:
            break

    org_key = self.KEY_ORGANIZATION
    if org_key in co:
        config.setdefault(org_key, {})
        for field in self.KEY_ORGANIZATION_KEYS:
            if field in co[org_key]:
                config[org_key][field] = co[org_key][field]

    contact_key = self.KEY_CONTACT_PERSON
    if contact_key in co:
        config[contact_key] = co[contact_key]
    return config
Overlay configuration details like organization and contact person from the front end configuration onto the IdP configuration to support SAML metadata generation .
155
26
230,918
def _co_names_from_config(self):
    """Return the configured CO names for which virtual IdPs are built."""
    return [entry[self.KEY_ENCODEABLE_NAME] for entry in self.config[self.KEY_CO]]
Parse the configuration for the names of the COs for which to construct virtual IdPs .
48
19
230,919
def _create_co_virtual_idp(self, context):
    """Create a virtual IdP (pysaml2 Server) to represent the CO."""
    co_name = self._get_co_name(context)
    context.decorate(self.KEY_CO_NAME, co_name)

    # Verify that we are configured for this CO. If the CO was not
    # configured, most likely the endpoint used was not registered and
    # SATOSA core code threw an exception before getting here, but we
    # include this check in case later the regex used to register the
    # endpoints is relaxed.
    co_names = self._co_names_from_config()
    if co_name not in co_names:
        msg = "CO {} not in configured list of COs {}".format(co_name, co_names)
        satosa_logging(logger, logging.WARN, msg, context.state)
        raise SATOSAError(msg)

    # Overwrite a copy of the general IdP config with mappings between
    # SAML bindings and CO specific URL endpoints, and the entityID for
    # the CO virtual IdP.
    backend_name = context.target_backend
    idp_config = copy.deepcopy(self.idp_config)
    idp_config = self._add_endpoints_to_config(idp_config, co_name, backend_name)
    idp_config = self._add_entity_id(idp_config, co_name)

    # Use the overwritten IdP config to generate a pysaml2 config object
    # and from it a server object.
    pysaml2_idp_config = IdPConfig().load(idp_config, metadata_construction=False)
    return Server(config=pysaml2_idp_config)
Create a virtual IdP to represent the CO .
380
10
230,920
def _authn_response(self, context):
    """Handle the authentication response from the AS.

    Verifies state, exchanges the code for an access token, fetches user
    info and hands an InternalData object to the auth callback.
    """
    state_data = context.state[self.name]
    aresp = self.consumer.parse_response(
        AuthorizationResponse, info=json.dumps(context.request))
    self._verify_state(aresp, state_data, context.state)

    token_args = {
        "code": aresp["code"],
        "redirect_uri": self.redirect_url,
        "state": state_data["state"],
    }
    atresp = self.consumer.do_access_token_request(
        request_args=token_args, state=aresp["state"])
    # State verification on the token response can be disabled in config.
    if ("verify_accesstoken_state" not in self.config
            or self.config["verify_accesstoken_state"]):
        self._verify_state(atresp, state_data, context.state)

    user_info = self.user_information(atresp["access_token"])
    internal_response = InternalData(auth_info=self.auth_info(context.request))
    internal_response.attributes = self.converter.to_internal(
        self.external_type, user_info)
    internal_response.subject_id = user_info[self.user_id_attr]
    del context.state[self.name]
    return self.auth_callback_func(context, internal_response)
Handles the authentication response from the AS .
306
9
230,921
def hash_data(salt, value, hash_alg=None):
    """Hash a value together with a salt using the given hash algorithm.

    :param salt: salt string, fed to the hasher after the value
    :param value: value string to hash
    :param hash_alg: hashlib algorithm name; defaults to 'sha512'
    :return: hex digest string
    """
    hasher = hashlib.new(hash_alg if hash_alg else 'sha512')
    # Order matters: value first, then salt (digest == H(value + salt)).
    for part in (value, salt):
        hasher.update(part.encode('utf-8'))
    return hasher.hexdigest()
Hashes a value together with a salt with the given hash algorithm .
90
14
230,922
def _construct_filter_value(self, candidate, data):
    """Construct an LDAP search filter value from a candidate identifier.

    Returns the concatenated value string, or None when any configured
    identifier is missing from the asserted data.
    """
    context = self.context
    attributes = data.attributes
    satosa_logging(logger, logging.DEBUG,
                   "Input attributes {}".format(attributes), context.state)

    # Collect one value per configured identifier name (first element of
    # multi-valued attributes); None when an identifier was not asserted.
    values = []
    for identifier_name in candidate['attribute_names']:
        value = attributes.get(identifier_name, None)
        if isinstance(value, list):
            value = value[0]
        values.append(value)
    satosa_logging(logger, logging.DEBUG,
                   "Found candidate values {}".format(values), context.state)

    # If name_id is configured together with a name_id_format, add the
    # NameID value of that format when asserted by the IdP, else None.
    if 'name_id' in candidate['attribute_names']:
        candidate_nameid_value = None
        candidate_name_id_format = candidate.get('name_id_format')
        name_id_value = data.subject_id
        name_id_format = data.subject_type
        if (name_id_value
                and candidate_name_id_format
                and candidate_name_id_format == name_id_format):
            satosa_logging(logger, logging.DEBUG,
                           "IdP asserted NameID {}".format(name_id_value),
                           context.state)
            candidate_nameid_value = name_id_value

        # Only add the NameID value asserted by the IdP if it is not
        # already in the list of values: some non-compliant IdPs assert
        # e.g. eduPersonPrincipalName both as an attribute and as the
        # value of a persistent NameID.
        if candidate_nameid_value not in values:
            satosa_logging(logger, logging.DEBUG,
                           "Added NameID {} to candidate values".format(
                               candidate_nameid_value),
                           context.state)
            values.append(candidate_nameid_value)
        else:
            satosa_logging(logger, logging.WARN,
                           "NameID {} value also asserted as attribute value".format(
                               candidate_nameid_value),
                           context.state)

    # A missing value disqualifies this candidate entirely.
    if None in values:
        satosa_logging(logger, logging.DEBUG,
                       "Candidate is missing value so skipping", context.state)
        return None

    # Optionally append a scope.
    if 'add_scope' in candidate:
        if candidate['add_scope'] == 'issuer_entityid':
            scope = data.auth_info.issuer
        else:
            scope = candidate['add_scope']
        satosa_logging(logger, logging.DEBUG,
                       "Added scope {} to values".format(scope), context.state)
        values.append(scope)

    # Concatenate all values to create the filter value.
    value = ''.join(values)
    satosa_logging(logger, logging.DEBUG,
                   "Constructed filter value {}".format(value), context.state)
    return value
Construct and return a LDAP directory search filter value from the candidate identifier .
753
15
230,923
def _filter_config(self, config, fields=None):
    """Return a copy of config with sensitive fields replaced by '<hidden>'.

    :param fields: keys to hide; defaults to bind_password and connection
    """
    hidden_keys = fields or ['bind_password', 'connection']
    return {
        key: '<hidden>' if key in hidden_keys else config[key]
        for key in config.keys()
    }
Filter sensitive details like passwords from a configuration dictionary .
79
10
230,924
def _ldap_connection_factory(self, config):
    """Instantiate and return a ldap3 Connection object from config.

    :raises LdapAttributeStoreError: when a required setting is missing
        or the connection attempt fails
    """
    ldap_url = config['ldap_url']
    bind_dn = config['bind_dn']
    bind_password = config['bind_password']
    if not ldap_url:
        raise LdapAttributeStoreError("ldap_url is not configured")
    if not bind_dn:
        raise LdapAttributeStoreError("bind_dn is not configured")
    if not bind_password:
        raise LdapAttributeStoreError("bind_password is not configured")

    pool_size = config['pool_size']
    pool_keepalive = config['pool_keepalive']
    server = ldap3.Server(ldap_url)

    satosa_logging(logger, logging.DEBUG, "Creating a new LDAP connection", None)
    satosa_logging(logger, logging.DEBUG,
                   "Using LDAP URL {}".format(ldap_url), None)
    satosa_logging(logger, logging.DEBUG,
                   "Using bind DN {}".format(bind_dn), None)
    satosa_logging(logger, logging.DEBUG,
                   "Using pool size {}".format(pool_size), None)
    satosa_logging(logger, logging.DEBUG,
                   "Using pool keep alive {}".format(pool_keepalive), None)

    try:
        connection = ldap3.Connection(
            server, bind_dn, bind_password,
            auto_bind=True,
            client_strategy=ldap3.REUSABLE,
            pool_size=pool_size,
            pool_keepalive=pool_keepalive)
    except LDAPException as e:
        msg = "Caught exception when connecting to LDAP server: {}".format(e)
        satosa_logging(logger, logging.ERROR, msg, None)
        raise LdapAttributeStoreError(msg)

    satosa_logging(logger, logging.DEBUG,
                   "Successfully connected to LDAP server", None)
    return connection
Use the input configuration to instantiate and return a ldap3 Connection object .
454
17
230,925
def _populate_attributes(self, config, record, context, data):
    """Populate internal attributes from a record found in LDAP.

    Only non-empty LDAP values are copied, keyed by the configured
    internal attribute name.
    """
    search_return_attributes = config['search_return_attributes']
    ldap_attrs = record["attributes"]
    for attr, internal_name in search_return_attributes.items():
        if attr not in ldap_attrs:
            continue
        if ldap_attrs[attr]:
            data.attributes[internal_name] = ldap_attrs[attr]
            satosa_logging(logger, logging.DEBUG,
                           "Setting internal attribute {} with values {}".format(
                               internal_name, ldap_attrs[attr]),
                           context.state)
        else:
            satosa_logging(logger, logging.DEBUG,
                           "Not setting internal attribute {} because value {} is null or empty".format(
                               internal_name, ldap_attrs[attr]),
                           context.state)
Use a record found in LDAP to populate attributes .
215
11
230,926
def _populate_input_for_name_id(self, config, record, context, data):
    """Use a record found in LDAP to populate input for NameID generation.

    Concatenates the configured attribute values; only overrides the
    default subject id when the result is non-empty.
    """
    user_id = ""
    for attr in config['user_id_from_attrs']:
        if attr not in record["attributes"]:
            continue
        value = record["attributes"][attr]
        if isinstance(value, list):
            # Use a default sort to ensure some predictability, since the
            # LDAP directory server may return multi-valued attributes in
            # any order.
            value.sort()
            user_id += "".join(value)
            satosa_logging(logger, logging.DEBUG,
                           "Added attribute {} with values {} to input for NameID".format(attr, value),
                           context.state)
        else:
            user_id += value
            satosa_logging(logger, logging.DEBUG,
                           "Added attribute {} with value {} to input for NameID".format(attr, value),
                           context.state)

    if not user_id:
        satosa_logging(logger, logging.WARNING,
                       "Input for NameID is empty so not overriding default",
                       context.state)
    else:
        data.subject_id = user_id
        satosa_logging(logger, logging.DEBUG,
                       "Input for NameID is {}".format(data.subject_id),
                       context.state)
Use a record found in LDAP to populate input for NameID generation .
304
15
230,927
def _verify_dict(self, conf):
    """Check that the configuration contains all necessary keys.

    :raises SATOSAConfigurationError: when the config is empty, a
        mandatory key is missing, or a sensitive key is in neither the
        config nor the environment
    """
    if not conf:
        raise SATOSAConfigurationError("Missing configuration or unknown format")
    for key in SATOSAConfig.mandatory_dict_keys:
        if key not in conf:
            raise SATOSAConfigurationError("Missing key '%s' in config" % key)
    for key in SATOSAConfig.sensitive_dict_keys:
        if key in conf:
            continue
        # Sensitive keys may alternatively come from the environment.
        if "SATOSA_{key}".format(key=key) in os.environ:
            continue
        raise SATOSAConfigurationError(
            "Missing key '%s' from config and ENVIRONMENT" % key)
Check that the configuration contains all necessary keys .
147
9
230,928
def _load_yaml(self, config_file):
    """Load config from a YAML file or string.

    :param config_file: path to the YAML file
    :return: the parsed configuration, or None on parse/IO failure
    """
    try:
        with open(config_file) as f:
            return yaml.safe_load(f.read())
    except yaml.YAMLError as exc:
        # Bug fix: logging uses %-style lazy formatting, so the previous
        # "{}" placeholder was emitted literally and the argument dropped.
        logger.error("Could not parse config as YAML: %s", exc)
        if hasattr(exc, 'problem_mark'):
            mark = exc.problem_mark
            logger.error("Error position: (%s:%s)", mark.line + 1, mark.column + 1)
    except IOError as e:
        logger.debug("Could not open config file: %s", e)
    return None
Load config from yaml file or string
150
8
230,929
def satosa_logging(logger, level, message, state, **kwargs):
    """Log the message at the given level, prefixed with a session ID.

    The session ID is read from (or created and stored in) state; when
    state is None the literal "UNKNOWN" is used.
    """
    session_id = "UNKNOWN"
    if state is not None:
        try:
            session_id = state[LOGGER_STATE_KEY]
        except KeyError:
            session_id = uuid4().urn
            state[LOGGER_STATE_KEY] = session_id
    logger.log(level, "[{id}] {msg}".format(id=session_id, msg=message), **kwargs)
Adds a session ID to the message .
115
8
230,930
def process(self, context, internal_response):
    """Manage consent and attribute filtering.

    Checks the consent service for previously given consent; releases no
    attributes when the service is unreachable, and requests new consent
    when none exists.
    """
    consent_state = context.state[STATE_KEY]
    internal_response.attributes = self._filter_attributes(
        internal_response.attributes, consent_state["filter"])
    id_hash = self._get_consent_id(
        internal_response.requester,
        internal_response.subject_id,
        internal_response.attributes)

    try:
        # Check if consent is already given.
        consent_attributes = self._verify_consent(id_hash)
    except requests.exceptions.ConnectionError:
        satosa_logging(logger, logging.ERROR,
                       "Consent service is not reachable, no consent given.",
                       context.state)
        # Send an internal_response without any attributes.
        internal_response.attributes = {}
        return self._end_consent(context, internal_response)

    # Previous consent was given.
    if consent_attributes is not None:
        satosa_logging(logger, logging.DEBUG, "Previous consent was given",
                       context.state)
        internal_response.attributes = self._filter_attributes(
            internal_response.attributes, consent_attributes)
        return self._end_consent(context, internal_response)

    # No previous consent; request consent from the user.
    return self._approve_new_consent(context, internal_response, id_hash)
Manage consent and attribute filtering
295
6
230,931
def _get_consent_id(self, requester, user_id, filtered_attr):
    """Return a hashed id based on requester, user id and attributes.

    Keys and per-key values are sorted so the id is order-independent.
    """
    hash_str = "".join(
        key + "".join(sorted(filtered_attr[key]))
        for key in sorted(filtered_attr.keys())
    )
    id_string = "%s%s%s" % (requester, user_id, hash_str)
    digest = hashlib.sha512(id_string.encode("utf-8")).hexdigest().encode("utf-8")
    return urlsafe_b64encode(digest).decode("utf-8")
Get a hashed id based on requester user id and filtered attributes
165
14
230,932
def _consent_registration(self, consent_args):
    """Register a consent request at the consent service.

    :param consent_args: request arguments to sign and send
    :return: the ticket returned by the consent service
    :raises UnexpectedResponseError: on a non-200 response
    """
    jws = JWS(json.dumps(consent_args), alg=self.signing_key.alg).sign_compact(
        [self.signing_key])
    request = "{}/creq/{}".format(self.api_url, jws)
    res = requests.get(request)
    if res.status_code != 200:
        # Bug fix: the message was passed logging-style ("%s %s", args) to
        # the exception constructor, so it was never interpolated.
        raise UnexpectedResponseError(
            "Consent service error: %s %s" % (res.status_code, res.text))
    return res.text
Register a request at the consent service
127
7
230,933
def _verify_consent(self, consent_id):
    """Ask the consent service whether consent was given for consent_id.

    :return: the parsed JSON body on a 200 response, otherwise None
    """
    res = requests.get("{}/verify/{}".format(self.api_url, consent_id))
    if res.status_code != 200:
        return None
    return json.loads(res.text)
Connects to the consent service using the REST api and checks if the user has given consent
66
18
230,934
def _end_consent(self, context, internal_response):
    """Clear the consent state and continue with the response flow."""
    del context.state[STATE_KEY]
    return super().process(context, internal_response)
Clear the state for consent and end the consent step
38
10
230,935
def constructPrimaryIdentifier(self, data, ordered_identifier_candidates):
    """Construct a primary identifier value from the data asserted by the
    IdP, using the ordered list of candidates from the configuration.

    :param data: internal data with attributes and subject id/type
    :param ordered_identifier_candidates: candidate configs, tried in order
    :return: the identifier string, or None when no candidate is satisfied
    """
    logprefix = PrimaryIdentifier.logprefix
    context = self.context
    attributes = data.attributes
    satosa_logging(logger, logging.DEBUG,
                   "{} Input attributes {}".format(logprefix, attributes),
                   context.state)

    value = None
    for candidate in ordered_identifier_candidates:
        satosa_logging(logger, logging.DEBUG,
                       "{} Considering candidate {}".format(logprefix, candidate),
                       context.state)

        # Get the values asserted by the IdP for the configured list of
        # attribute names for this candidate, substituting None when the
        # IdP did not assert any value for a configured attribute.
        values = [attributes.get(attribute_name, [None])[0]
                  for attribute_name in candidate['attribute_names']]
        satosa_logging(logger, logging.DEBUG,
                       "{} Found candidate values {}".format(logprefix, values),
                       context.state)

        # If name_id is configured together with a name_id_format, add the
        # NameID value of that format when asserted by the IdP, else None.
        if 'name_id' in candidate['attribute_names']:
            # Bug fix: this assignment was duplicated in the original.
            candidate_nameid_value = None
            candidate_name_id_format = candidate.get('name_id_format')
            name_id_value = data.subject_id
            name_id_format = data.subject_type
            if (name_id_value
                    and candidate_name_id_format
                    and candidate_name_id_format == name_id_format):
                satosa_logging(logger, logging.DEBUG,
                               "{} IdP asserted NameID {}".format(
                                   logprefix, name_id_value),
                               context.state)
                candidate_nameid_value = name_id_value

            # Only add the NameID value asserted by the IdP if it is not
            # already in the list of values: some non-compliant IdPs assert
            # e.g. eduPersonPrincipalName both as an attribute and as the
            # value of a persistent NameID.
            if candidate_nameid_value not in values:
                satosa_logging(logger, logging.DEBUG,
                               "{} Added NameID {} to candidate values".format(
                                   logprefix, candidate_nameid_value),
                               context.state)
                values.append(candidate_nameid_value)
            else:
                satosa_logging(logger, logging.WARN,
                               "{} NameID {} value also asserted as attribute value".format(
                                   logprefix, candidate_nameid_value),
                               context.state)

        # If any configured value is missing, try the next candidate.
        if None in values:
            satosa_logging(logger, logging.DEBUG,
                           "{} Candidate is missing value so skipping".format(
                               logprefix),
                           context.state)
            continue

        # All values are present so we can create a primary identifier.
        # Add a scope if configured to do so.
        if 'add_scope' in candidate:
            if candidate['add_scope'] == 'issuer_entityid':
                scope = data.auth_info.issuer
            else:
                scope = candidate['add_scope']
            satosa_logging(logger, logging.DEBUG,
                           "{} Added scope {} to values".format(logprefix, scope),
                           context.state)
            values.append(scope)

        # Concatenate all values to create the primary identifier.
        value = ''.join(values)
        break

    return value
Construct and return a primary identifier value from the data asserted by the IdP using the ordered list of candidates from the configuration .
815
25
230,936
def state_to_cookie(state, name, path, encryption_key):
    """Save a state to a cookie.

    A state marked for deletion yields an empty cookie with max-age 0.
    """
    cookie_data = "" if state.delete else state.urlstate(encryption_key)
    max_age = 0 if state.delete else STATE_COOKIE_MAX_AGE
    satosa_logging(
        logger, logging.DEBUG,
        "Saving state as cookie, secure: %s, max-age: %s, path: %s"
        % (STATE_COOKIE_SECURE, STATE_COOKIE_MAX_AGE, path),
        state)
    cookie = SimpleCookie()
    cookie[name] = cookie_data
    morsel = cookie[name]
    morsel["secure"] = STATE_COOKIE_SECURE
    morsel["path"] = path
    morsel["max-age"] = max_age
    return cookie
Saves a state to a cookie
179
7
230,937
def cookie_to_state(cookie_str, name, encryption_key):
    """Load a state from a cookie.

    :raises SATOSAStateError: when the cookie is missing or cannot be
        decrypted/parsed
    """
    try:
        cookie = SimpleCookie(cookie_str)
        state = State(cookie[name].value, encryption_key)
    except KeyError as e:
        msg = 'No cookie named {name} in {data}'.format(name=name, data=cookie_str)
        logger.exception(msg)
        raise SATOSAStateError(msg) from e
    except ValueError as e:
        msg = 'Failed to process {name} from {data}'.format(name=name, data=cookie_str)
        logger.exception(msg)
        raise SATOSAStateError(msg) from e
    else:
        satosa_logging(logger, logging.DEBUG,
                       'Loading state from cookie {data}'.format(data=cookie_str),
                       state)
        return state
Loads a state from a cookie
213
7
230,938
def encrypt(self, raw):
    """Encrypt the parameter raw.

    Pads the input, encrypts with AES-CBC under a fresh random IV, and
    returns the urlsafe base64 encoding of IV + ciphertext.
    """
    padded = self._pad(raw)
    iv = Random.new().read(AES.block_size)
    cipher = AES.new(self.key, AES.MODE_CBC, iv)
    return base64.urlsafe_b64encode(iv + cipher.encrypt(padded))
Encrypts the parameter raw.
74
7
230,939
def _pad(self, b):
    """Pad b to a multiple of the block size self.bs.

    Appends pad_len copies of the byte chr(pad_len), so the padding
    length is recoverable from the last byte.
    """
    pad_len = self.bs - len(b) % self.bs
    return b + pad_len * chr(pad_len).encode("UTF-8")
Pads the param to the correct length for the encryption algorithm.
56
16
230,940
def urlstate(self, encryption_key):
    """Return a URL-safe representation of the state.

    Pipeline: JSON -> LZMA compress -> AES encrypt -> LZMA compress ->
    urlsafe base64.
    """
    payload = json.dumps(self._state_dict).encode("UTF-8")
    compressor = LZMACompressor()
    payload = compressor.compress(payload) + compressor.flush()
    payload = _AESCipher(encryption_key).encrypt(payload)
    # A fresh compressor is required; the first one is spent after flush().
    compressor = LZMACompressor()
    payload = compressor.compress(payload) + compressor.flush()
    return base64.urlsafe_b64encode(payload).decode("utf-8")
Will return a url safe representation of the state .
173
10
230,941
def copy(self):
    """Return a new State whose dict is a deepcopy of this state's dict."""
    duplicate = State()
    duplicate._state_dict = copy.deepcopy(self._state_dict)
    return duplicate
Returns a deepcopy of the state
38
7
230,942
def saml_name_id_format_to_hash_type(name_format):
    """Translate a pySAML2 name format to a satosa hash type.

    Deprecated; defaults to transient for unknown formats.
    """
    _warnings.warn(
        "saml_name_id_format_to_hash_type is deprecated and will be removed.",
        DeprecationWarning)
    mapping = {
        NAMEID_FORMAT_TRANSIENT: UserIdHashType.transient,
        NAMEID_FORMAT_PERSISTENT: UserIdHashType.persistent,
        NAMEID_FORMAT_EMAILADDRESS: UserIdHashType.emailaddress,
        NAMEID_FORMAT_UNSPECIFIED: UserIdHashType.unspecified,
    }
    return mapping.get(name_format, UserIdHashType.transient)
Translate pySAML2 name format to satosa format
171
12
230,943
def hash_type_to_saml_name_id_format ( hash_type ) : msg = "hash_type_to_saml_name_id_format is deprecated and will be removed." _warnings . warn ( msg , DeprecationWarning ) hash_type_to_name_id_format = { UserIdHashType . transient : NAMEID_FORMAT_TRANSIENT , UserIdHashType . persistent : NAMEID_FORMAT_PERSISTENT , UserIdHashType . emailaddress : NAMEID_FORMAT_EMAILADDRESS , UserIdHashType . unspecified : NAMEID_FORMAT_UNSPECIFIED , } return hash_type_to_name_id_format . get ( hash_type , NAMEID_FORMAT_PERSISTENT )
Translate satosa format to pySAML2 name format
175
12
230,944
def save_state ( internal_request , state ) : state_data = { "hash_type" : internal_request . user_id_hash_type } state [ UserIdHasher . STATE_KEY ] = state_data
Saves all necessary information needed by the UserIdHasher
50
12
230,945
def hash_id ( salt , user_id , requester , state ) : hash_type_to_format = { NAMEID_FORMAT_TRANSIENT : "{id}{req}{time}" , NAMEID_FORMAT_PERSISTENT : "{id}{req}" , "pairwise" : "{id}{req}" , "public" : "{id}" , NAMEID_FORMAT_EMAILADDRESS : "{id}" , NAMEID_FORMAT_UNSPECIFIED : "{id}" , } format_args = { "id" : user_id , "req" : requester , "time" : datetime . datetime . utcnow ( ) . timestamp ( ) , } hash_type = UserIdHasher . hash_type ( state ) try : fmt = hash_type_to_format [ hash_type ] except KeyError as e : raise ValueError ( "Unknown hash type: {}" . format ( hash_type ) ) from e else : user_id = fmt . format ( * * format_args ) hasher = ( ( lambda salt , value : value ) if hash_type in [ NAMEID_FORMAT_EMAILADDRESS , NAMEID_FORMAT_UNSPECIFIED ] else util . hash_data ) return hasher ( salt , user_id )
Sets a user id to the internal_response in the format specified by the internal response
284
18
230,946
def load_backends ( config , callback , internal_attributes ) : backend_modules = _load_plugins ( config . get ( "CUSTOM_PLUGIN_MODULE_PATHS" ) , config [ "BACKEND_MODULES" ] , backend_filter , config [ "BASE" ] , internal_attributes , callback ) logger . info ( "Setup backends: %s" % [ backend . name for backend in backend_modules ] ) return backend_modules
Load all backend modules specified in the config
107
8
230,947
def load_frontends ( config , callback , internal_attributes ) : frontend_modules = _load_plugins ( config . get ( "CUSTOM_PLUGIN_MODULE_PATHS" ) , config [ "FRONTEND_MODULES" ] , frontend_filter , config [ "BASE" ] , internal_attributes , callback ) logger . info ( "Setup frontends: %s" % [ frontend . name for frontend in frontend_modules ] ) return frontend_modules
Load all frontend modules specified in the config
114
9
230,948
def _micro_service_filter ( cls ) : is_microservice_module = issubclass ( cls , MicroService ) is_correct_subclass = cls != MicroService and cls != ResponseMicroService and cls != RequestMicroService return is_microservice_module and is_correct_subclass
Will only give a find on classes that is a subclass of MicroService with the exception that the class is not allowed to be a direct ResponseMicroService or RequestMicroService .
69
35
230,949
def _load_plugins ( plugin_paths , plugins , plugin_filter , base_url , internal_attributes , callback ) : loaded_plugin_modules = [ ] with prepend_to_import_path ( plugin_paths ) : for plugin_config in plugins : try : module_class = _load_endpoint_module ( plugin_config , plugin_filter ) except SATOSAConfigurationError as e : raise SATOSAConfigurationError ( "Configuration error in {}" . format ( json . dumps ( plugin_config ) ) ) from e if module_class : module_config = _replace_variables_in_plugin_module_config ( plugin_config [ "config" ] , base_url , plugin_config [ "name" ] ) instance = module_class ( callback , internal_attributes , module_config , base_url , plugin_config [ "name" ] ) loaded_plugin_modules . append ( instance ) return loaded_plugin_modules
Loads endpoint plugins
214
4
230,950
def create_and_write_saml_metadata ( proxy_conf , key , cert , dir , valid , split_frontend_metadata = False , split_backend_metadata = False ) : satosa_config = SATOSAConfig ( proxy_conf ) secc = _get_security_context ( key , cert ) frontend_entities , backend_entities = create_entity_descriptors ( satosa_config ) output = [ ] if frontend_entities : if split_frontend_metadata : output . extend ( _create_split_entity_descriptors ( frontend_entities , secc , valid ) ) else : output . extend ( _create_merged_entities_descriptors ( frontend_entities , secc , valid , "frontend.xml" ) ) if backend_entities : if split_backend_metadata : output . extend ( _create_split_entity_descriptors ( backend_entities , secc , valid ) ) else : output . extend ( _create_merged_entities_descriptors ( backend_entities , secc , valid , "backend.xml" ) ) for metadata , filename in output : path = os . path . join ( dir , filename ) print ( "Writing metadata to '{}'" . format ( path ) ) with open ( path , "w" ) as f : f . write ( metadata )
Generates SAML metadata for the given PROXY_CONF signed with the given KEY and associated CERT .
310
23
230,951
def _handle_al_response ( self , context ) : saved_state = context . state [ self . name ] internal_response = InternalData . from_dict ( saved_state ) #subject_id here is the linked id , not the facebook one, Figure out what to do status_code , message = self . _get_uuid ( context , internal_response . auth_info . issuer , internal_response . attributes [ 'issuer_user_id' ] ) if status_code == 200 : satosa_logging ( logger , logging . INFO , "issuer/id pair is linked in AL service" , context . state ) internal_response . subject_id = message if self . id_to_attr : internal_response . attributes [ self . id_to_attr ] = [ message ] del context . state [ self . name ] return super ( ) . process ( context , internal_response ) else : # User selected not to link their accounts, so the internal.response.subject_id is based on the # issuers id/sub which is fine satosa_logging ( logger , logging . INFO , "User selected to not link their identity in AL service" , context . state ) del context . state [ self . name ] return super ( ) . process ( context , internal_response )
Endpoint for handling account linking service response . When getting here user might have approved or rejected linking their account
280
21
230,952
def process ( self , context , internal_response ) : status_code , message = self . _get_uuid ( context , internal_response . auth_info . issuer , internal_response . subject_id ) data = { "issuer" : internal_response . auth_info . issuer , "redirect_endpoint" : "%s/account_linking%s" % ( self . base_url , self . endpoint ) } # Store the issuer subject_id/sub because we'll need it in handle_al_response internal_response . attributes [ 'issuer_user_id' ] = internal_response . subject_id if status_code == 200 : satosa_logging ( logger , logging . INFO , "issuer/id pair is linked in AL service" , context . state ) internal_response . subject_id = message data [ 'user_id' ] = message if self . id_to_attr : internal_response . attributes [ self . id_to_attr ] = [ message ] else : satosa_logging ( logger , logging . INFO , "issuer/id pair is not linked in AL service. Got a ticket" , context . state ) data [ 'ticket' ] = message jws = JWS ( json . dumps ( data ) , alg = self . signing_key . alg ) . sign_compact ( [ self . signing_key ] ) context . state [ self . name ] = internal_response . to_dict ( ) return Redirect ( "%s/%s" % ( self . redirect_url , jws ) )
Manage account linking and recovery
343
6
230,953
def backend_routing ( self , context ) : satosa_logging ( logger , logging . DEBUG , "Routing to backend: %s " % context . target_backend , context . state ) backend = self . backends [ context . target_backend ] [ "instance" ] context . state [ STATE_KEY ] = context . target_frontend return backend
Returns the targeted backend and an updated state
80
8
230,954
def frontend_routing ( self , context ) : target_frontend = context . state [ STATE_KEY ] satosa_logging ( logger , logging . DEBUG , "Routing to frontend: %s " % target_frontend , context . state ) context . target_frontend = target_frontend frontend = self . frontends [ context . target_frontend ] [ "instance" ] return frontend
Returns the targeted frontend and original state
91
8
230,955
def endpoint_routing ( self , context ) : if context . path is None : satosa_logging ( logger , logging . DEBUG , "Context did not contain a path!" , context . state ) raise SATOSABadContextError ( "Context did not contain any path" ) satosa_logging ( logger , logging . DEBUG , "Routing path: %s" % context . path , context . state ) path_split = context . path . split ( "/" ) backend = path_split [ 0 ] if backend in self . backends : context . target_backend = backend else : satosa_logging ( logger , logging . DEBUG , "Unknown backend %s" % backend , context . state ) try : name , frontend_endpoint = self . _find_registered_endpoint ( context , self . frontends ) except ModuleRouter . UnknownEndpoint as e : pass else : context . target_frontend = name return frontend_endpoint try : name , micro_service_endpoint = self . _find_registered_endpoint ( context , self . micro_services ) except ModuleRouter . UnknownEndpoint as e : pass else : context . target_micro_service = name return micro_service_endpoint if backend in self . backends : backend_endpoint = self . _find_registered_backend_endpoint ( context ) if backend_endpoint : return backend_endpoint raise SATOSANoBoundEndpointError ( "'{}' not bound to any function" . format ( context . path ) )
Finds and returns the endpoint function bound to the path
334
11
230,956
def _auth_req_callback_func ( self , context , internal_request ) : state = context . state state [ STATE_KEY ] = { "requester" : internal_request . requester } # TODO consent module should manage any state it needs by itself try : state_dict = context . state [ consent . STATE_KEY ] except KeyError : state_dict = context . state [ consent . STATE_KEY ] = { } finally : state_dict . update ( { "filter" : internal_request . attributes or [ ] , "requester_name" : internal_request . requester_name , } ) satosa_logging ( logger , logging . INFO , "Requesting provider: {}" . format ( internal_request . requester ) , state ) if self . request_micro_services : return self . request_micro_services [ 0 ] . process ( context , internal_request ) return self . _auth_req_finish ( context , internal_request )
This function is called by a frontend module when an authorization request has been processed .
212
17
230,957
def _auth_resp_callback_func ( self , context , internal_response ) : context . request = None internal_response . requester = context . state [ STATE_KEY ] [ "requester" ] # If configured construct the user id from attribute values. if "user_id_from_attrs" in self . config [ "INTERNAL_ATTRIBUTES" ] : subject_id = [ "" . join ( internal_response . attributes [ attr ] ) for attr in self . config [ "INTERNAL_ATTRIBUTES" ] [ "user_id_from_attrs" ] ] internal_response . subject_id = "" . join ( subject_id ) if self . response_micro_services : return self . response_micro_services [ 0 ] . process ( context , internal_response ) return self . _auth_resp_finish ( context , internal_response )
This function is called by a backend module when the authorization is complete .
199
14
230,958
def _handle_satosa_authentication_error ( self , error ) : context = Context ( ) context . state = error . state frontend = self . module_router . frontend_routing ( context ) return frontend . handle_backend_error ( error )
Sends a response to the requester about the error
60
11
230,959
def _load_state ( self , context ) : try : state = cookie_to_state ( context . cookie , self . config [ "COOKIE_STATE_NAME" ] , self . config [ "STATE_ENCRYPTION_KEY" ] ) except SATOSAStateError as e : msg_tmpl = 'Failed to decrypt state {state} with {error}' msg = msg_tmpl . format ( state = context . cookie , error = str ( e ) ) satosa_logging ( logger , logging . WARNING , msg , None ) state = State ( ) finally : context . state = state
Load state from cookie to the context
135
7
230,960
def _save_state ( self , resp , context ) : cookie = state_to_cookie ( context . state , self . config [ "COOKIE_STATE_NAME" ] , "/" , self . config [ "STATE_ENCRYPTION_KEY" ] ) resp . headers . append ( tuple ( cookie . output ( ) . split ( ": " , 1 ) ) )
Saves a state from context to cookie
84
8
230,961
def run ( self , context ) : try : self . _load_state ( context ) spec = self . module_router . endpoint_routing ( context ) resp = self . _run_bound_endpoint ( context , spec ) self . _save_state ( resp , context ) except SATOSANoBoundEndpointError : raise except SATOSAError : satosa_logging ( logger , logging . ERROR , "Uncaught SATOSA error " , context . state , exc_info = True ) raise except UnknownSystemEntity as err : satosa_logging ( logger , logging . ERROR , "configuration error: unknown system entity " + str ( err ) , context . state , exc_info = False ) raise except Exception as err : satosa_logging ( logger , logging . ERROR , "Uncaught exception" , context . state , exc_info = True ) raise SATOSAUnknownError ( "Unknown error" ) from err return resp
Runs the satosa proxy with the given context .
207
11
230,962
def disco_query ( self ) : return_url = self . sp . config . getattr ( "endpoints" , "sp" ) [ "discovery_response" ] [ 0 ] [ 0 ] loc = self . sp . create_discovery_service_request ( self . discosrv , self . sp . config . entityid , * * { "return" : return_url } ) return SeeOther ( loc )
Makes a request to the discovery server
93
8
230,963
def authn_request ( self , context , entity_id ) : # If IDP blacklisting is enabled and the selected IDP is blacklisted, # stop here if self . idp_blacklist_file : with open ( self . idp_blacklist_file ) as blacklist_file : blacklist_array = json . load ( blacklist_file ) [ 'blacklist' ] if entity_id in blacklist_array : satosa_logging ( logger , logging . DEBUG , "IdP with EntityID {} is blacklisted" . format ( entity_id ) , context . state , exc_info = False ) raise SATOSAAuthenticationError ( context . state , "Selected IdP is blacklisted for this backend" ) kwargs = { } authn_context = self . construct_requested_authn_context ( entity_id ) if authn_context : kwargs [ 'requested_authn_context' ] = authn_context try : binding , destination = self . sp . pick_binding ( "single_sign_on_service" , None , "idpsso" , entity_id = entity_id ) satosa_logging ( logger , logging . DEBUG , "binding: %s, destination: %s" % ( binding , destination ) , context . state ) acs_endp , response_binding = self . sp . config . getattr ( "endpoints" , "sp" ) [ "assertion_consumer_service" ] [ 0 ] req_id , req = self . sp . create_authn_request ( destination , binding = response_binding , * * kwargs ) relay_state = util . rndstr ( ) ht_args = self . sp . apply_binding ( binding , "%s" % req , destination , relay_state = relay_state ) satosa_logging ( logger , logging . DEBUG , "ht_args: %s" % ht_args , context . state ) except Exception as exc : satosa_logging ( logger , logging . DEBUG , "Failed to construct the AuthnRequest for state" , context . state , exc_info = True ) raise SATOSAAuthenticationError ( context . state , "Failed to construct the AuthnRequest" ) from exc if self . sp . config . getattr ( 'allow_unsolicited' , 'sp' ) is False : if req_id in self . outstanding_queries : errmsg = "Request with duplicate id {}" . format ( req_id ) satosa_logging ( logger , logging . DEBUG , errmsg , context . state ) raise SATOSAAuthenticationError ( context . state , errmsg ) self . 
outstanding_queries [ req_id ] = req context . state [ self . name ] = { "relay_state" : relay_state } return make_saml_response ( binding , ht_args )
Do an authorization request on idp with given entity id . This is the start of the authorization .
631
20
230,964
def disco_response ( self , context ) : info = context . request state = context . state try : entity_id = info [ "entityID" ] except KeyError as err : satosa_logging ( logger , logging . DEBUG , "No IDP chosen for state" , state , exc_info = True ) raise SATOSAAuthenticationError ( state , "No IDP chosen" ) from err return self . authn_request ( context , entity_id )
Endpoint for the discovery server response
102
7
230,965
def _translate_response ( self , response , state ) : # The response may have been encrypted by the IdP so if we have an # encryption key, try it. if self . encryption_keys : response . parse_assertion ( self . encryption_keys ) authn_info = response . authn_info ( ) [ 0 ] auth_class_ref = authn_info [ 0 ] timestamp = response . assertion . authn_statement [ 0 ] . authn_instant issuer = response . response . issuer . text auth_info = AuthenticationInformation ( auth_class_ref , timestamp , issuer , ) # The SAML response may not include a NameID. subject = response . get_subject ( ) name_id = subject . text if subject else None name_id_format = subject . format if subject else None attributes = self . converter . to_internal ( self . attribute_profile , response . ava , ) internal_resp = InternalData ( auth_info = auth_info , attributes = attributes , subject_type = name_id_format , subject_id = name_id , ) satosa_logging ( logger , logging . DEBUG , "backend received attributes:\n%s" % json . dumps ( response . ava , indent = 4 ) , state ) return internal_resp
Translates a saml authorization response to an internal response
280
12
230,966
def to_internal_filter ( self , attribute_profile , external_attribute_names ) : try : profile_mapping = self . to_internal_attributes [ attribute_profile ] except KeyError : logger . warn ( "no attribute mapping found for the given attribute profile '%s'" , attribute_profile ) # no attributes since the given profile is not configured return [ ] internal_attribute_names = set ( ) # use set to ensure only unique values for external_attribute_name in external_attribute_names : try : internal_attribute_name = profile_mapping [ external_attribute_name ] internal_attribute_names . add ( internal_attribute_name ) except KeyError : pass return list ( internal_attribute_names )
Converts attribute names from external type to internal
157
9
230,967
def to_internal ( self , attribute_profile , external_dict ) : internal_dict = { } for internal_attribute_name , mapping in self . from_internal_attributes . items ( ) : if attribute_profile not in mapping : logger . debug ( "no attribute mapping found for internal attribute '%s' the attribute profile '%s'" % ( internal_attribute_name , attribute_profile ) ) # skip this internal attribute if we have no mapping in the specified profile continue external_attribute_name = mapping [ attribute_profile ] attribute_values = self . _collate_attribute_values_by_priority_order ( external_attribute_name , external_dict ) if attribute_values : # Only insert key if it has some values logger . debug ( "backend attribute '%s' mapped to %s" % ( external_attribute_name , internal_attribute_name ) ) internal_dict [ internal_attribute_name ] = attribute_values else : logger . debug ( "skipped backend attribute '%s': no value found" , external_attribute_name ) internal_dict = self . _handle_template_attributes ( attribute_profile , internal_dict ) return internal_dict
Converts the external data from type to internal
257
9
230,968
def from_internal ( self , attribute_profile , internal_dict ) : external_dict = { } for internal_attribute_name in internal_dict : try : attribute_mapping = self . from_internal_attributes [ internal_attribute_name ] except KeyError : logger . debug ( "no attribute mapping found for the internal attribute '%s'" , internal_attribute_name ) continue if attribute_profile not in attribute_mapping : # skip this internal attribute if we have no mapping in the specified profile logger . debug ( "no mapping found for '%s' in attribute profile '%s'" % ( internal_attribute_name , attribute_profile ) ) continue external_attribute_names = self . from_internal_attributes [ internal_attribute_name ] [ attribute_profile ] # select the first attribute name external_attribute_name = external_attribute_names [ 0 ] logger . debug ( "frontend attribute %s mapped from %s" % ( external_attribute_name , internal_attribute_name ) ) if self . separator in external_attribute_name : nested_attribute_names = external_attribute_name . split ( self . separator ) nested_dict = self . _create_nested_attribute_value ( nested_attribute_names [ 1 : ] , internal_dict [ internal_attribute_name ] ) external_dict [ nested_attribute_names [ 0 ] ] = nested_dict else : external_dict [ external_attribute_name ] = internal_dict [ internal_attribute_name ] return external_dict
Converts the internal data to type
331
7
230,969
def _serialize ( self , version , serialize_format , context , raise_if_incomplete = False ) : values = context . copy ( ) for k in version : values [ k ] = version [ k ] # TODO dump complete context on debug level try : # test whether all parts required in the format have values serialized = serialize_format . format ( * * values ) except KeyError as e : missing_key = getattr ( e , "message" , e . args [ 0 ] ) raise MissingValueForSerializationException ( "Did not find key {} in {} when serializing version number" . format ( repr ( missing_key ) , repr ( version ) ) ) keys_needing_representation = set ( ) found_required = False for k in self . order ( ) : v = values [ k ] if not isinstance ( v , VersionPart ) : # values coming from environment variables don't need # representation continue if not v . is_optional ( ) : found_required = True keys_needing_representation . add ( k ) elif not found_required : keys_needing_representation . add ( k ) required_by_format = set ( self . _labels_for_format ( serialize_format ) ) # try whether all parsed keys are represented if raise_if_incomplete : if not ( keys_needing_representation <= required_by_format ) : raise IncompleteVersionRepresentationException ( "Could not represent '{}' in format '{}'" . format ( "', '" . join ( keys_needing_representation ^ required_by_format ) , serialize_format , ) ) return serialized
Attempts to serialize a version with the given serialization format .
362
13
230,970
def value_text ( self ) : search = self . _selected . get ( ) # a string containing the selected option # This is a bit nasty - suggestions welcome for item in self . _rbuttons : if item . value == search : return item . text return ""
Sets or returns the option selected in a ButtonGroup by its text value .
57
16
230,971
def append ( self , option ) : self . _options . append ( self . _parse_option ( option ) ) self . _refresh_options ( ) self . resize ( self . _width , self . _height )
Appends a new option to the end of the ButtonGroup .
48
13
230,972
def insert ( self , index , option ) : self . _options . insert ( index , self . _parse_option ( option ) ) self . _refresh_options ( ) self . resize ( self . _width , self . _height )
Insert a new option in the ButtonGroup at index .
52
11
230,973
def remove ( self , option ) : for existing_option in self . _options : if existing_option [ 1 ] == option : self . _options . remove ( existing_option ) self . _refresh_options ( ) return True return False
Removes the first option from the ButtonGroup .
52
10
230,974
def update_command ( self , command , args = None ) : if command is None : self . _command = lambda : None else : if args is None : self . _command = command else : self . _command = utils . with_args ( command , * args )
Updates the callback command which is called when the ButtonGroup changes .
59
14
230,975
def insert ( self , index , option ) : option = str ( option ) self . _options . insert ( index , option ) # if this is the first option, set it. if len ( self . _options ) == 1 : self . value = option self . _refresh_options ( )
Insert a new option in the Combo at index .
63
10
230,976
def remove ( self , option ) : if option in self . _options : if len ( self . _options ) == 1 : # this is the last option in the list so clear it self . clear ( ) else : self . _options . remove ( option ) self . _refresh_options ( ) # have we just removed the selected option? # if so set it to the first option if option == self . value : self . _set_option ( self . _options [ 0 ] ) return True else : return False
Removes the first option from the Combo .
110
9
230,977
def clear ( self ) : self . _options = [ ] self . _combo_menu . tk . delete ( 0 , END ) self . _selected . set ( "" )
Clears all the options in a Combo
39
8
230,978
def _set_option ( self , value ) : if len ( self . _options ) > 0 : if value in self . _options : self . _selected . set ( value ) return True else : return False else : return False
Sets a single option in the Combo returning True if it was able to .
49
16
230,979
def _set_option_by_index ( self , index ) : if index < len ( self . _options ) : self . _selected . set ( self . _options [ index ] ) return True else : return False
Sets a single option in the Combo by its index returning True if it was able to .
47
19
230,980
def after ( self , time , function , args = [ ] ) : callback_id = self . tk . after ( time , self . _call_wrapper , time , function , * args ) self . _callback [ function ] = [ callback_id , False ]
Call function after time milliseconds .
57
6
230,981
def repeat ( self , time , function , args = [ ] ) : callback_id = self . tk . after ( time , self . _call_wrapper , time , function , * args ) self . _callback [ function ] = [ callback_id , True ]
Repeat function every time milliseconds .
57
6
230,982
def cancel ( self , function ) : if function in self . _callback . keys ( ) : callback_id = self . _callback [ function ] [ 0 ] self . tk . after_cancel ( callback_id ) self . _callback . pop ( function ) else : utils . error_format ( "Could not cancel function - it doesnt exist, it may have already run" )
Cancel the scheduled function calls .
83
7
230,983
def _call_wrapper ( self , time , function , * args ) : # execute the function function ( * args ) if function in self . _callback . keys ( ) : repeat = self . _callback [ function ] [ 1 ] if repeat : # setup the call back again and update the id callback_id = self . tk . after ( time , self . _call_wrapper , time , function , * args ) self . _callback [ function ] [ 0 ] = callback_id else : # remove it from the call back dictionary self . _callback . pop ( function )
Fired by tk . after gets the callback and either executes the function and cancels or repeats
122
20
230,984
def rebind ( self , tks ) : self . _tks = tks for tk in self . _tks : tk . unbind_all ( self . _tk_event ) func_id = tk . bind ( self . _tk_event , self . _event_callback ) self . _func_ids . append ( func_id )
Rebinds the tk event only used if a widget has been destroyed and recreated .
79
19
230,985
def rebind_events ( self , * tks ) : for ref in self . _refs : self . _refs [ ref ] . rebind ( tks )
Rebinds all the tk events only used if a tk widget has been destroyed and recreated .
37
22
230,986
def set_border ( self , thickness , color = "black" ) : self . _set_tk_config ( "highlightthickness" , thickness ) self . _set_tk_config ( "highlightbackground" , utils . convert_color ( color ) )
Sets the border thickness and color .
60
8
230,987
def hide ( self ) : self . tk . withdraw ( ) self . _visible = False if self . _modal : self . tk . grab_release ( )
Hide the window .
37
4
230,988
def show ( self , wait = False ) : self . tk . deiconify ( ) self . _visible = True self . _modal = wait if self . _modal : self . tk . grab_set ( )
Show the window .
50
4
230,989
def destroy ( self ) : # if this is the main_app - set the _main_app class variable to `None`. if self == App . _main_app : App . _main_app = None self . tk . destroy ( )
Destroy and close the App .
53
6
230,990
def line ( self , x1 , y1 , x2 , y2 , color = "black" , width = 1 ) : return self . tk . create_line ( x1 , y1 , x2 , y2 , width = width , fill = "" if color is None else utils . convert_color ( color ) )
Draws a line between 2 points
72
7
230,991
def oval ( self , x1 , y1 , x2 , y2 , color = "black" , outline = False , outline_color = "black" ) : return self . tk . create_oval ( x1 , y1 , x2 , y2 , outline = utils . convert_color ( outline_color ) if outline else "" , width = int ( outline ) , fill = "" if color is None else utils . convert_color ( color ) )
Draws an oval between 2 points
101
7
230,992
def rectangle ( self , x1 , y1 , x2 , y2 , color = "black" , outline = False , outline_color = "black" ) : return self . tk . create_rectangle ( x1 , y1 , x2 , y2 , outline = utils . convert_color ( outline_color ) if outline else "" , width = int ( outline ) , fill = "" if color is None else utils . convert_color ( color ) )
Draws a rectangle between 2 points
102
7
230,993
def polygon ( self , * coords , color = "black" , outline = False , outline_color = "black" ) : return self . tk . create_polygon ( * coords , outline = utils . convert_color ( outline_color ) if outline else "" , width = int ( outline ) , fill = "" if color is None else utils . convert_color ( color ) )
Draws a polygon from a list of co - ordinates
87
13
230,994
def triangle ( self , x1 , y1 , x2 , y2 , x3 , y3 , color = "black" , outline = False , outline_color = "black" ) : return self . polygon ( x1 , y1 , x2 , y2 , x3 , y3 , color = color , outline = outline , outline_color = outline_color )
Draws a triangle between 3 points
82
7
230,995
def _get_tk_config ( self , key , default = False ) : if default : return self . _tk_defaults [ key ] else : return self . tk [ key ]
Gets the config from the widget s tk object .
41
12
230,996
def _set_tk_config ( self , keys , value ) : # if a single key is passed, convert to list if isinstance ( keys , str ) : keys = [ keys ] # loop through all the keys for key in keys : if key in self . tk . keys ( ) : if value is None : # reset to default self . tk [ key ] = self . _tk_defaults [ key ] else : self . tk [ key ] = value
Gets the config from the widget s tk object
101
11
230,997
def destroy ( self ) : # if this widget has a master remove the it from the master if self . master is not None : self . master . _remove_child ( self ) self . tk . destroy ( )
Destroy the tk widget .
46
6
230,998
def add_tk_widget ( self , tk_widget , grid = None , align = None , visible = True , enabled = None , width = None , height = None ) : return Widget ( self , tk_widget , "tk widget" , grid , align , visible , enabled , width , height )
Adds a tk widget into a guizero container .
67
12
230,999
def display_widgets ( self ) : # All widgets are removed and then recreated to ensure the order they # were created is the order they are displayed. for child in self . children : if child . displayable : # forget the widget if self . layout != "grid" : child . tk . pack_forget ( ) else : child . tk . grid_forget ( ) # display the widget if child . visible : if self . layout != "grid" : self . _pack_widget ( child ) else : self . _grid_widget ( child )
Displays all the widgets associated with this Container .
121
10