idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
20,400
def logout ( self , all_session = False ) : # initialize the counter of the number of destroyed sesisons session_nb = 0 # save the current user username before flushing the session username = self . request . session . get ( "username" ) if username : if all_session : logger . info ( "Logging out user %s from all sessions." % username ) else : logger . info ( "Logging out user %s." % username ) users = [ ] # try to get the user from the current session try : users . append ( models . User . objects . get ( username = username , session_key = self . request . session . session_key ) ) except models . User . DoesNotExist : # if user not found in database, flush the session anyway self . request . session . flush ( ) # If all_session is set, search all of the user sessions if all_session : users . extend ( models . User . objects . filter ( username = username ) . exclude ( session_key = self . request . session . session_key ) ) # Iterate over all user sessions that have to be logged out for user in users : # get the user session session = SessionStore ( session_key = user . session_key ) # flush the session session . flush ( ) # send SLO requests user . logout ( self . request ) # delete the user user . delete ( ) # increment the destroyed session counter session_nb += 1 if username : logger . info ( "User %s logged out" % username ) return session_nb
effectively destroy a CAS session
331
6
20,401
def get_cas_client ( self , request , provider , renew = False ) : # compute the current url, ignoring ticket dans provider GET parameters service_url = utils . get_current_url ( request , { "ticket" , "provider" } ) self . service_url = service_url return CASFederateValidateUser ( provider , service_url , renew = renew )
return a CAS client object matching provider
85
7
20,402
def post ( self , request , provider = None ) : # if settings.CAS_FEDERATE is not True redirect to the login page if not settings . CAS_FEDERATE : logger . warning ( "CAS_FEDERATE is False, set it to True to use federation" ) return redirect ( "cas_server:login" ) # POST with a provider suffix, this is probably an SLO request. csrf is disabled for # allowing SLO requests reception try : provider = FederatedIendityProvider . objects . get ( suffix = provider ) auth = self . get_cas_client ( request , provider ) try : auth . clean_sessions ( request . POST [ 'logoutRequest' ] ) except ( KeyError , AttributeError ) : pass return HttpResponse ( "ok" ) # else, a User is trying to log in using an identity provider except FederatedIendityProvider . DoesNotExist : # Manually checking for csrf to protect the code below reason = CsrfViewMiddleware ( ) . process_view ( request , None , ( ) , { } ) if reason is not None : # pragma: no cover (csrf checks are disabled during tests) return reason # Failed the test, stop here. form = forms . FederateSelect ( request . POST ) if form . is_valid ( ) : params = utils . copy_params ( request . POST , ignore = { "provider" , "csrfmiddlewaretoken" , "ticket" , "lt" } ) if params . get ( "renew" ) == "False" : del params [ "renew" ] url = utils . reverse_params ( "cas_server:federateAuth" , kwargs = dict ( provider = form . cleaned_data [ "provider" ] . suffix ) , params = params ) return HttpResponseRedirect ( url ) else : return redirect ( "cas_server:login" )
method called on POST request
424
5
20,403
def get ( self , request , provider = None ) : # if settings.CAS_FEDERATE is not True redirect to the login page if not settings . CAS_FEDERATE : logger . warning ( "CAS_FEDERATE is False, set it to True to use federation" ) return redirect ( "cas_server:login" ) renew = bool ( request . GET . get ( 'renew' ) and request . GET [ 'renew' ] != "False" ) # Is the user is already authenticated, no need to request authentication to the user # identity provider. if self . request . session . get ( "authenticated" ) and not renew : logger . warning ( "User already authenticated, dropping federated authentication request" ) return redirect ( "cas_server:login" ) try : # get the identity provider from its suffix provider = FederatedIendityProvider . objects . get ( suffix = provider ) # get a CAS client for the user identity provider auth = self . get_cas_client ( request , provider , renew ) # if no ticket submited, redirect to the identity provider CAS login page if 'ticket' not in request . GET : logger . info ( "Trying to authenticate %s again" % auth . provider . server_url ) return HttpResponseRedirect ( auth . get_login_url ( ) ) else : ticket = request . GET [ 'ticket' ] try : # if the ticket validation succeed if auth . verify_ticket ( ticket ) : logger . info ( "Got a valid ticket for %s from %s" % ( auth . username , auth . provider . server_url ) ) params = utils . copy_params ( request . GET , ignore = { "ticket" , "remember" } ) request . session [ "federate_username" ] = auth . federated_username request . session [ "federate_ticket" ] = ticket auth . register_slo ( auth . federated_username , request . session . session_key , ticket ) # redirect to the the login page for the user to become authenticated # thanks to the `federate_username` and `federate_ticket` session parameters url = utils . 
reverse_params ( "cas_server:login" , params ) response = HttpResponseRedirect ( url ) # If the user has checked "remember my identity provider" store it in a # cookie if request . GET . get ( "remember" ) : max_age = settings . CAS_FEDERATE_REMEMBER_TIMEOUT utils . set_cookie ( response , "remember_provider" , provider . suffix , max_age ) return response # else redirect to the identity provider CAS login page else : logger . info ( ( "Got an invalid ticket %s from %s for service %s. " "Retrying authentication" ) % ( ticket , auth . provider . server_url , self . service_url ) ) return HttpResponseRedirect ( auth . get_login_url ( ) ) # both xml.etree.ElementTree and lxml.etree exceptions inherit from SyntaxError except SyntaxError as error : messages . add_message ( request , messages . ERROR , _ ( u"Invalid response from your identity provider CAS upon " u"ticket %(ticket)s validation: %(error)r" ) % { 'ticket' : ticket , 'error' : error } ) response = redirect ( "cas_server:login" ) response . delete_cookie ( "remember_provider" ) return response except FederatedIendityProvider . DoesNotExist : logger . warning ( "Identity provider suffix %s not found" % provider ) # if the identity provider is not found, redirect to the login page return redirect ( "cas_server:login" )
method called on GET request
813
5
20,404
def init_post ( self , request ) : self . request = request self . service = request . POST . get ( 'service' ) self . renew = bool ( request . POST . get ( 'renew' ) and request . POST [ 'renew' ] != "False" ) self . gateway = request . POST . get ( 'gateway' ) self . method = request . POST . get ( 'method' ) self . ajax = settings . CAS_ENABLE_AJAX_AUTH and 'HTTP_X_AJAX' in request . META if request . POST . get ( 'warned' ) and request . POST [ 'warned' ] != "False" : self . warned = True self . warn = request . POST . get ( 'warn' ) if settings . CAS_FEDERATE : self . username = request . POST . get ( 'username' ) # in federated mode, the valdated indentity provider CAS ticket is used as password self . ticket = request . POST . get ( 'password' )
Initialize POST received parameters
226
5
20,405
def gen_lt ( self ) : self . request . session [ 'lt' ] = self . request . session . get ( 'lt' , [ ] ) + [ utils . gen_lt ( ) ] if len ( self . request . session [ 'lt' ] ) > 100 : self . request . session [ 'lt' ] = self . request . session [ 'lt' ] [ - 100 : ]
Generate a new LoginTicket and add it to the list of valid LT for the user
88
19
20,406
def check_lt ( self ) : # save LT for later check lt_valid = self . request . session . get ( 'lt' , [ ] ) lt_send = self . request . POST . get ( 'lt' ) # generate a new LT (by posting the LT has been consumed) self . gen_lt ( ) # check if send LT is valid if lt_send not in lt_valid : return False else : self . request . session [ 'lt' ] . remove ( lt_send ) # we need to redo the affectation for django to detect that the list has changed # and for its new value to be store in the session self . request . session [ 'lt' ] = self . request . session [ 'lt' ] return True
Check is the POSTed LoginTicket is valid if yes invalide it
167
15
20,407
def init_get ( self , request ) : self . request = request self . service = request . GET . get ( 'service' ) self . renew = bool ( request . GET . get ( 'renew' ) and request . GET [ 'renew' ] != "False" ) self . gateway = request . GET . get ( 'gateway' ) self . method = request . GET . get ( 'method' ) self . ajax = settings . CAS_ENABLE_AJAX_AUTH and 'HTTP_X_AJAX' in request . META self . warn = request . GET . get ( 'warn' ) if settings . CAS_FEDERATE : # here username and ticket are fetch from the session after a redirection from # FederateAuth.get self . username = request . session . get ( "federate_username" ) self . ticket = request . session . get ( "federate_ticket" ) if self . username : del request . session [ "federate_username" ] if self . ticket : del request . session [ "federate_ticket" ]
Initialize GET received parameters
241
5
20,408
def process_get ( self ) : # generate a new LT self . gen_lt ( ) if not self . request . session . get ( "authenticated" ) or self . renew : # authentication will be needed, initialize the form to use self . init_form ( ) return self . USER_NOT_AUTHENTICATED return self . USER_AUTHENTICATED
Analyse the GET request
82
5
20,409
def init_form ( self , values = None ) : if values : values = values . copy ( ) values [ 'lt' ] = self . request . session [ 'lt' ] [ - 1 ] form_initial = { 'service' : self . service , 'method' : self . method , 'warn' : ( self . warn or self . request . session . get ( "warn" ) or self . request . COOKIES . get ( 'warn' ) ) , 'lt' : self . request . session [ 'lt' ] [ - 1 ] , 'renew' : self . renew } if settings . CAS_FEDERATE : if self . username and self . ticket : form_initial [ 'username' ] = self . username form_initial [ 'password' ] = self . ticket form_initial [ 'ticket' ] = self . ticket self . form = forms . FederateUserCredential ( values , initial = form_initial ) else : self . form = forms . FederateSelect ( values , initial = form_initial ) else : self . form = forms . UserCredential ( values , initial = form_initial )
Initialization of the good form depending of POST and GET parameters
247
12
20,410
def service_login ( self ) : try : # is the service allowed service_pattern = ServicePattern . validate ( self . service ) # is the current user allowed on this service service_pattern . check_user ( self . user ) # if the user has asked to be warned before any login to a service if self . request . session . get ( "warn" , True ) and not self . warned : messages . add_message ( self . request , messages . WARNING , _ ( u"Authentication has been required by service %(name)s (%(url)s)" ) % { 'name' : service_pattern . name , 'url' : self . service } ) if self . ajax : data = { "status" : "error" , "detail" : "confirmation needed" } return json_response ( self . request , data ) else : warn_form = forms . WarnForm ( initial = { 'service' : self . service , 'renew' : self . renew , 'gateway' : self . gateway , 'method' : self . method , 'warned' : True , 'lt' : self . request . session [ 'lt' ] [ - 1 ] } ) return render ( self . request , settings . CAS_WARN_TEMPLATE , utils . context ( { 'form' : warn_form } ) ) else : # redirect, using method ? list ( messages . get_messages ( self . request ) ) # clean messages before leaving django redirect_url = self . user . get_service_url ( self . service , service_pattern , renew = self . renewed ) if not self . ajax : return HttpResponseRedirect ( redirect_url ) else : data = { "status" : "success" , "detail" : "auth" , "url" : redirect_url } return json_response ( self . request , data ) except ServicePattern . DoesNotExist : error = 1 messages . add_message ( self . request , messages . ERROR , _ ( u'Service %(url)s not allowed.' ) % { 'url' : self . service } ) except models . BadUsername : error = 2 messages . add_message ( self . request , messages . ERROR , _ ( u"Username not allowed" ) ) except models . BadFilter : error = 3 messages . add_message ( self . request , messages . ERROR , _ ( u"User characteristics not allowed" ) ) except models . UserFieldNotDefined : error = 4 messages . add_message ( self . 
request , messages . ERROR , _ ( u"The attribute %(field)s is needed to use" u" that service" ) % { 'field' : service_pattern . user_field } ) # if gateway is set and auth failed redirect to the service without authentication if self . gateway and not self . ajax : list ( messages . get_messages ( self . request ) ) # clean messages before leaving django return HttpResponseRedirect ( self . service ) if not self . ajax : return render ( self . request , settings . CAS_LOGGED_TEMPLATE , utils . context ( { 'session' : self . request . session } ) ) else : data = { "status" : "error" , "detail" : "auth" , "code" : error } return json_response ( self . request , data )
Perform login against a service
732
6
20,411
def authenticated ( self ) : # Try to get the current :class:`models.User<cas_server.models.User>` object for the current # session try : self . user = models . User . objects . get ( username = self . request . session . get ( "username" ) , session_key = self . request . session . session_key ) # if not found, flush the session and redirect to the login page except models . User . DoesNotExist : logger . warning ( "User %s seems authenticated but is not found in the database." % ( self . request . session . get ( "username" ) , ) ) self . logout ( ) if self . ajax : data = { "status" : "error" , "detail" : "login required" , "url" : utils . reverse_params ( "cas_server:login" , params = self . request . GET ) } return json_response ( self . request , data ) else : return utils . redirect_params ( "cas_server:login" , params = self . request . GET ) # if login against a service if self . service : return self . service_login ( ) # else display the logged template else : if self . ajax : data = { "status" : "success" , "detail" : "logged" } return json_response ( self . request , data ) else : return render ( self . request , settings . CAS_LOGGED_TEMPLATE , utils . context ( { 'session' : self . request . session } ) )
Processing authenticated users
340
4
20,412
def not_authenticated ( self ) : if self . service : try : service_pattern = ServicePattern . validate ( self . service ) if self . gateway and not self . ajax : # clean messages before leaving django list ( messages . get_messages ( self . request ) ) return HttpResponseRedirect ( self . service ) if settings . CAS_SHOW_SERVICE_MESSAGES : if self . request . session . get ( "authenticated" ) and self . renew : messages . add_message ( self . request , messages . WARNING , _ ( u"Authentication renewal required by service %(name)s (%(url)s)." ) % { 'name' : service_pattern . name , 'url' : self . service } ) else : messages . add_message ( self . request , messages . WARNING , _ ( u"Authentication required by service %(name)s (%(url)s)." ) % { 'name' : service_pattern . name , 'url' : self . service } ) except ServicePattern . DoesNotExist : if settings . CAS_SHOW_SERVICE_MESSAGES : messages . add_message ( self . request , messages . ERROR , _ ( u'Service %s not allowed' ) % self . service ) if self . ajax : data = { "status" : "error" , "detail" : "login required" , "url" : utils . reverse_params ( "cas_server:login" , params = self . request . GET ) } return json_response ( self . request , data ) else : if settings . CAS_FEDERATE : if self . username and self . ticket : return render ( self . request , settings . CAS_LOGIN_TEMPLATE , utils . context ( { 'form' : self . form , 'auto_submit' : True , 'post_url' : reverse ( "cas_server:login" ) } ) ) else : if ( self . request . COOKIES . get ( 'remember_provider' ) and FederatedIendityProvider . objects . filter ( suffix = self . request . COOKIES [ 'remember_provider' ] ) ) : params = utils . copy_params ( self . request . GET ) url = utils . reverse_params ( "cas_server:federateAuth" , params = params , kwargs = dict ( provider = self . request . 
COOKIES [ 'remember_provider' ] ) ) return HttpResponseRedirect ( url ) else : # if user is authenticated and auth renewal is requested, redirect directly # to the user identity provider if self . renew and self . request . session . get ( "authenticated" ) : try : user = FederatedUser . get_from_federated_username ( self . request . session . get ( "username" ) ) params = utils . copy_params ( self . request . GET ) url = utils . reverse_params ( "cas_server:federateAuth" , params = params , kwargs = dict ( provider = user . provider . suffix ) ) return HttpResponseRedirect ( url ) # Should normally not happen: if the user is logged, it exists in the # database. except FederatedUser . DoesNotExist : # pragma: no cover pass return render ( self . request , settings . CAS_LOGIN_TEMPLATE , utils . context ( { 'form' : self . form , 'post_url' : reverse ( "cas_server:federateAuth" ) } ) ) else : return render ( self . request , settings . CAS_LOGIN_TEMPLATE , utils . context ( { 'form' : self . form } ) )
Processing non authenticated users
813
5
20,413
def common ( self ) : # if authenticated and successfully renewed authentication if needed if self . request . session . get ( "authenticated" ) and ( not self . renew or self . renewed ) : return self . authenticated ( ) else : return self . not_authenticated ( )
Common part execute uppon GET and POST request
58
10
20,414
def process_ticket ( self ) : try : proxies = [ ] if self . allow_proxy_ticket : ticket = models . Ticket . get ( self . ticket , self . renew ) else : ticket = models . ServiceTicket . get ( self . ticket , self . renew ) try : for prox in ticket . proxies . all ( ) : proxies . append ( prox . url ) except AttributeError : pass if ticket . service != self . service : raise ValidateError ( u'INVALID_SERVICE' , self . service ) return ticket , proxies except Ticket . DoesNotExist : raise ValidateError ( u'INVALID_TICKET' , self . ticket ) except ( ServiceTicket . DoesNotExist , ProxyTicket . DoesNotExist ) : raise ValidateError ( u'INVALID_TICKET' , 'ticket not found' )
fetch the ticket against the database and check its validity
189
11
20,415
def process_pgturl ( self , params ) : try : pattern = ServicePattern . validate ( self . pgt_url ) if pattern . proxy_callback : proxyid = utils . gen_pgtiou ( ) pticket = ProxyGrantingTicket . objects . create ( user = self . ticket . user , service = self . pgt_url , service_pattern = pattern , single_log_out = pattern . single_log_out ) url = utils . update_url ( self . pgt_url , { 'pgtIou' : proxyid , 'pgtId' : pticket . value } ) try : ret = requests . get ( url , verify = settings . CAS_PROXY_CA_CERTIFICATE_PATH ) if ret . status_code == 200 : params [ 'proxyGrantingTicket' ] = proxyid else : pticket . delete ( ) logger . info ( ( "ValidateService: ticket %s validated for user %s on service %s. " "Proxy Granting Ticket transmited to %s." ) % ( self . ticket . value , self . ticket . user . username , self . ticket . service , self . pgt_url ) ) logger . debug ( "ValidateService: User attributs are:\n%s" % ( pprint . pformat ( self . ticket . attributs ) , ) ) return render ( self . request , "cas_server/serviceValidate.xml" , params , content_type = "text/xml; charset=utf-8" ) except requests . exceptions . RequestException as error : error = utils . unpack_nested_exception ( error ) raise ValidateError ( u'INVALID_PROXY_CALLBACK' , u"%s: %s" % ( type ( error ) , str ( error ) ) ) else : raise ValidateError ( u'INVALID_PROXY_CALLBACK' , u"callback url not allowed by configuration" ) except ServicePattern . DoesNotExist : raise ValidateError ( u'INVALID_PROXY_CALLBACK' , u'callback url not allowed by configuration' )
Handle PGT request
470
4
20,416
def process_proxy ( self ) : try : # is the target service allowed pattern = ServicePattern . validate ( self . target_service ) # to get a proxy ticket require that the service allow it if not pattern . proxy : raise ValidateError ( u'UNAUTHORIZED_SERVICE' , u'the service %s does not allow proxy tickets' % self . target_service ) # is the proxy granting ticket valid ticket = ProxyGrantingTicket . get ( self . pgt ) # is the pgt user allowed on the target service pattern . check_user ( ticket . user ) pticket = ticket . user . get_ticket ( ProxyTicket , self . target_service , pattern , renew = False ) models . Proxy . objects . create ( proxy_ticket = pticket , url = ticket . service ) logger . info ( "Proxy ticket created for user %s on service %s." % ( ticket . user . username , self . target_service ) ) return render ( self . request , "cas_server/proxy.xml" , { 'ticket' : pticket . value } , content_type = "text/xml; charset=utf-8" ) except ( Ticket . DoesNotExist , ProxyGrantingTicket . DoesNotExist ) : raise ValidateError ( u'INVALID_TICKET' , u'PGT %s not found' % self . pgt ) except ServicePattern . DoesNotExist : raise ValidateError ( u'UNAUTHORIZED_SERVICE' , self . target_service ) except ( models . BadUsername , models . BadFilter , models . UserFieldNotDefined ) : raise ValidateError ( u'UNAUTHORIZED_USER' , u'User %s not allowed on %s' % ( ticket . user . username , self . target_service ) )
handle PT request
402
3
20,417
def process_ticket ( self ) : try : auth_req = self . root . getchildren ( ) [ 1 ] . getchildren ( ) [ 0 ] ticket = auth_req . getchildren ( ) [ 0 ] . text ticket = models . Ticket . get ( ticket ) if ticket . service != self . target : raise SamlValidateError ( u'AuthnFailed' , u'TARGET %s does not match ticket service' % self . target ) return ticket except ( IndexError , KeyError ) : raise SamlValidateError ( u'VersionMismatch' ) except Ticket . DoesNotExist : raise SamlValidateError ( u'AuthnFailed' , u'ticket %s should begin with PT- or ST-' % ticket ) except ( ServiceTicket . DoesNotExist , ProxyTicket . DoesNotExist ) : raise SamlValidateError ( u'AuthnFailed' , u'ticket %s not found' % ticket )
validate ticket from SAML XML body
211
8
20,418
def main ( source ) : if source is None : click . echo ( "You need to supply a file or url to a schema to a swagger schema, for" "the validator to work." ) return 1 try : load ( source ) click . echo ( "Validation passed" ) return 0 except ValidationError as e : raise click . ClickException ( str ( e ) )
For a given command line supplied argument negotiate the content parse the schema and then return any issues to stdout or if no schema issues return success exit code .
81
31
20,419
def load_source ( source ) : if isinstance ( source , collections . Mapping ) : return deepcopy ( source ) elif hasattr ( source , 'read' ) and callable ( source . read ) : raw_source = source . read ( ) elif os . path . exists ( os . path . expanduser ( str ( source ) ) ) : with open ( os . path . expanduser ( str ( source ) ) , 'r' ) as source_file : raw_source = source_file . read ( ) elif isinstance ( source , six . string_types ) : parts = urlparse . urlparse ( source ) if parts . scheme and parts . netloc : response = requests . get ( source ) if isinstance ( response . content , six . binary_type ) : raw_source = six . text_type ( response . content , encoding = 'utf-8' ) else : raw_source = response . content else : raw_source = source try : try : return json . loads ( raw_source ) except ValueError : pass try : return yaml . safe_load ( raw_source ) except ( yaml . scanner . ScannerError , yaml . parser . ParserError ) : pass except NameError : pass raise ValueError ( "Unable to parse `{0}`. Tried yaml and json." . format ( source ) , )
Common entry point for loading some form of raw swagger schema .
295
13
20,420
def validate ( raw_schema , target = None , * * kwargs ) : schema = schema_validator ( raw_schema , * * kwargs ) if target is not None : validate_object ( target , schema = schema , * * kwargs )
Given the python representation of a JSONschema as defined in the swagger spec validate that the schema complies to spec . If target is provided that target will be validated against the provided schema .
59
39
20,421
def validate_api_response ( schema , raw_response , request_method = 'get' , raw_request = None ) : request = None if raw_request is not None : request = normalize_request ( raw_request ) response = None if raw_response is not None : response = normalize_response ( raw_response , request = request ) if response is not None : validate_response ( response = response , request_method = request_method , schema = schema )
Validate the response of an api call against a swagger schema .
102
14
20,422
def find_parameter ( parameters , * * kwargs ) : matching_parameters = filter_parameters ( parameters , * * kwargs ) if len ( matching_parameters ) == 1 : return matching_parameters [ 0 ] elif len ( matching_parameters ) > 1 : raise MultipleParametersFound ( ) raise NoParameterFound ( )
Given a list of parameters find the one with the given name .
76
13
20,423
def merge_parameter_lists ( * parameter_definitions ) : merged_parameters = { } for parameter_list in parameter_definitions : for parameter in parameter_list : key = ( parameter [ 'name' ] , parameter [ 'in' ] ) merged_parameters [ key ] = parameter return merged_parameters . values ( )
Merge multiple lists of parameters into a single list . If there are any duplicate definitions the last write wins .
74
22
20,424
def validate_status_code_to_response_definition ( response , operation_definition ) : status_code = response . status_code operation_responses = { str ( code ) : val for code , val in operation_definition [ 'responses' ] . items ( ) } key = status_code if key not in operation_responses : key = 'default' try : response_definition = operation_responses [ key ] except KeyError : raise ValidationError ( MESSAGES [ 'response' ] [ 'invalid_status_code' ] . format ( status_code , ', ' . join ( operation_responses . keys ( ) ) , ) , ) return response_definition
Given a response validate that the response status code is in the accepted status codes defined by this endpoint .
149
20
20,425
def generate_path_validator ( api_path , path_definition , parameters , context , * * kwargs ) : path_level_parameters = dereference_parameter_list ( path_definition . get ( 'parameters' , [ ] ) , context , ) operation_level_parameters = dereference_parameter_list ( parameters , context , ) all_parameters = merge_parameter_lists ( path_level_parameters , operation_level_parameters , ) # PATH in_path_parameters = filter_parameters ( all_parameters , in_ = PATH ) return chain_reduce_partial ( attrgetter ( 'path' ) , generate_path_parameters_validator ( api_path , in_path_parameters , context ) , )
Generates a callable for validating the parameters in a response object .
174
15
20,426
def validate_response ( response , request_method , schema ) : with ErrorDict ( ) as errors : # 1 # TODO: tests try : api_path = validate_path_to_api_path ( path = response . path , context = schema , * * schema ) except ValidationError as err : errors [ 'path' ] . extend ( list ( err . messages ) ) return # this causes an exception to be raised since errors is no longer falsy. path_definition = schema [ 'paths' ] [ api_path ] or { } # TODO: tests try : operation_definition = validate_request_method_to_operation ( request_method = request_method , path_definition = path_definition , ) except ValidationError as err : errors [ 'method' ] . add_error ( err . detail ) return # 4 try : response_definition = validate_status_code_to_response_definition ( response = response , operation_definition = operation_definition , ) except ValidationError as err : errors [ 'status_code' ] . add_error ( err . detail ) else : # 5 response_validator = generate_response_validator ( api_path , operation_definition = operation_definition , path_definition = path_definition , response_definition = response_definition , context = schema , ) try : response_validator ( response , context = schema ) except ValidationError as err : errors [ 'body' ] . add_error ( err . detail )
Response validation involves the following steps . 4 . validate that the response status_code is in the allowed responses for the request method . 5 . validate that the response content validates against any provided schemas for the responses . 6 . headers content - types etc ... ???
321
53
20,427
def construct_schema_validators ( schema , context ) : validators = ValidationDict ( ) if '$ref' in schema : validators . add_validator ( '$ref' , SchemaReferenceValidator ( schema [ '$ref' ] , context ) , ) if 'properties' in schema : for property_ , property_schema in schema [ 'properties' ] . items ( ) : property_validator = generate_object_validator ( schema = property_schema , context = context , ) validators . add_property_validator ( property_ , property_validator ) if schema . get ( 'additionalProperties' ) is False : validators . add_validator ( 'additionalProperties' , generate_additional_properties_validator ( context = context , * * schema ) , ) assert 'context' not in schema for key in schema : if key in validator_mapping : validators . add_validator ( key , validator_mapping [ key ] ( context = context , * * schema ) ) return validators
Given a schema object construct a dictionary of validators needed to validate a response matching the given schema .
233
20
20,428
def validate_type ( value , types , * * kwargs ) : if not is_value_of_any_type ( value , types ) : raise ValidationError ( MESSAGES [ 'type' ] [ 'invalid' ] . format ( repr ( value ) , get_type_for_value ( value ) , types , ) )
Validate that the value is one of the provided primative types .
76
14
20,429
def generate_type_validator ( type_ , * * kwargs ) : if is_non_string_iterable ( type_ ) : types = tuple ( type_ ) else : types = ( type_ , ) # support x-nullable since Swagger 2.0 doesn't support null type # (see https://github.com/OAI/OpenAPI-Specification/issues/229) if kwargs . get ( 'x-nullable' , False ) and NULL not in types : types = types + ( NULL , ) return functools . partial ( validate_type , types = types )
Generates a callable validator for the given type or iterable of types .
132
17
20,430
def validate_multiple_of ( value , divisor , * * kwargs ) : if not decimal . Decimal ( str ( value ) ) % decimal . Decimal ( str ( divisor ) ) == 0 : raise ValidationError ( MESSAGES [ 'multiple_of' ] [ 'invalid' ] . format ( divisor , value ) , )
Given a value and a divisor validate that the value is divisible by the divisor .
81
21
20,431
def validate_minimum ( value , minimum , is_exclusive , * * kwargs ) : if is_exclusive : comparison_text = "greater than" compare_fn = operator . gt else : comparison_text = "greater than or equal to" compare_fn = operator . ge if not compare_fn ( value , minimum ) : raise ValidationError ( MESSAGES [ 'minimum' ] [ 'invalid' ] . format ( value , comparison_text , minimum ) , )
Validator function for validating that a value does not violate it s minimum allowed value . This validation can be inclusive or exclusive of the minimum depending on the value of is_exclusive .
107
37
20,432
def generate_minimum_validator ( minimum , exclusiveMinimum = False , * * kwargs ) : return functools . partial ( validate_minimum , minimum = minimum , is_exclusive = exclusiveMinimum )
Generator function returning a callable for minimum value validation .
44
12
20,433
def validate_maximum(value, maximum, is_exclusive, **kwargs):
    """Validate that ``value`` does not violate its maximum.

    ``is_exclusive`` selects strict (<) versus inclusive (<=) comparison.
    """
    if is_exclusive:
        compare_fn, comparison_text = operator.lt, "less than"
    else:
        compare_fn, comparison_text = operator.le, "less than or equal to"
    if not compare_fn(value, maximum):
        raise ValidationError(
            MESSAGES['maximum']['invalid'].format(value, comparison_text, maximum),
        )
Validator function for validating that a value does not violate its maximum allowed value . This validation can be inclusive or exclusive of the maximum depending on the value of is_exclusive .
105
37
20,434
def generate_maximum_validator(maximum, exclusiveMaximum=False, **kwargs):
    """Return a callable that enforces the given maximum value."""
    return functools.partial(
        validate_maximum,
        maximum=maximum,
        is_exclusive=exclusiveMaximum,
    )
Generator function returning a callable for maximum value validation .
44
12
20,435
def validate_min_items(value, minimum, **kwargs):
    """Validate that the array ``value`` holds at least ``minimum`` items."""
    if len(value) < minimum:
        raise ValidationError(
            MESSAGES['min_items']['invalid'].format(minimum, len(value)),
        )
Validator for ARRAY types to enforce a minimum number of items allowed for the ARRAY to be valid .
61
22
20,436
def validate_max_items(value, maximum, **kwargs):
    """Validate that the array ``value`` holds at most ``maximum`` items."""
    if len(value) > maximum:
        raise ValidationError(
            MESSAGES['max_items']['invalid'].format(maximum, len(value)),
        )
Validator for ARRAY types to enforce a maximum number of items allowed for the ARRAY to be valid .
61
22
20,437
def validate_unique_items(value, **kwargs):
    """Validate that every item in the array ``value`` is unique."""
    # Serialize items before counting: 0 and False collide as dict keys, and
    # objects (dicts) are unhashable, so compare canonical JSON instead.
    counts = collections.Counter(
        json.dumps(item, sort_keys=True) for item in value
    )
    dupes = [json.loads(doc) for doc, seen in counts.items() if seen > 1]
    if dupes:
        raise ValidationError(
            MESSAGES['unique_items']['invalid'].format(repr(dupes)),
        )
Validator for ARRAY types to enforce that all array items must be unique .
134
16
20,438
def validate_object(obj, field_validators=None, non_field_validators=None, schema=None, context=None):
    """Apply schema, field, and non-field validators to the mapping ``obj``.

    Validation errors raised by the individual validators are collected and
    re-raised by the underlying validator collections.  Returns ``obj``.
    """
    if schema is None:
        schema = {}
    if context is None:
        context = {}
    if field_validators is None:
        field_validators = ValidationDict()
    if non_field_validators is None:
        non_field_validators = ValidationList()
    # Imported locally — presumably to avoid a circular import with
    # flex.validation.schema; confirm against the full module.
    from flex.validation.schema import (
        construct_schema_validators,
    )
    schema_validators = construct_schema_validators(schema, context)
    if '$ref' in schema_validators and hasattr(schema_validators['$ref'], 'validators'):
        # Merge the resolved $ref's validators into the top-level set without
        # overriding validators that are already present.
        ref_ = field_validators.pop('$ref')
        for k, v in ref_.validators.items():
            if k not in schema_validators:
                schema_validators.add_validator(k, v)
    if 'discriminator' in schema:
        schema_validators = add_polymorphism_requirements(obj, schema, context, schema_validators)
        # delete resolved discriminator to avoid infinite recursion
        # NOTE(review): this mutates the caller's schema dict in place.
        del schema['discriminator']
    schema_validators.update(field_validators)
    schema_validators.validate_object(obj, context=context)
    non_field_validators.validate_object(obj, context=context)
    return obj
Takes a mapping and applies a mapping of validator functions to it collecting and reraising any validation errors that occur .
308
24
20,439
def validate_request_method_to_operation(request_method, path_definition):
    """Return the operation definition for ``request_method`` on the path.

    Raises a ValidationError naming the allowed methods when the method is
    not defined for this path.
    """
    try:
        return path_definition[request_method]
    except KeyError:
        allowed_methods = set(REQUEST_METHODS).intersection(path_definition.keys())
        raise ValidationError(
            MESSAGES['request']['invalid_method'].format(
                request_method, allowed_methods,
            ),
        )
Given a request method validate that the request method is valid for the api path .
103
16
20,440
def validate_path_to_api_path(path, paths, basePath='', context=None, **kwargs):
    """Resolve a concrete request ``path`` to the api path it matches.

    Lookup failures and ambiguous matches are re-raised as ValidationError.
    """
    if context is None:
        context = {}
    try:
        return match_path_to_api_path(
            path_definitions=paths,
            target_path=path,
            base_path=basePath,
            context=context,
        )
    except (LookupError, MultiplePathsFound) as err:
        raise ValidationError(str(err))
Given a path find the api_path it matches .
120
11
20,441
def validate_path_parameters(target_path, api_path, path_parameters, context):
    """Validate the parameter values embedded in a request path."""
    base_path = context.get('basePath', '')
    # Collapse duplicate slashes that can appear when joining basePath + path.
    full_api_path = re.sub(NORMALIZE_SLASH_REGEX, '/', base_path + api_path)
    values = get_path_parameter_values(
        target_path,
        full_api_path,
        path_parameters,
        context,
    )
    validate_parameters(values, path_parameters, context=context)
Helper function for validating a request path
119
8
20,442
def construct_parameter_validators(parameter, context):
    """Build the dictionary of validator callables for one parameter definition."""
    validators = ValidationDict()
    if '$ref' in parameter:
        validators.add_validator(
            '$ref',
            ParameterReferenceValidator(parameter['$ref'], context),
        )
    for key in parameter:
        if key in validator_mapping:
            validators.add_validator(
                key,
                validator_mapping[key](context=context, **parameter),
            )
    if 'schema' in parameter:
        schema_validators = construct_schema_validators(parameter['schema'], context=context)
        for key, value in schema_validators.items():
            # Parameter-level validators take precedence over schema-level ones.
            validators.setdefault(key, value)
    return validators
Constructs a dictionary of validator functions for the provided parameter definition .
165
14
20,443
def construct_multi_parameter_validators(parameters, context):
    """Build a validator per parameter, keyed by the parameter name.

    Parameter names must be unique; a duplicate raises ValueError.
    """
    validators = ValidationDict()
    for parameter in parameters:
        name = parameter['name']
        if name in validators:
            raise ValueError("Duplicate parameter name {0}".format(name))
        parameter_validators = construct_parameter_validators(parameter, context=context)
        validators.add_validator(
            name,
            generate_object_validator(field_validators=parameter_validators),
        )
    return validators
Given an iterable of parameters returns a dictionary of validator functions for each parameter . Note that this expects the parameters to be unique in their name value and throws an error if this is not the case .
114
41
20,444
def generate_path_parameters_validator(api_path, path_parameters, context):
    """Return a callable validating a request path against ``path_parameters``."""
    return functools.partial(
        validate_path_parameters,
        api_path=api_path,
        path_parameters=path_parameters,
        context=context,
    )
Generates a validator function that given a path validates that it against the path parameters
75
18
20,445
def escape_regex_special_chars(api_path):
    """Escape regex special characters in the non-parametrized path components.

    The result is safe to embed in a regex pattern where each special
    character must match itself literally.
    """
    def substitute(string, replacements):
        pattern, repl = replacements
        return re.sub(pattern, repl, string)

    return functools.reduce(substitute, REGEX_REPLACEMENTS, api_path)
Turns the non - parametrized path components into strings suitable for use as a regex pattern . This primarily involves escaping special characters so that the actual character is matched in the regex .
62
39
20,446
def construct_parameter_pattern(parameter):
    """Return a named-group regex pattern matching this path parameter.

    Integer parameters match a run of digits; all other types match any run
    of non-slash characters.
    """
    name = parameter['name']
    # r'\d' avoids the invalid-escape-sequence warning that the non-raw '\d'
    # produces on modern Pythons; also avoid shadowing the `type` builtin.
    repeated = r'\d' if parameter['type'] == 'integer' else '[^/]'
    return "(?P<{name}>{repeated}+)".format(name=name, repeated=repeated)
Given a parameter definition returns a regex pattern that will match that part of the path .
72
17
20,447
def path_to_pattern(api_path, parameters):
    """Convert a (possibly parametrized) api path into a full-match regex pattern."""
    parts = re.split(PARAMETER_REGEX, api_path)
    pattern = ''.join(process_path_part(part, parameters) for part in parts)
    # Anchor both ends so the pattern only matches the complete path.
    if not pattern.startswith('^'):
        pattern = "^{0}".format(pattern)
    if not pattern.endswith('$'):
        pattern = "{0}$".format(pattern)
    return pattern
Given an api path possibly with parameter notation return a pattern suitable for turing into a regular expression which will match request paths that conform to the parameter definitions and the api path .
106
35
20,448
def match_path_to_api_path(path_definitions, target_path, base_path='', context=None):
    """Match a request or response path to one of the api paths.

    Exact string matches are preferred over regex matches; among multiple
    regex matches, the longest api path wins when it is unique.  Raises
    LookupError when nothing matches and MultiplePathsFound when the match
    is ambiguous.
    """
    if context is None:
        context = {}
    assert isinstance(context, collections.Mapping)
    if target_path.startswith(base_path):
        # Convert all of the api paths into Path instances for easier regex
        # matching.
        normalized_target_path = re.sub(NORMALIZE_SLASH_REGEX, '/', target_path)
        matching_api_paths = list()
        matching_api_paths_regex = list()
        for p, v in path_definitions.items():
            # Doing this to help with case where we might have base_path
            # being just /, and then the path starts with / as well.
            full_path = re.sub(NORMALIZE_SLASH_REGEX, '/', base_path + p)
            r = path_to_regex(
                api_path=full_path,
                path_parameters=extract_path_parameters(v),
                operation_parameters=extract_operation_parameters(v),
                context=context,
            )
            if full_path == normalized_target_path:
                # Exact match takes priority over regex matches.
                matching_api_paths.append(p)
            elif r.match(normalized_target_path):
                matching_api_paths_regex.append((p, r.match(normalized_target_path)))
        # Keep it consistent with the previous behavior
        target_path = target_path[len(base_path):]
    else:
        matching_api_paths = []
        matching_api_paths_regex = []
    if not matching_api_paths and not matching_api_paths_regex:
        fstr = MESSAGES['path']['no_matching_paths_found'].format(target_path)
        raise LookupError(fstr)
    elif len(matching_api_paths) == 1:
        return matching_api_paths[0]
    elif len(matching_api_paths) > 1:
        raise MultiplePathsFound(
            MESSAGES['path']['multiple_paths_found'].format(
                target_path, [v[0] for v in matching_api_paths],
            )
        )
    elif len(matching_api_paths_regex) == 1:
        return matching_api_paths_regex[0][0]
    elif len(matching_api_paths_regex) > 1:
        # TODO: This area needs improved logic.
        # We check to see if any of the matched paths is longer than
        # the others. If so, we *assume* it is the correct match. This is
        # going to be prone to false positives in certain cases.
        matches_by_path_size = collections.defaultdict(list)
        for path, match in matching_api_paths_regex:
            matches_by_path_size[len(path)].append(path)
        longest_match = max(matches_by_path_size.keys())
        if len(matches_by_path_size[longest_match]) == 1:
            # A unique longest path: assume it is the intended match.
            return matches_by_path_size[longest_match][0]
        raise MultiplePathsFound(
            MESSAGES['path']['multiple_paths_found'].format(
                target_path, [v[0] for v in matching_api_paths_regex],
            )
        )
    else:
        return matching_api_paths_regex[0][0]
Match a request or response path to one of the api paths .
790
13
20,449
def validate_request(request, schema):
    """Validate ``request`` against a swagger ``schema``.

    Steps:
      1. Resolve the request path to an api path (failure -> 'path' error).
      2. Resolve the request method to an operation (failure -> 'method').
      3. Run the operation-level validators (failure -> 'method').

    ErrorDict is presumably a context manager that raises on exit when any
    errors were recorded, so the early ``return``s below end validation and
    surface the collected errors — confirm against its definition.
    """
    with ErrorDict() as errors:
        # 1
        try:
            api_path = validate_path_to_api_path(
                path=request.path,
                context=schema,
                **schema
            )
        except ValidationError as err:
            errors['path'].add_error(err.detail)
            return  # this causes an exception to be raised since errors is no longer falsy.
        path_definition = schema['paths'][api_path] or {}
        if not path_definition:
            # TODO: is it valid to not have a definition for a path?
            return
        # 2
        try:
            operation_definition = validate_request_method_to_operation(
                request_method=request.method,
                path_definition=path_definition,
            )
        except ValidationError as err:
            errors['method'].add_error(err.detail)
            return
        if operation_definition is None:
            # TODO: is this compliant with swagger, can path operations have a null
            # definition?
            return
        # 3
        operation_validators = construct_operation_validators(
            api_path=api_path,
            path_definition=path_definition,
            operation_definition=operation_definition,
            context=schema,
        )
        try:
            validate_operation(request, operation_validators, context=schema)
        except ValidationError as err:
            errors['method'].add_error(err.detail)
Request validation does the following steps .
300
7
20,450
def normalize_request(request):
    """Coerce an arbitrary request object into the internal Request class."""
    if isinstance(request, Request):
        return request
    # Each normalizer raises TypeError for request types it cannot handle.
    for normalizer in REQUEST_NORMALIZERS:
        try:
            return normalizer(request)
        except TypeError:
            continue
    raise ValueError("Unable to normalize the provided request")
Given a request normalize it to the internal Request class .
60
12
20,451
def normalize_response(response, request=None):
    """Coerce a response object into the internal Response class.

    The associated ``request``, when provided, is normalized as well.
    """
    if isinstance(response, Response):
        return response
    if request is not None and not isinstance(request, Request):
        request = normalize_request(request)
    # Each normalizer raises TypeError for response types it cannot handle.
    for normalizer in RESPONSE_NORMALIZERS:
        try:
            return normalizer(response, request=request)
        except TypeError:
            continue
    raise ValueError("Unable to normalize the provided response")
Given a response normalize it to the internal Response class . This also involves normalizing the associated request object .
93
22
20,452
def generate_header_validator(headers, context, **kwargs):
    """Build a validator that checks a mapping of headers against ``headers``."""
    validators = ValidationDict()
    for definition in headers:
        # Process (cast/deserialize) the raw header value before validating it.
        processor = generate_value_processor(context=context, **definition)
        validator = generate_object_validator(
            field_validators=construct_header_validators(definition, context=context),
        )
        validators.add_property_validator(
            definition['name'],
            chain_reduce_partial(processor, validator),
        )
    return generate_object_validator(field_validators=validators)
Generates a validation function that will validate a dictionary of headers .
143
13
20,453
def generate_parameters_validator(api_path, path_definition, parameters, context, **kwargs):
    """Generate a validator for a request's path, query, header, and body
    parameters, merging path-level and operation-level parameter lists.
    """
    # TODO: figure out how to merge this with the same code in response
    # validation.
    validators = ValidationDict()
    path_level_parameters = dereference_parameter_list(
        path_definition.get('parameters', []),
        context,
    )
    operation_level_parameters = dereference_parameter_list(
        parameters,
        context,
    )
    all_parameters = merge_parameter_lists(
        path_level_parameters,
        operation_level_parameters,
    )
    # PATH
    in_path_parameters = filter_parameters(all_parameters, in_=PATH)
    validators.add_validator(
        'path',
        chain_reduce_partial(
            attrgetter('path'),
            generate_path_parameters_validator(api_path, in_path_parameters, context),
        ),
    )
    # QUERY
    in_query_parameters = filter_parameters(all_parameters, in_=QUERY)
    validators.add_validator(
        'query',
        chain_reduce_partial(
            attrgetter('query_data'),
            functools.partial(
                validate_query_parameters,
                query_parameters=in_query_parameters,
                context=context,
            ),
        ),
    )
    # HEADERS
    in_header_parameters = filter_parameters(all_parameters, in_=HEADER)
    validators.add_validator(
        'headers',
        chain_reduce_partial(
            attrgetter('headers'),
            generate_header_validator(in_header_parameters, context),
        ),
    )
    # FORM_DATA
    # in_form_data_parameters = filter_parameters(all_parameters, in_=FORM_DATA)
    # validators.add_validator(
    #     'form_data',
    #     chain_reduce_partial(
    #         attrgetter('data'),
    #         generate_form_data_validator(in_form_data_parameters, context),
    #     )
    # )
    # REQUEST_BODY
    in_request_body_parameters = filter_parameters(all_parameters, in_=BODY)
    validators.add_validator(
        'request_body',
        chain_reduce_partial(
            attrgetter('data'),
            generate_request_body_validator(in_request_body_parameters, context),
        )
    )
    return generate_object_validator(field_validators=validators)
Generates a validator function to validate .
570
9
20,454
def partial_safe_wraps(wrapped_func, *args, **kwargs):
    """A ``functools.wraps`` replacement that tolerates ``functools.partial``.

    Partial objects lack ``__name__``/``__module__``, so unwrap down to the
    underlying function before delegating to ``functools.wraps``.
    """
    if isinstance(wrapped_func, functools.partial):
        return partial_safe_wraps(wrapped_func.func)
    return functools.wraps(wrapped_func)
A version of functools . wraps that is safe to wrap a partial in .
64
17
20,455
def skip_if_empty(func):
    """Decorator: make ``func`` a no-op when called with the EMPTY sentinel."""
    @partial_safe_wraps(func)
    def inner(value, *args, **kwargs):
        if value is EMPTY:
            return None
        return func(value, *args, **kwargs)
    return inner
Decorator for validation functions which makes them pass if the value passed in is the EMPTY sentinal value .
60
23
20,456
def rewrite_reserved_words(func):
    """Decorator: rewrite trailing-underscore kwargs to their reserved word.

    Lets callers pass e.g. ``in_`` for a function whose logic needs the
    reserved keyword ``in`` as a kwarg name.
    """
    @partial_safe_wraps(func)
    def inner(*args, **kwargs):
        for word in RESERVED_WORDS:
            suffixed = "{0}_".format(word)
            if suffixed in kwargs:
                kwargs[word] = kwargs.pop(suffixed)
        return func(*args, **kwargs)
    return inner
Given a function whose kwargs need to contain a reserved word such as in allow calling that function with the keyword as in_ such that function kwargs are rewritten to use the reserved word .
93
41
20,457
def any_validator(obj, validators, **kwargs):
    """Run ``obj`` through multiple validators, passing if any one succeeds.

    Requires at least two validators.  When every validator fails, a single
    collected failure is re-raised directly; otherwise all failures are
    raised together under their validator keys.
    """
    if not len(validators) > 1:
        raise ValueError(
            "any_validator requires at least 2 validator. Only got "
            "{0}".format(len(validators))
        )
    errors = ErrorDict()
    for key, validator in validators.items():
        try:
            validator(obj, **kwargs)
        except ValidationError as err:
            errors[key] = err.detail
        else:
            # One validator passed; that is enough.
            break
    else:
        if len(errors) == 1:
            # Special case for a single error. Just raise it as if it was the
            # only validator run.
            # list() is required: dict views are not indexable on Python 3.
            error = list(errors.values())[0]
            raise ValidationError(error)
        else:
            # Raise all of the errors with the key namespaces.
            errors.raise_()
Attempt multiple validators on an object .
180
8
20,458
def _extract_to_tempdir(archive_filename):
    """Extract a tar or zip archive into a fresh tempdir and chdir into it.

    Yields the tempdir path; on exit the original cwd is restored and the
    tempdir deleted.  Presumably wrapped with a contextmanager decorator at
    the definition site — confirm in the full file.
    """
    if not os.path.exists(archive_filename):
        raise Exception("Archive '%s' does not exist" % (archive_filename))
    tempdir = tempfile.mkdtemp(prefix="metaextract_")
    current_cwd = os.getcwd()
    try:
        if tarfile.is_tarfile(archive_filename):
            with tarfile.open(archive_filename) as f:
                f.extractall(tempdir)
        elif zipfile.is_zipfile(archive_filename):
            with zipfile.ZipFile(archive_filename) as f:
                f.extractall(tempdir)
        else:
            raise Exception("Can not extract '%s'. "
                            "Not a tar or zip file" % archive_filename)
        os.chdir(tempdir)
        yield tempdir
    finally:
        # Always leave the tempdir before deleting it, and restore the
        # caller's working directory.
        os.chdir(current_cwd)
        shutil.rmtree(tempdir)
extract the given tarball or zipfile to a tempdir and change the cwd to the new tempdir . Delete the tempdir at the end
214
31
20,459
def _enter_single_subdir ( root_dir ) : current_cwd = os . getcwd ( ) try : dest_dir = root_dir dir_list = os . listdir ( root_dir ) if len ( dir_list ) == 1 : first = os . path . join ( root_dir , dir_list [ 0 ] ) if os . path . isdir ( first ) : dest_dir = first else : dest_dir = root_dir os . chdir ( dest_dir ) yield dest_dir finally : os . chdir ( current_cwd )
if the given directory has just a single subdir enter that
127
12
20,460
def _set_file_encoding_utf8 ( filename ) : with open ( filename , 'r+' ) as f : content = f . read ( ) f . seek ( 0 , 0 ) f . write ( "# -*- coding: utf-8 -*-\n" + content )
Set an encoding header as suggested in PEP - 0263 . This is not entirely correct because we don t know the encoding of the given file but it s at least a chance to get metadata from the setup . py
66
44
20,461
def _setup_py_run_from_dir(root_dir, py_interpreter):
    """Run ``setup.py ... metaextract`` in ``root_dir`` and return its metadata.

    The metaextract distutils command writes JSON into a tempfile, which is
    read back and returned as a dict.  List-valued metadata keys are sorted
    for stable output.
    """
    data = {}
    with _enter_single_subdir(root_dir) as single_subdir:
        if not os.path.exists("setup.py"):
            raise Exception("'setup.py' does not exist in '%s'" % (single_subdir))
        # generate a temporary json file which contains the metadata
        output_json = tempfile.NamedTemporaryFile()
        cmd = "%s setup.py -q --command-packages metaextract " "metaextract -o %s " % (py_interpreter, output_json.name)
        try:
            subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
        except subprocess.CalledProcessError:
            # try again with an encoding header in setup.py — some setup.py
            # files fail to parse without an explicit PEP 263 declaration.
            _set_file_encoding_utf8("setup.py")
            subprocess.check_output(cmd, shell=True)
        # read json file and return data
        with open(output_json.name, "r") as f:
            data = json.loads(f.read())
        # sort some of the keys if the dict values are lists
        for key in ['data_files', 'entry_points', 'extras_require',
                    'install_requires', 'setup_requires', 'scripts',
                    'tests_require', 'tests_suite']:
            if key in data['data'] and isinstance(data['data'][key], list):
                data['data'][key] = sorted(data['data'][key])
    return data
run the extractmeta command via the setup . py in the given root_dir . the output of extractmeta is json and is stored in a tempfile which is then read in and returned as data
371
40
20,462
def from_archive(archive_filename, py_interpreter=sys.executable):
    """Extract metadata from the given sdist archive (tar or zip)."""
    with _extract_to_tempdir(archive_filename) as root_dir:
        return _setup_py_run_from_dir(root_dir, py_interpreter)
extract metadata from a given sdist archive file
64
10
20,463
def xmlns(source):
    """Return a mapping of prefix -> namespace URI declared in the XML source."""
    namespaces = {}
    events = ("end", "start-ns", "end-ns")
    for event, elem in iterparse(source, events):
        if event == "start-ns":
            prefix, uri = elem
            namespaces[prefix] = uri
        elif event == "end":
            # First complete element: all root-level declarations seen.
            break
    # Rewind the stream so callers can re-parse it.
    if hasattr(source, "seek"):
        source.seek(0)
    return namespaces
Returns a map of prefix to namespace for the given XML file .
101
13
20,464
def create_block(mc, block_id, subtype=None):
    """Place a block of ``block_id`` (optional ``subtype``) at the player's
    tile position and move the player up one block to stand on it.
    """
    tile_x, tile_y, tile_z = mc.player.getTilePos()
    pos_x, pos_y, pos_z = mc.player.getPos()
    if subtype is None:
        mc.setBlock(tile_x, tile_y, tile_z, block_id)
    else:
        mc.setBlock(tile_x, tile_y, tile_z, block_id, subtype)
    mc.player.setPos(pos_x, pos_y + 1, pos_z)
Build a block with the specified id and subtype under the player in the Minecraft world . Subtype is optional and can be specified as None to use the default subtype for the block .
145
38
20,465
def _busy_wait_ms ( self , ms ) : start = time . time ( ) delta = ms / 1000.0 while ( time . time ( ) - start ) <= delta : pass
Busy wait for the specified number of milliseconds .
42
10
20,466
def _write_frame ( self , data ) : assert data is not None and 0 < len ( data ) < 255 , 'Data must be array of 1 to 255 bytes.' # Build frame to send as: # - SPI data write (0x01) # - Preamble (0x00) # - Start code (0x00, 0xFF) # - Command length (1 byte) # - Command length checksum # - Command bytes # - Checksum # - Postamble (0x00) length = len ( data ) frame = bytearray ( length + 8 ) frame [ 0 ] = PN532_SPI_DATAWRITE frame [ 1 ] = PN532_PREAMBLE frame [ 2 ] = PN532_STARTCODE1 frame [ 3 ] = PN532_STARTCODE2 frame [ 4 ] = length & 0xFF frame [ 5 ] = self . _uint8_add ( ~ length , 1 ) frame [ 6 : - 2 ] = data checksum = reduce ( self . _uint8_add , data , 0xFF ) frame [ - 2 ] = ~ checksum & 0xFF frame [ - 1 ] = PN532_POSTAMBLE # Send frame. logger . debug ( 'Write frame: 0x{0}' . format ( binascii . hexlify ( frame ) ) ) self . _gpio . set_low ( self . _cs ) self . _busy_wait_ms ( 2 ) self . _spi . write ( frame ) self . _gpio . set_high ( self . _cs )
Write a frame to the PN532 with the specified data bytearray .
354
18
20,467
def _read_data(self, count):
    """Read ``count`` bytes from the PN532 over SPI and return the response."""
    # First byte of the request is the SPI data-read opcode; the remaining
    # zero bytes are clocked out while the response is clocked in.
    request = bytearray(count)
    request[0] = PN532_SPI_DATAREAD
    self._gpio.set_low(self._cs)
    self._busy_wait_ms(2)
    response = self._spi.transfer(request)
    self._gpio.set_high(self._cs)
    return response
Read a specified count of bytes from the PN532 .
115
13
20,468
def _read_frame(self, length):
    """Read and parse a response frame with at most ``length`` data bytes.

    Returns the frame's data payload; raises RuntimeError on any framing,
    length-checksum, or data-checksum error.  Note that fewer than
    ``length`` bytes might be returned.
    """
    # Read frame with expected length of data.
    response = self._read_data(length + 8)
    logger.debug('Read frame: 0x{0}'.format(binascii.hexlify(response)))
    # Check frame starts with 0x01 and then has 0x00FF (preceeded by optional
    # zeros).
    if response[0] != 0x01:
        raise RuntimeError('Response frame does not start with 0x01!')
    # Swallow all the 0x00 values that preceed 0xFF.
    offset = 1
    while response[offset] == 0x00:
        offset += 1
        if offset >= len(response):
            raise RuntimeError('Response frame preamble does not contain 0x00FF!')
    if response[offset] != 0xFF:
        raise RuntimeError('Response frame preamble does not contain 0x00FF!')
    offset += 1
    if offset >= len(response):
        raise RuntimeError('Response contains no data!')
    # Check length & length checksum match (they must sum to 0 mod 256).
    frame_len = response[offset]
    if (frame_len + response[offset + 1]) & 0xFF != 0:
        raise RuntimeError('Response length checksum did not match length!')
    # Check frame checksum value matches bytes (data + checksum byte sum to 0).
    checksum = reduce(self._uint8_add, response[offset + 2:offset + 2 + frame_len + 1], 0)
    if checksum != 0:
        raise RuntimeError('Response checksum did not match expected value!')
    # Return frame data.
    return response[offset + 2:offset + 2 + frame_len]
Read a response frame from the PN532 of at most length bytes in size . Returns the data inside the frame if found otherwise raises an exception if there is an error parsing the frame . Note that less than length bytes might be returned!
358
49
20,469
def _wait_ready(self, timeout_sec=1):
    """Poll the PN532 status over SPI until it reports ready.

    Returns True once ready, or False when ``timeout_sec`` elapses first.
    """
    deadline = time.time() + timeout_sec
    while True:
        # Send a SPI status-read command and examine the reply.
        self._gpio.set_low(self._cs)
        self._busy_wait_ms(2)
        response = self._spi.transfer([PN532_SPI_STATREAD, 0x00])
        self._gpio.set_high(self._cs)
        if response[1] == PN532_SPI_READY:
            return True
        if time.time() >= deadline:
            return False
        # Wait a little while before polling again.
        time.sleep(0.01)
Wait until the PN532 is ready to receive commands . At most wait timeout_sec seconds for the PN532 to be ready . If the PN532 is ready before the timeout is exceeded then True will be returned otherwise False is returned when the timeout is exceeded .
243
58
20,470
def call_function(self, command, response_length=0, params=[], timeout_sec=1):
    """Send ``command`` (with optional ``params``) to the PN532 and return
    the response payload.

    Waits up to ``timeout_sec`` for the chip to become ready at each step,
    returning None on timeout.  Raises RuntimeError on a bad ACK or on a
    response for a different command.  Fewer than ``response_length`` bytes
    may be returned.

    NOTE(review): ``params=[]`` is a mutable default; harmless here only
    because it is never mutated.
    """
    # Build frame data with command and parameters.
    data = bytearray(2 + len(params))
    data[0] = PN532_HOSTTOPN532
    data[1] = command & 0xFF
    data[2:] = params
    # Send frame and wait for response.
    self._write_frame(data)
    if not self._wait_ready(timeout_sec):
        return None
    # Verify ACK response and wait to be ready for function response.
    response = self._read_data(len(PN532_ACK))
    if response != PN532_ACK:
        raise RuntimeError('Did not receive expected ACK from PN532!')
    if not self._wait_ready(timeout_sec):
        return None
    # Read response bytes (+2 for the direction byte and command echo).
    response = self._read_frame(response_length + 2)
    # Check that response is for the called function (command + 1 echoes it).
    if not (response[0] == PN532_PN532TOHOST and response[1] == (command + 1)):
        raise RuntimeError('Received unexpected command response!')
    # Return response data.
    return response[2:]
Send specified command to the PN532 and expect up to response_length bytes back in a response . Note that less than the expected bytes might be returned! Params can optionally specify an array of bytes to send as parameters to the function call . Will wait up to timeout_secs seconds for a response and return a bytearray of response bytes or None if no response is available within the timeout .
278
84
20,471
def begin(self):
    """Initialize communication with the PN532; call before anything else."""
    # Hold CS low for a second so the PN532 wakes up and is ready.
    self._gpio.set_low(self._cs)
    time.sleep(1.0)
    # GetFirmwareVersion syncs up communication with the chip.  Possibly not
    # strictly required, but kept for parity with the Arduino library.
    self.get_firmware_version()
    self._gpio.set_high(self._cs)
Initialize communication with the PN532 . Must be called before any other calls are made against the PN532 .
108
26
20,472
def get_firmware_version(self):
    """Return the PN532 firmware identity as an (IC, Ver, Rev, Support) tuple."""
    response = self.call_function(PN532_COMMAND_GETFIRMWAREVERSION, 4)
    if response is None:
        raise RuntimeError('Failed to detect the PN532! Make sure there is sufficient power (use a 1 amp or greater power supply), the PN532 is wired correctly to the device, and the solder joints on the PN532 headers are solidly connected.')
    return tuple(response[:4])
Call PN532 GetFirmwareVersion function and return a tuple with the IC Ver Rev and Support values .
123
24
20,473
def read_passive_target(self, card_baud=PN532_MIFARE_ISO14443A, timeout_sec=1):
    """Wait for a MiFare card and return its UID bytearray, or None on timeout."""
    # List a single passive target; a UID is at most 7 bytes.
    response = self.call_function(
        PN532_COMMAND_INLISTPASSIVETARGET,
        params=[0x01, card_baud],
        response_length=17,
    )
    if response is None:
        # No card presented itself within the timeout.
        return None
    if response[0] != 0x01:
        raise RuntimeError('More than one card detected!')
    uid_length = response[5]
    if uid_length > 7:
        raise RuntimeError('Found card with unexpectedly long UID!')
    return response[6:6 + uid_length]
Wait for a MiFare card to be available and return its UID when found . Will wait up to timeout_sec seconds and return None if no card is found otherwise a bytearray with the UID of the found card is returned .
197
49
20,474
def mifare_classic_read_block(self, block_number):
    """Read one 16-byte MiFare classic block.

    Returns the block data as a bytearray, or None when the read fails.
    """
    response = self.call_function(
        PN532_COMMAND_INDATAEXCHANGE,
        params=[0x01, MIFARE_CMD_READ, block_number & 0xFF],
        response_length=17,
    )
    # The first byte is the status code; 0x00 means success.
    if response[0] != 0x00:
        return None
    # The remaining 16 bytes are the block's data.
    return response[1:]
Read a block of data from the card . Block number should be the block to read . If the block is successfully read a bytearray of length 16 with data starting at the specified block will be returned . If the block is not read then None will be returned .
130
55
20,475
def mifare_classic_write_block(self, block_number, data):
    """Write 16 bytes of ``data`` to the given block; return True on success."""
    assert data is not None and len(data) == 16, 'Data must be an array of 16 bytes!'
    # InDataExchange parameters: target count, write command, block, payload.
    params = bytearray(19)
    params[0] = 0x01  # Max card numbers
    params[1] = MIFARE_CMD_WRITE
    params[2] = block_number & 0xFF
    params[3:] = data
    response = self.call_function(
        PN532_COMMAND_INDATAEXCHANGE,
        params=params,
        response_length=1,
    )
    # Status byte 0x00 indicates the write succeeded.
    return response[0] == 0x00
Write a block of data to the card . Block number should be the block to write and data should be a byte array of length 16 with the data to write . If the data is successfully written then True is returned otherwise False is returned .
164
48
20,476
def _dirmatch ( path , matchwith ) : matchlen = len ( matchwith ) if ( path . startswith ( matchwith ) and path [ matchlen : matchlen + 1 ] in [ os . sep , '' ] ) : return True return False
Check if path is within matchwith s tree .
56
10
20,477
def _virtualenv_sys ( venv_path ) : executable = os . path . join ( venv_path , env_bin_dir , 'python' ) # Must use "executable" as the first argument rather than as the # keyword argument "executable" to get correct value from sys.path p = subprocess . Popen ( [ executable , '-c' , 'import sys;' 'print (sys.version[:3]);' 'print ("\\n".join(sys.path));' ] , env = { } , stdout = subprocess . PIPE ) stdout , err = p . communicate ( ) assert not p . returncode and stdout lines = stdout . decode ( 'utf-8' ) . splitlines ( ) return lines [ 0 ] , list ( filter ( bool , lines [ 1 : ] ) )
obtain version and path info from a virtualenv .
184
11
20,478
def int_to_ef(n):
    """Decode the integer filetype bits ``n`` into an ENTRY_FILETYPE of booleans.

    Mainly testing support: many filetype flags are OS-defined combinations
    of other flags, so the individual booleans are not always meaningful on
    their own.
    """
    flags = {
        name: (n & value) > 0
        for name, value in libarchive.constants.archive_entry.FILETYPES.items()
    }
    return ENTRY_FILETYPE(**flags)
This is here for testing support but in practice this isn t very useful as many of the flags are just combinations of other flags . The relationships are defined by the OS in ways that aren t semantically intuitive to this project .
60
45
20,479
def _enumerator(opener, entry_cls, format_code=None, filter_code=None):
    """Return an archive enumerator from a user-defined source using a
    user-defined entry type.

    :param opener: callable receiving the raw archive resource; attaches
        it to a data source (file path, memory buffer, ...).
    :param entry_cls: class wrapping each (archive_res, entry_res) pair.
    :param format_code: optional libarchive format restriction.
    :param filter_code: optional libarchive filter restriction.

    NOTE(review): this generator yields exactly once (the inner entry
    iterator) and frees the archive in ``finally`` -- presumably it is
    wrapped with ``contextlib.contextmanager`` where it is defined, since
    callers use it via ``with``; confirm at the definition site.
    """
    archive_res = _archive_read_new()
    try:
        # Configure format/filter, then let the caller attach the source.
        r = _set_read_context(archive_res, format_code, filter_code)
        opener(archive_res)

        def it():
            while 1:
                with _archive_read_next_header(archive_res) as entry_res:
                    if entry_res is None:
                        # No more entries in the archive.
                        break
                    e = entry_cls(archive_res, entry_res)
                    yield e
                    # If the consumer didn't read the entry's data, skip
                    # it so the stream is positioned at the next header.
                    if e.is_consumed is False:
                        _archive_read_data_skip(archive_res)
        yield it()
    finally:
        # Always release the libarchive read resource.
        _archive_read_free(archive_res)
Return an archive enumerator from a user - defined source using a user - defined entry type .
159
19
20,480
def file_enumerator(filepath, block_size=10240, *args, **kwargs):
    """Return an enumerator that knows how to read a physical file.

    :param filepath: path of the archive file to read.
    :param block_size: read-buffer size handed to libarchive.
    """
    _LOGGER.debug("Enumerating through archive file: %s", filepath)

    def opener(archive_res):
        _LOGGER.debug("Opening from file (file_enumerator): %s", filepath)
        _archive_read_open_filename(archive_res, filepath, block_size)

    # Default to the readable entry type unless the caller overrides it.
    kwargs.setdefault('entry_cls', _ArchiveEntryItReadable)
    return _enumerator(opener, *args, **kwargs)
Return an enumerator that knows how to read a physical file .
149
13
20,481
def memory_enumerator(buffer_, *args, **kwargs):
    """Return an enumerator that knows how to read raw memory.

    :param buffer_: in-memory bytes holding the whole archive.
    """
    _LOGGER.debug("Enumerating through (%d) bytes of archive data.", len(buffer_))

    def opener(archive_res):
        _LOGGER.debug("Opening from (%d) bytes (memory_enumerator).", len(buffer_))
        _archive_read_open_memory(archive_res, buffer_)

    # Default to the readable entry type unless the caller overrides it.
    kwargs.setdefault('entry_cls', _ArchiveEntryItReadable)
    return _enumerator(opener, *args, **kwargs)
Return an enumerator that knows how to read raw memory .
146
12
20,482
def _pour(opener, flags=0, *args, **kwargs):
    """A flexible pouring facility that knows how to enumerate entry data.

    Yields each entry's state to the caller *before* extraction so the
    caller can veto it by leaving ``state.selected`` False; selected
    entries are then written to disk via libarchive's write-disk API.

    :param opener: callable that attaches the archive resource to a source.
    :param flags: archive_write_disk option flags.
    """
    with _enumerator(opener, *args, entry_cls=_ArchiveEntryItState, **kwargs) as r:
        ext = libarchive.calls.archive_write.c_archive_write_disk_new()
        libarchive.calls.archive_write.c_archive_write_disk_set_options(ext, flags)
        # NOTE(review): `ext` is never freed and several write-call return
        # codes below are assigned to `r` but unchecked -- errors on the
        # write side are silently ignored; confirm upstream intent.
        for state in r:
            # Hand control to the caller; it may deselect this entry.
            yield state
            if state.selected is False:
                continue
            # Rebinding `r` here is safe: the for-loop's iterator is
            # already bound internally.
            r = libarchive.calls.archive_write.c_archive_write_header(ext, state.entry_res)
            # Out-parameters for the zero-copy data-block read loop.
            buff = ctypes.c_void_p()
            size = ctypes.c_size_t()
            offset = ctypes.c_longlong()
            while 1:
                r = libarchive.calls.archive_read.c_archive_read_data_block(
                    state.reader_res,
                    ctypes.byref(buff),
                    ctypes.byref(size),
                    ctypes.byref(offset))
                if r == libarchive.constants.archive.ARCHIVE_EOF:
                    break
                elif r != libarchive.constants.archive.ARCHIVE_OK:
                    message = c_archive_error_string(state.reader_res)
                    raise libarchive.exception.ArchiveError(
                        "Pour failed: (%d) [%s]" % (r, message))
                # Forward the block verbatim to the on-disk writer.
                r = libarchive.calls.archive_write.c_archive_write_data_block(
                    ext, buff, size, offset)
            r = libarchive.calls.archive_write.c_archive_write_finish_entry(ext)
A flexible pouring facility that knows how to enumerate entry data .
365
13
20,483
def file_pour(filepath, block_size=10240, *args, **kwargs):
    """Write physical files from the entries of the archive at *filepath*.

    :param filepath: path of the archive file to extract.
    :param block_size: read-buffer size handed to libarchive.
    """
    def _open_source(res):
        _LOGGER.debug("Opening from file (file_pour): %s", filepath)
        _archive_read_open_filename(res, filepath, block_size)

    return _pour(_open_source, *args, flags=0, **kwargs)
Write physical files from entries .
94
6
20,484
def memory_pour(buffer_, *args, **kwargs):
    """Write physical files from archive data held entirely in memory.

    :param buffer_: in-memory bytes holding the whole archive.
    """
    def _open_source(res):
        _LOGGER.debug("Opening from (%d) bytes (memory_pour).", len(buffer_))
        _archive_read_open_memory(res, buffer_)

    return _pour(_open_source, *args, flags=0, **kwargs)
Yield data from entries .
86
6
20,485
def _archive_write_data(archive, data):
    """Write *data* into *archive*.

    Only called with a non-empty byte string; raises ValueError when
    libarchive reports that zero bytes were written.
    """
    payload = ctypes.cast(ctypes.c_char_p(data), ctypes.c_void_p)
    written = libarchive.calls.archive_write.c_archive_write_data(
        archive, payload, len(data))
    if written == 0:
        message = c_archive_error_string(archive)
        raise ValueError("No bytes were written. Error? [%s]" % (message))
Write data to archive . This will only be called with a non - empty string .
104
17
20,486
def _write_ctrl_meas(self):
    """Write the values to the ctrl_meas and ctrl_hum registers.

    ctrl_hum (humidity oversampling) is written first; ctrl_meas
    (temperature/pressure oversampling and mode) is written second --
    per the driver's convention, ctrl_hum must be written before
    ctrl_meas for the humidity setting to take effect.
    """
    self._write_register_byte(_BME280_REGISTER_CTRL_HUM, self.overscan_humidity)
    self._write_register_byte(_BME280_REGISTER_CTRL_MEAS, self._ctrl_meas)
Write the values to the ctrl_meas and ctrl_hum registers in the device. ctrl_meas sets the pressure and temperature data acquisition options; ctrl_hum sets the humidity oversampling and must be written to first.
74
51
20,487
def _write_config(self):
    """Write the value to the config register in the device.

    If the sensor is in normal mode it is temporarily put to sleep,
    because writes to the config register may be ignored while in
    normal mode; the original mode is restored afterwards.
    """
    normal_flag = False
    if self._mode == MODE_NORMAL:
        # Writes to the config register may be ignored while in Normal mode
        normal_flag = True
        self.mode = MODE_SLEEP  # So we switch to Sleep mode first
    self._write_register_byte(_BME280_REGISTER_CONFIG, self._config)
    if normal_flag:
        # Restore the caller's original mode.
        self.mode = MODE_NORMAL
Write the value to the config register in the device
102
10
20,488
def _config(self):
    """Value to be written to the device's config register.

    Bits 7..5 hold the standby time (only meaningful in normal mode);
    bits 4..2 hold the IIR filter setting.
    """
    value = 0
    if self.mode == MODE_NORMAL:
        value |= self._t_standby << 5
    if self._iir_filter:
        value |= self._iir_filter << 2
    return value
Value to be written to the device s config register
58
10
20,489
def _ctrl_meas ( self ) : ctrl_meas = ( self . overscan_temperature << 5 ) ctrl_meas += ( self . overscan_pressure << 2 ) ctrl_meas += self . mode return ctrl_meas
Value to be written to the device s ctrl_meas register
58
14
20,490
def measurement_time_typical(self):
    """Typical time in milliseconds to complete a measurement in normal mode.

    Starts from a 1 ms base and adds per-channel conversion time looked
    up from the oversampling table; pressure and humidity each add a
    fixed 0.5 ms overhead.
    """
    total_ms = 1.0
    if self.overscan_temperature != OVERSCAN_DISABLE:
        total_ms += 2 * _BME280_OVERSCANS.get(self.overscan_temperature)
    if self.overscan_pressure != OVERSCAN_DISABLE:
        total_ms += 2 * _BME280_OVERSCANS.get(self.overscan_pressure) + 0.5
    if self.overscan_humidity != OVERSCAN_DISABLE:
        total_ms += 2 * _BME280_OVERSCANS.get(self.overscan_humidity) + 0.5
    return total_ms
Typical time in milliseconds required to complete a measurement in normal mode
170
13
20,491
def pressure ( self ) : self . _read_temperature ( ) # Algorithm from the BME280 driver # https://github.com/BoschSensortec/BME280_driver/blob/master/bme280.c adc = self . _read24 ( _BME280_REGISTER_PRESSUREDATA ) / 16 # lowest 4 bits get dropped var1 = float ( self . _t_fine ) / 2.0 - 64000.0 var2 = var1 * var1 * self . _pressure_calib [ 5 ] / 32768.0 var2 = var2 + var1 * self . _pressure_calib [ 4 ] * 2.0 var2 = var2 / 4.0 + self . _pressure_calib [ 3 ] * 65536.0 var3 = self . _pressure_calib [ 2 ] * var1 * var1 / 524288.0 var1 = ( var3 + self . _pressure_calib [ 1 ] * var1 ) / 524288.0 var1 = ( 1.0 + var1 / 32768.0 ) * self . _pressure_calib [ 0 ] if var1 == 0 : return 0 if var1 : pressure = 1048576.0 - adc pressure = ( ( pressure - var2 / 4096.0 ) * 6250.0 ) / var1 var1 = self . _pressure_calib [ 8 ] * pressure * pressure / 2147483648.0 var2 = pressure * self . _pressure_calib [ 7 ] / 32768.0 pressure = pressure + ( var1 + var2 + self . _pressure_calib [ 6 ] ) / 16.0 pressure /= 100 if pressure < _BME280_PRESSURE_MIN_HPA : return _BME280_PRESSURE_MIN_HPA if pressure > _BME280_PRESSURE_MAX_HPA : return _BME280_PRESSURE_MAX_HPA return pressure else : return _BME280_PRESSURE_MIN_HPA
The compensated pressure in hectoPascals . returns None if pressure measurement is disabled
451
17
20,492
def humidity(self):
    """The relative humidity in percent, clamped to the range
    [_BME280_HUMIDITY_MIN, _BME280_HUMIDITY_MAX].
    """
    # Reading temperature first refreshes self._t_fine, used below.
    self._read_temperature()
    hum = self._read_register(_BME280_REGISTER_HUMIDDATA, 2)
    # Raw 16-bit ADC value, MSB first.
    adc = float(hum[0] << 8 | hum[1])
    # Compensation algorithm from the BME280 driver
    # https://github.com/BoschSensortec/BME280_driver/blob/master/bme280.c
    var1 = float(self._t_fine) - 76800.0
    var2 = (self._humidity_calib[3] * 64.0 +
            (self._humidity_calib[4] / 16384.0) * var1)
    var3 = adc - var2
    var4 = self._humidity_calib[1] / 65536.0
    var5 = (1.0 + (self._humidity_calib[2] / 67108864.0) * var1)
    var6 = 1.0 + (self._humidity_calib[5] / 67108864.0) * var1 * var5
    var6 = var3 * var4 * (var5 * var6)
    humidity = var6 * (1.0 - self._humidity_calib[0] * var6 / 524288.0)
    # Clamp to the sensor's valid range.
    if humidity > _BME280_HUMIDITY_MAX:
        return _BME280_HUMIDITY_MAX
    if humidity < _BME280_HUMIDITY_MIN:
        return _BME280_HUMIDITY_MIN
    # else...
    return humidity
The relative humidity in RH % returns None if humidity measurement is disabled
427
13
20,493
def _read_coefficients(self):
    """Read & save the factory calibration coefficients.

    Temperature (T1..T3) and pressure (P1..P9) coefficients live in one
    contiguous register block; the humidity coefficients are split
    across two regions, and H4/H5 are packed as overlapping 12-bit
    values sharing a middle byte.
    """
    coeff = self._read_register(_BME280_REGISTER_DIG_T1, 24)
    # '<HhhHhhhhhhhh': little-endian; T1 and P1 are unsigned, the rest
    # are signed 16-bit values.
    coeff = list(struct.unpack('<HhhHhhhhhhhh', bytes(coeff)))
    coeff = [float(i) for i in coeff]
    self._temp_calib = coeff[:3]
    self._pressure_calib = coeff[3:]
    self._humidity_calib = [0] * 6
    self._humidity_calib[0] = self._read_byte(_BME280_REGISTER_DIG_H1)
    coeff = self._read_register(_BME280_REGISTER_DIG_H2, 7)
    coeff = list(struct.unpack('<hBBBBb', bytes(coeff)))
    self._humidity_calib[1] = float(coeff[0])
    self._humidity_calib[2] = float(coeff[1])
    # H4 and H5 share coeff[3]: H4 takes its low nibble, H5 its high
    # nibble, each combined with a neighbouring full byte.
    self._humidity_calib[3] = float((coeff[2] << 4) | (coeff[3] & 0xF))
    self._humidity_calib[4] = float((coeff[4] << 4) | (coeff[3] >> 4))
    self._humidity_calib[5] = float(coeff[5])
Read & save the calibration coefficients
332
6
20,494
def _read24 ( self , register ) : ret = 0.0 for b in self . _read_register ( register , 3 ) : ret *= 256.0 ret += float ( b & 0xFF ) return ret
Read an unsigned 24 - bit value as a floating point and return it .
48
15
20,495
def _create(self, postData):
    """Create an index of any type according to *postData*.

    Does nothing if the index metadata is already populated. Raises
    CreationError when the server reports a failure.
    """
    if self.infos is not None:
        return
    response = self.connection.session.post(
        self.indexesURL,
        params={"collection": self.collection.name},
        data=json.dumps(postData, default=str))
    data = response.json()
    if (response.status_code >= 400) or data['error']:
        raise CreationError(data['errorMessage'], data)
    self.infos = data
Creates an index of any type according to postData
106
11
20,496
def createVertex(self, collectionName, docAttributes, waitForSync=False):
    """Add a vertex to the graph and return it.

    Raises CreationError when the server rejects the vertex.
    """
    collection = self.database[collectionName]
    # Validate the attributes against the collection's field validators
    # before hitting the server.
    store = DOC.DocumentStore(collection, validators=collection._fields, initDct=docAttributes)
    store.validate()
    response = self.connection.session.post(
        "%s/vertex/%s" % (self.URL, collectionName),
        data=json.dumps(docAttributes, default=str),
        params={'waitForSync': waitForSync})
    data = response.json()
    if response.status_code in (201, 202):
        return collection[data["vertex"]["_key"]]
    raise CreationError("Unable to create vertice, %s" % data["errorMessage"], data)
adds a vertex to the graph and returns it
209
10
20,497
def deleteVertex(self, document, waitForSync=False):
    """Delete a vertex from the graph as well as all linked edges.

    Returns True on success; raises DeletionError otherwise.
    """
    response = self.connection.session.delete(
        "%s/vertex/%s" % (self.URL, document._id),
        params={'waitForSync': waitForSync})
    data = response.json()
    if response.status_code in (200, 202):
        return True
    raise DeletionError("Unable to delete vertice, %s" % document._id, data)
Deletes a vertex from the graph as well as all linked edges.
115
13
20,498
def createEdge(self, collectionName, _fromId, _toId, edgeAttributes, waitForSync=False):
    """Create an edge between two documents and return it.

    Raises ValueError for empty endpoint ids, KeyError when
    *collectionName* is not a declared edge definition, and
    CreationError when the server rejects the edge.
    """
    # Guard clauses: both endpoints and the edge definition must exist.
    if not _fromId:
        raise ValueError("Invalid _fromId: %s" % _fromId)
    if not _toId:
        raise ValueError("Invalid _toId: %s" % _toId)
    if collectionName not in self.definitions:
        raise KeyError("'%s' is not among the edge definitions" % collectionName)
    edge_collection = self.database[collectionName]
    # Validate endpoints and attributes locally before the request.
    edge_collection.validatePrivate("_from", _fromId)
    edge_collection.validatePrivate("_to", _toId)
    edge = edge_collection.createEdge()
    edge.set(edgeAttributes)
    edge.validate()
    payload = edge.getStore()
    payload.update({'_from': _fromId, '_to': _toId})
    response = self.connection.session.post(
        "%s/edge/%s" % (self.URL, collectionName),
        data=json.dumps(payload, default=str),
        params={'waitForSync': waitForSync})
    data = response.json()
    if response.status_code in (201, 202):
        return edge_collection[data["edge"]["_key"]]
    raise CreationError("Unable to create edge, %s" % response.json()["errorMessage"], data)
creates an edge between two documents
346
7
20,499
def link(self, definition, doc1, doc2, edgeAttributes, waitForSync=False):
    """A shorthand for createEdge that takes two documents as input.

    Each of *doc1*/*doc2* may be a Document (saved automatically when it
    has no _id yet) or a raw document id; anything that is not a
    Document is passed through as an id.
    """
    def _resolve_id(doc):
        # One helper for both endpoints instead of duplicated branches.
        # BUGFIX: isinstance instead of `type(...) is DOC.Document`, so
        # Document subclasses are saved/resolved rather than being
        # silently treated as raw id strings.
        if isinstance(doc, DOC.Document):
            if not doc._id:
                doc.save()
            return doc._id
        return doc

    return self.createEdge(definition, _resolve_id(doc1), _resolve_id(doc2),
                           edgeAttributes, waitForSync)
A shorthand for createEdge that takes two documents as input
136
11