idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
20,500
def get_cas_client(self, request, provider, renew=False):
    """Return a CAS client object matching ``provider``.

    Side effect: records the computed service URL on ``self.service_url``
    so later error messages can reference it.
    """
    url = utils.get_current_url(request, {"ticket", "provider"})
    self.service_url = url
    return CASFederateValidateUser(provider, url, renew=renew)
return a CAS client object matching provider
20,501
def post(self, request, provider=None):
    """method called on POST request

    Two cases are handled:
    * the POST is a SLO logout request from a known identity provider,
    * the POST is the provider-selection form submitted by the user.
    """
    # Federation must be explicitly enabled in the settings.
    if not settings.CAS_FEDERATE:
        logger.warning("CAS_FEDERATE is False, set it to True to use federation")
        return redirect("cas_server:login")
    try:
        # Known provider suffix: treat the POST as a SLO logout request.
        provider = FederatedIendityProvider.objects.get(suffix=provider)
        auth = self.get_cas_client(request, provider)
        try:
            auth.clean_sessions(request.POST['logoutRequest'])
        except (KeyError, AttributeError):
            # No logoutRequest parameter, or the client cannot clean sessions:
            # best-effort, ignore.
            pass
        return HttpResponse("ok")
    except FederatedIendityProvider.DoesNotExist:
        # Not a SLO request: this is the provider-selection form. CSRF is
        # checked manually because the SLO path above must stay CSRF-exempt.
        reason = CsrfViewMiddleware().process_view(request, None, (), {})
        if reason is not None:
            # CSRF failure
            return reason
        form = forms.FederateSelect(request.POST)
        if form.is_valid():
            params = utils.copy_params(
                request.POST,
                ignore={"provider", "csrfmiddlewaretoken", "ticket", "lt"}
            )
            if params.get("renew") == "False":
                del params["renew"]
            url = utils.reverse_params(
                "cas_server:federateAuth",
                kwargs=dict(provider=form.cleaned_data["provider"].suffix),
                params=params
            )
            return HttpResponseRedirect(url)
        else:
            return redirect("cas_server:login")
method called on POST request
20,502
def get(self, request, provider=None):
    """method called on GET request

    Redirect the user to the identity provider's CAS login, or, when coming
    back with a ``ticket``, validate it and bounce to our own login view.
    """
    if not settings.CAS_FEDERATE:
        logger.warning("CAS_FEDERATE is False, set it to True to use federation")
        return redirect("cas_server:login")
    # "renew" is any truthy value other than the literal string "False".
    renew = bool(request.GET.get('renew') and request.GET['renew'] != "False")
    if self.request.session.get("authenticated") and not renew:
        logger.warning("User already authenticated, dropping federated authentication request")
        return redirect("cas_server:login")
    try:
        provider = FederatedIendityProvider.objects.get(suffix=provider)
        auth = self.get_cas_client(request, provider, renew)
        if 'ticket' not in request.GET:
            # First leg: send the user to the provider's login page.
            logger.info("Trying to authenticate %s again" % auth.provider.server_url)
            return HttpResponseRedirect(auth.get_login_url())
        else:
            # Second leg: the provider sent us back with a ticket to verify.
            ticket = request.GET['ticket']
            try:
                if auth.verify_ticket(ticket):
                    logger.info(
                        "Got a valid ticket for %s from %s" % (
                            auth.username,
                            auth.provider.server_url
                        )
                    )
                    params = utils.copy_params(request.GET, ignore={"ticket", "remember"})
                    # Stash the federated identity in the session for the login view.
                    request.session["federate_username"] = auth.federated_username
                    request.session["federate_ticket"] = ticket
                    auth.register_slo(auth.federated_username, request.session.session_key, ticket)
                    url = utils.reverse_params("cas_server:login", params)
                    response = HttpResponseRedirect(url)
                    if request.GET.get("remember"):
                        # Remember the chosen provider in a cookie.
                        max_age = settings.CAS_FEDERATE_REMEMBER_TIMEOUT
                        utils.set_cookie(response, "remember_provider", provider.suffix, max_age)
                    return response
                else:
                    logger.info(
                        (
                            "Got an invalid ticket %s from %s for service %s. "
                            "Retrying authentication"
                        ) % (ticket, auth.provider.server_url, self.service_url)
                    )
                    return HttpResponseRedirect(auth.get_login_url())
            except SyntaxError as error:
                # The provider returned something we could not parse.
                messages.add_message(
                    request,
                    messages.ERROR,
                    _(
                        u"Invalid response from your identity provider CAS upon "
                        u"ticket %(ticket)s validation: %(error)r"
                    ) % {'ticket': ticket, 'error': error}
                )
                response = redirect("cas_server:login")
                response.delete_cookie("remember_provider")
                return response
    except FederatedIendityProvider.DoesNotExist:
        logger.warning("Identity provider suffix %s not found" % provider)
        return redirect("cas_server:login")
method called on GET request
20,503
def init_post(self, request):
    """Initialize attributes from the POST received parameters."""
    post = request.POST
    self.request = request
    self.service = post.get('service')
    # Any truthy value other than the literal "False" enables renewal.
    self.renew = bool(post.get('renew') and post['renew'] != "False")
    self.gateway = post.get('gateway')
    self.method = post.get('method')
    self.ajax = settings.CAS_ENABLE_AJAX_AUTH and 'HTTP_X_AJAX' in request.META
    if post.get('warned') and post['warned'] != "False":
        self.warned = True
    self.warn = post.get('warn')
    if settings.CAS_FEDERATE:
        # In federate mode the provider ticket is carried in the password field.
        self.username = post.get('username')
        self.ticket = post.get('password')
Initialize POST received parameters
20,504
def gen_lt(self):
    """Generate a new LoginTicket, append it to the session's list of valid
    LTs, and cap that list at the 100 most recent tickets."""
    tickets = self.request.session.get('lt', []) + [utils.gen_lt()]
    if len(tickets) > 100:
        tickets = tickets[-100:]
    self.request.session['lt'] = tickets
Generate a new LoginTicket and add it to the list of valid LT for the user
20,505
def check_lt(self):
    """Check whether the POSTed LoginTicket is valid; if yes, invalidate it.

    Returns True when the submitted LT was in the session's valid list
    (and consumes it), False otherwise. A fresh LT is always generated.
    """
    valid_tickets = self.request.session.get('lt', [])
    submitted = self.request.POST.get('lt')
    # Roll a new LT regardless of the outcome so the form can be redisplayed.
    self.gen_lt()
    if submitted not in valid_tickets:
        return False
    self.request.session['lt'].remove(submitted)
    # Re-assign to force the session backend to notice the mutation.
    self.request.session['lt'] = self.request.session['lt']
    return True
Check if the POSTed LoginTicket is valid; if yes, invalidate it
20,506
def init_get(self, request):
    """Initialize attributes from the GET received parameters."""
    get = request.GET
    self.request = request
    self.service = get.get('service')
    # Any truthy value other than the literal "False" enables renewal.
    self.renew = bool(get.get('renew') and get['renew'] != "False")
    self.gateway = get.get('gateway')
    self.method = get.get('method')
    self.ajax = settings.CAS_ENABLE_AJAX_AUTH and 'HTTP_X_AJAX' in request.META
    self.warn = get.get('warn')
    if settings.CAS_FEDERATE:
        # Federated credentials are single-use: pop them from the session.
        self.username = request.session.get("federate_username")
        self.ticket = request.session.get("federate_ticket")
        if self.username:
            del request.session["federate_username"]
        if self.ticket:
            del request.session["federate_ticket"]
Initialize GET received parameters
20,507
def process_get(self):
    """Analyse the GET request and report the user's authentication state."""
    self.gen_lt()
    already_authenticated = self.request.session.get("authenticated")
    if already_authenticated and not self.renew:
        return self.USER_AUTHENTICATED
    self.init_form()
    return self.USER_NOT_AUTHENTICATED
Analyse the GET request
20,508
def init_form(self, values=None):
    """Initialization of the proper form depending on POST/GET parameters.

    ``values`` are bound form data (from POST); the initial data is rebuilt
    from the request state in every case.
    """
    if values:
        values = values.copy()
        # Always bind the freshest LoginTicket.
        values['lt'] = self.request.session['lt'][-1]
    form_initial = {
        'service': self.service,
        'method': self.method,
        'warn': (
            self.warn or
            self.request.session.get("warn") or
            self.request.COOKIES.get('warn')
        ),
        'lt': self.request.session['lt'][-1],
        'renew': self.renew
    }
    if settings.CAS_FEDERATE:
        if self.username and self.ticket:
            # Coming back from an identity provider: pre-fill the credential
            # form with the federated username and the provider ticket.
            form_initial['username'] = self.username
            form_initial['password'] = self.ticket
            form_initial['ticket'] = self.ticket
            self.form = forms.FederateUserCredential(values, initial=form_initial)
        else:
            self.form = forms.FederateSelect(values, initial=form_initial)
    else:
        self.form = forms.UserCredential(values, initial=form_initial)
Initialize the proper form depending on the POST and GET parameters
20,509
def service_login(self):
    """Perform login against a service.

    Validates the service URL against the configured ServicePattern, checks
    the user against it, optionally shows a warning page, and finally
    redirects to the service with a ticket (or returns a JSON payload in
    AJAX mode).
    """
    try:
        service_pattern = ServicePattern.validate(self.service)
        service_pattern.check_user(self.user)
        # Warn the user before transmitting their identity, unless already warned.
        if self.request.session.get("warn", True) and not self.warned:
            messages.add_message(
                self.request,
                messages.WARNING,
                _(u"Authentication has been required by service %(name)s (%(url)s)") %
                {'name': service_pattern.name, 'url': self.service}
            )
            if self.ajax:
                data = {"status": "error", "detail": "confirmation needed"}
                return json_response(self.request, data)
            else:
                warn_form = forms.WarnForm(initial={
                    'service': self.service,
                    'renew': self.renew,
                    'gateway': self.gateway,
                    'method': self.method,
                    'warned': True,
                    'lt': self.request.session['lt'][-1]
                })
                return render(
                    self.request,
                    settings.CAS_WARN_TEMPLATE,
                    utils.context({'form': warn_form})
                )
        else:
            # Drain pending messages so they do not leak onto a later page.
            list(messages.get_messages(self.request))
            redirect_url = self.user.get_service_url(
                self.service,
                service_pattern,
                renew=self.renewed
            )
            if not self.ajax:
                return HttpResponseRedirect(redirect_url)
            else:
                data = {"status": "success", "detail": "auth", "url": redirect_url}
                return json_response(self.request, data)
    # Each failure mode sets a numeric error code used by the AJAX response.
    except ServicePattern.DoesNotExist:
        error = 1
        messages.add_message(
            self.request,
            messages.ERROR,
            _(u'Service %(url)s not allowed.') % {'url': self.service}
        )
    except models.BadUsername:
        error = 2
        messages.add_message(self.request, messages.ERROR, _(u"Username not allowed"))
    except models.BadFilter:
        error = 3
        messages.add_message(self.request, messages.ERROR, _(u"User characteristics not allowed"))
    except models.UserFieldNotDefined:
        error = 4
        # service_pattern is bound here: validate() succeeded before check_user raised.
        messages.add_message(
            self.request,
            messages.ERROR,
            _(u"The attribute %(field)s is needed to use" u" that service") %
            {'field': service_pattern.user_field}
        )
    # Only reached on error. In gateway mode, silently bounce back to the service.
    if self.gateway and not self.ajax:
        list(messages.get_messages(self.request))
        return HttpResponseRedirect(self.service)
    if not self.ajax:
        return render(
            self.request,
            settings.CAS_LOGGED_TEMPLATE,
            utils.context({'session': self.request.session})
        )
    else:
        data = {"status": "error", "detail": "auth", "code": error}
        return json_response(self.request, data)
Perform login against a service
20,510
def authenticated(self):
    """Processing authenticated users.

    Re-fetch the User row matching the session; if it vanished, force a
    logout and send the user back to the login page.
    """
    try:
        self.user = models.User.objects.get(
            username=self.request.session.get("username"),
            session_key=self.request.session.session_key
        )
    except models.User.DoesNotExist:
        # Session says authenticated but the DB row is gone (e.g. purged):
        # clean the session and restart the login flow.
        logger.warning(
            "User %s seems authenticated but is not found in the database." % (
                self.request.session.get("username"),
            )
        )
        self.logout()
        if self.ajax:
            data = {
                "status": "error",
                "detail": "login required",
                "url": utils.reverse_params("cas_server:login", params=self.request.GET)
            }
            return json_response(self.request, data)
        else:
            return utils.redirect_params("cas_server:login", params=self.request.GET)
    if self.service:
        # A service is requesting authentication.
        return self.service_login()
    else:
        if self.ajax:
            data = {"status": "success", "detail": "logged"}
            return json_response(self.request, data)
        else:
            return render(
                self.request,
                settings.CAS_LOGGED_TEMPLATE,
                utils.context({'session': self.request.session})
            )
Processing authenticated users
20,511
def not_authenticated(self):
    """Processing non authenticated users.

    Shows the right login form (plain, federated credential, or provider
    select), honouring gateway mode and the remembered federated provider.
    """
    if self.service:
        try:
            service_pattern = ServicePattern.validate(self.service)
            if self.gateway and not self.ajax:
                # Gateway mode: no interaction allowed, bounce straight back.
                list(messages.get_messages(self.request))
                return HttpResponseRedirect(self.service)
            if settings.CAS_SHOW_SERVICE_MESSAGES:
                if self.request.session.get("authenticated") and self.renew:
                    messages.add_message(
                        self.request,
                        messages.WARNING,
                        _(u"Authentication renewal required by service %(name)s (%(url)s).") %
                        {'name': service_pattern.name, 'url': self.service}
                    )
                else:
                    messages.add_message(
                        self.request,
                        messages.WARNING,
                        _(u"Authentication required by service %(name)s (%(url)s).") %
                        {'name': service_pattern.name, 'url': self.service}
                    )
        except ServicePattern.DoesNotExist:
            if settings.CAS_SHOW_SERVICE_MESSAGES:
                messages.add_message(
                    self.request,
                    messages.ERROR,
                    _(u'Service %s not allowed') % self.service
                )
    if self.ajax:
        data = {
            "status": "error",
            "detail": "login required",
            "url": utils.reverse_params("cas_server:login", params=self.request.GET)
        }
        return json_response(self.request, data)
    else:
        if settings.CAS_FEDERATE:
            if self.username and self.ticket:
                # Back from an identity provider: auto-submit the credential form.
                return render(
                    self.request,
                    settings.CAS_LOGIN_TEMPLATE,
                    utils.context({
                        'form': self.form,
                        'auto_submit': True,
                        'post_url': reverse("cas_server:login")
                    })
                )
            else:
                if (
                    self.request.COOKIES.get('remember_provider') and
                    FederatedIendityProvider.objects.filter(
                        suffix=self.request.COOKIES['remember_provider']
                    )
                ):
                    # A provider was remembered via cookie: go there directly.
                    params = utils.copy_params(self.request.GET)
                    url = utils.reverse_params(
                        "cas_server:federateAuth",
                        params=params,
                        kwargs=dict(provider=self.request.COOKIES['remember_provider'])
                    )
                    return HttpResponseRedirect(url)
                else:
                    if self.renew and self.request.session.get("authenticated"):
                        # Renewal for an already-federated user: reuse their provider.
                        try:
                            user = FederatedUser.get_from_federated_username(
                                self.request.session.get("username")
                            )
                            params = utils.copy_params(self.request.GET)
                            url = utils.reverse_params(
                                "cas_server:federateAuth",
                                params=params,
                                kwargs=dict(provider=user.provider.suffix)
                            )
                            return HttpResponseRedirect(url)
                        except FederatedUser.DoesNotExist:
                            pass
                    return render(
                        self.request,
                        settings.CAS_LOGIN_TEMPLATE,
                        utils.context({
                            'form': self.form,
                            'post_url': reverse("cas_server:federateAuth")
                        })
                    )
        else:
            return render(
                self.request,
                settings.CAS_LOGIN_TEMPLATE,
                utils.context({'form': self.form})
            )
Processing non authenticated users
20,512
def common(self):
    """Common part executed upon GET and POST requests: dispatch on the
    session authentication state."""
    is_authenticated = self.request.session.get("authenticated")
    if is_authenticated and (not self.renew or self.renewed):
        return self.authenticated()
    return self.not_authenticated()
Common part executed upon GET and POST requests
20,513
def process_ticket(self):
    """fetch the ticket against the database and check its validity

    Returns ``(ticket, proxies)`` where proxies is the (possibly empty)
    chain of proxy URLs. Raises ValidateError on any mismatch.
    """
    try:
        proxies = []
        if self.allow_proxy_ticket:
            # Accept both service and proxy tickets.
            ticket = models.Ticket.get(self.ticket, self.renew)
        else:
            ticket = models.ServiceTicket.get(self.ticket, self.renew)
        try:
            for prox in ticket.proxies.all():
                proxies.append(prox.url)
        except AttributeError:
            # ServiceTickets have no proxy chain.
            pass
        if ticket.service != self.service:
            raise ValidateError(u'INVALID_SERVICE', self.service)
        return ticket, proxies
    except Ticket.DoesNotExist:
        raise ValidateError(u'INVALID_TICKET', self.ticket)
    except (ServiceTicket.DoesNotExist, ProxyTicket.DoesNotExist):
        raise ValidateError(u'INVALID_TICKET', 'ticket not found')
fetch the ticket against the database and check its validity
20,514
def process_pgturl(self, params):
    """Handle PGT request

    Creates a ProxyGrantingTicket and transmits it to the service's proxy
    callback URL over HTTPS; on success renders the serviceValidate XML.
    """
    try:
        pattern = ServicePattern.validate(self.pgt_url)
        if pattern.proxy_callback:
            proxyid = utils.gen_pgtiou()
            pticket = ProxyGrantingTicket.objects.create(
                user=self.ticket.user,
                service=self.pgt_url,
                service_pattern=pattern,
                single_log_out=pattern.single_log_out
            )
            url = utils.update_url(self.pgt_url, {'pgtIou': proxyid, 'pgtId': pticket.value})
            try:
                ret = requests.get(url, verify=settings.CAS_PROXY_CA_CERTIFICATE_PATH)
                if ret.status_code == 200:
                    params['proxyGrantingTicket'] = proxyid
                else:
                    # Callback refused the PGT: do not keep it around.
                    pticket.delete()
                logger.info(
                    (
                        "ValidateService: ticket %s validated for user %s on service %s. "
                        "Proxy Granting Ticket transmited to %s."
                    ) % (
                        self.ticket.value,
                        self.ticket.user.username,
                        self.ticket.service,
                        self.pgt_url
                    )
                )
                logger.debug(
                    "ValidateService: User attributs are:\n%s" % (
                        pprint.pformat(self.ticket.attributs),
                    )
                )
                return render(
                    self.request,
                    "cas_server/serviceValidate.xml",
                    params,
                    content_type="text/xml; charset=utf-8"
                )
            except requests.exceptions.RequestException as error:
                error = utils.unpack_nested_exception(error)
                raise ValidateError(
                    u'INVALID_PROXY_CALLBACK',
                    u"%s: %s" % (type(error), str(error))
                )
        else:
            raise ValidateError(
                u'INVALID_PROXY_CALLBACK',
                u"callback url not allowed by configuration"
            )
    except ServicePattern.DoesNotExist:
        raise ValidateError(
            u'INVALID_PROXY_CALLBACK',
            u'callback url not allowed by configuration'
        )
Handle PGT request
20,515
def process_proxy(self):
    """handle PT request

    Validates the target service and the PGT, then mints a ProxyTicket and
    renders it as XML. Each failure maps to a ValidateError code.
    """
    try:
        pattern = ServicePattern.validate(self.target_service)
        if not pattern.proxy:
            # The target service pattern must explicitly allow proxying.
            raise ValidateError(
                u'UNAUTHORIZED_SERVICE',
                u'the service %s does not allow proxy tickets' % self.target_service
            )
        ticket = ProxyGrantingTicket.get(self.pgt)
        pattern.check_user(ticket.user)
        pticket = ticket.user.get_ticket(
            ProxyTicket,
            self.target_service,
            pattern,
            renew=False
        )
        models.Proxy.objects.create(proxy_ticket=pticket, url=ticket.service)
        logger.info(
            "Proxy ticket created for user %s on service %s." % (
                ticket.user.username,
                self.target_service
            )
        )
        return render(
            self.request,
            "cas_server/proxy.xml",
            {'ticket': pticket.value},
            content_type="text/xml; charset=utf-8"
        )
    except (Ticket.DoesNotExist, ProxyGrantingTicket.DoesNotExist):
        raise ValidateError(u'INVALID_TICKET', u'PGT %s not found' % self.pgt)
    except ServicePattern.DoesNotExist:
        raise ValidateError(u'UNAUTHORIZED_SERVICE', self.target_service)
    except (models.BadUsername, models.BadFilter, models.UserFieldNotDefined):
        # check_user raised: ticket is bound (get() succeeded just before).
        raise ValidateError(
            u'UNAUTHORIZED_USER',
            u'User %s not allowed on %s' % (ticket.user.username, self.target_service)
        )
handle PT request
20,516
def process_ticket(self):
    """validate ticket from SAML XML body

    Digs the ticket value out of the parsed SAML request tree, fetches it
    and checks it against the TARGET service.
    """
    try:
        # NOTE(review): tree layout (child [1][0][0]) assumed to match the
        # SAML 1.1 Request envelope — malformed bodies fall into IndexError.
        auth_req = self.root.getchildren()[1].getchildren()[0]
        ticket = auth_req.getchildren()[0].text
        ticket = models.Ticket.get(ticket)
        if ticket.service != self.target:
            raise SamlValidateError(
                u'AuthnFailed',
                u'TARGET %s does not match ticket service' % self.target
            )
        return ticket
    except (IndexError, KeyError):
        raise SamlValidateError(u'VersionMismatch')
    except Ticket.DoesNotExist:
        # Ticket.get rejected the value (bad prefix).
        raise SamlValidateError(
            u'AuthnFailed',
            u'ticket %s should begin with PT- or ST-' % ticket
        )
    except (ServiceTicket.DoesNotExist, ProxyTicket.DoesNotExist):
        raise SamlValidateError(u'AuthnFailed', u'ticket %s not found' % ticket)
validate ticket from SAML XML body
20,517
def main(source):
    """Load and validate a swagger schema from a file or URL; report the
    outcome on stdout and return an exit code (error raises ClickException)."""
    if source is None:
        click.echo(
            "You need to supply a file or url to a schema to a swagger schema, for"
            "the validator to work."
        )
        return 1
    try:
        load(source)
    except ValidationError as e:
        raise click.ClickException(str(e))
    else:
        click.echo("Validation passed")
        return 0
For a given command-line-supplied argument, negotiate the content, parse the schema, and then report any issues to stdout; if there are no schema issues, return a success exit code.
20,518
def load_source(source):
    """Common entry point for loading some form of raw swagger schema.

    Accepts, in order of detection:
    - a mapping (returned as a deep copy),
    - a file-like object (``.read()`` is used),
    - a filesystem path (``~`` is expanded),
    - a URL (fetched with ``requests``; byte bodies decoded as UTF-8),
    - a raw JSON or YAML string.

    Raises ValueError when the content parses as neither JSON nor YAML.
    """
    # ``collections.Mapping`` was removed in Python 3.10; prefer the
    # canonical ``collections.abc`` location, falling back for old runtimes.
    try:
        mapping_type = collections.abc.Mapping
    except AttributeError:
        mapping_type = collections.Mapping
    if isinstance(source, mapping_type):
        return deepcopy(source)
    elif hasattr(source, 'read') and callable(source.read):
        raw_source = source.read()
    elif os.path.exists(os.path.expanduser(str(source))):
        with open(os.path.expanduser(str(source)), 'r') as source_file:
            raw_source = source_file.read()
    elif isinstance(source, six.string_types):
        parts = urlparse.urlparse(source)
        if parts.scheme and parts.netloc:
            response = requests.get(source)
            if isinstance(response.content, six.binary_type):
                raw_source = six.text_type(response.content, encoding='utf-8')
            else:
                raw_source = response.content
        else:
            raw_source = source
    try:
        try:
            return json.loads(raw_source)
        except ValueError:
            pass
        try:
            return yaml.safe_load(raw_source)
        except (yaml.scanner.ScannerError, yaml.parser.ParserError):
            pass
    except NameError:
        # ``raw_source`` was never bound: unsupported source type, fall
        # through to the ValueError below.
        pass
    raise ValueError(
        "Unable to parse `{0}`. Tried yaml and json.".format(source),
    )
Common entry point for loading some form of raw swagger schema .
20,519
def validate(raw_schema, target=None, **kwargs):
    """Validate that ``raw_schema`` complies with the swagger spec; when
    ``target`` is given, additionally validate it against that schema."""
    schema = schema_validator(raw_schema, **kwargs)
    if target is None:
        return
    validate_object(target, schema=schema, **kwargs)
Given the python representation of a JSONschema as defined in the swagger spec validate that the schema complies to spec . If target is provided that target will be validated against the provided schema .
20,520
def validate_api_response(schema, raw_response, request_method='get', raw_request=None):
    """Validate the response of an api call against a swagger schema."""
    request = None if raw_request is None else normalize_request(raw_request)
    response = None
    if raw_response is not None:
        response = normalize_response(raw_response, request=request)
    if response is not None:
        validate_response(
            response=response,
            request_method=request_method,
            schema=schema,
        )
Validate the response of an api call against a swagger schema .
20,521
def find_parameter(parameters, **kwargs):
    """Given a list of parameters, return the single one matching the
    filter kwargs; raise if there are zero or several matches."""
    matches = filter_parameters(parameters, **kwargs)
    if len(matches) > 1:
        raise MultipleParametersFound()
    if len(matches) == 1:
        return matches[0]
    raise NoParameterFound()
Given a list of parameters find the one with the given name .
20,522
def merge_parameter_lists(*parameter_definitions):
    """Merge multiple lists of parameters into a single list.

    Parameters are keyed on ``(name, in)``; duplicate definitions resolve
    last-write-wins. Returns a concrete list (previously a dict view was
    returned, contradicting the documented contract and preventing
    indexing).
    """
    merged_parameters = {}
    for parameter_list in parameter_definitions:
        for parameter in parameter_list:
            key = (parameter['name'], parameter['in'])
            merged_parameters[key] = parameter
    return list(merged_parameters.values())
Merge multiple lists of parameters into a single list . If there are any duplicate definitions the last write wins .
20,523
def validate_status_code_to_response_definition(response, operation_definition):
    """Validate that the response status code is among the accepted status
    codes declared by this endpoint.

    Falls back to the ``default`` response definition when the exact code
    is not listed; raises ValidationError when neither is present.
    """
    status_code = response.status_code
    operation_responses = {
        str(code): val for code, val in operation_definition['responses'].items()
    }
    key = status_code if status_code in operation_responses else 'default'
    try:
        return operation_responses[key]
    except KeyError:
        raise ValidationError(
            MESSAGES['response']['invalid_status_code'].format(
                status_code, ', '.join(operation_responses.keys()),
            ),
        )
Given a response validate that the response status code is in the accepted status codes defined by this endpoint .
20,524
def generate_path_validator(api_path, path_definition, parameters, context, **kwargs):
    """Generate a callable that validates the in-path parameters of a
    response object."""
    path_level = dereference_parameter_list(
        path_definition.get('parameters', []),
        context,
    )
    operation_level = dereference_parameter_list(parameters, context)
    combined = merge_parameter_lists(path_level, operation_level)
    in_path = filter_parameters(combined, in_=PATH)
    return chain_reduce_partial(
        attrgetter('path'),
        generate_path_parameters_validator(api_path, in_path, context),
    )
Generates a callable for validating the parameters in a response object .
20,525
def validate_response(response, request_method, schema):
    """Validate a response against a swagger schema.

    Steps: resolve the request path to an api path, find the operation for
    the request method, match the status code to a response definition,
    then validate the body. Errors accumulate in an ErrorDict which raises
    on exit of the ``with`` block; early ``return`` short-circuits the
    later steps when an earlier one failed.
    """
    with ErrorDict() as errors:
        try:
            api_path = validate_path_to_api_path(
                path=response.path,
                context=schema,
                **schema
            )
        except ValidationError as err:
            errors['path'].extend(list(err.messages))
            return
        path_definition = schema['paths'][api_path] or {}
        try:
            operation_definition = validate_request_method_to_operation(
                request_method=request_method,
                path_definition=path_definition,
            )
        except ValidationError as err:
            errors['method'].add_error(err.detail)
            return
        try:
            response_definition = validate_status_code_to_response_definition(
                response=response,
                operation_definition=operation_definition,
            )
        except ValidationError as err:
            errors['status_code'].add_error(err.detail)
        else:
            # Only validate the body when the status code resolved cleanly.
            response_validator = generate_response_validator(
                api_path,
                operation_definition=operation_definition,
                path_definition=path_definition,
                response_definition=response_definition,
                context=schema,
            )
            try:
                response_validator(response, context=schema)
            except ValidationError as err:
                errors['body'].add_error(err.detail)
Response validation involves the following steps . 4 . validate that the response status_code is in the allowed responses for the request method . 5 . validate that the response content validates against any provided schemas for the responses . 6 . headers content - types etc ... ???
20,526
def construct_schema_validators(schema, context):
    """Given a schema object, construct a dictionary of the validators
    needed to validate a value against that schema.

    Handles ``$ref``, per-property validators, ``additionalProperties``
    rejection, and any keyword present in ``validator_mapping``.
    """
    validators = ValidationDict()
    if '$ref' in schema:
        validators.add_validator(
            '$ref', SchemaReferenceValidator(schema['$ref'], context),
        )
    if 'properties' in schema:
        for property_, property_schema in schema['properties'].items():
            property_validator = generate_object_validator(
                schema=property_schema,
                context=context,
            )
            validators.add_property_validator(property_, property_validator)
    if schema.get('additionalProperties') is False:
        # Only an explicit False forbids extra keys; absent/True allows them.
        validators.add_validator(
            'additionalProperties',
            generate_additional_properties_validator(context=context, **schema),
        )
    # ``**schema`` is splatted into the factories below; a 'context' key
    # would collide with the explicit context kwarg.
    assert 'context' not in schema
    for key in schema:
        if key in validator_mapping:
            validators.add_validator(key, validator_mapping[key](context=context, **schema))
    return validators
Given a schema object construct a dictionary of validators needed to validate a response matching the given schema .
20,527
def validate_type(value, types, **kwargs):
    """Validate that ``value`` is one of the provided primitive ``types``."""
    if is_value_of_any_type(value, types):
        return
    raise ValidationError(
        MESSAGES['type']['invalid'].format(
            repr(value),
            get_type_for_value(value),
            types,
        )
    )
Validate that the value is one of the provided primative types .
20,528
def generate_type_validator(type_, **kwargs):
    """Generate a callable validator for the given type or iterable of types."""
    types = tuple(type_) if is_non_string_iterable(type_) else (type_,)
    # Honour the swagger `x-nullable` vendor extension.
    if kwargs.get('x-nullable', False) and NULL not in types:
        types += (NULL,)
    return functools.partial(validate_type, types=types)
Generates a callable validator for the given type or iterable of types .
20,529
def validate_multiple_of(value, divisor, **kwargs):
    """Validate that ``value`` is divisible by ``divisor``.

    Values go through ``Decimal(str(...))`` so float inputs (e.g. 0.75 /
    0.25) divide exactly.
    """
    remainder = decimal.Decimal(str(value)) % decimal.Decimal(str(divisor))
    if remainder != 0:
        raise ValidationError(
            MESSAGES['multiple_of']['invalid'].format(divisor, value),
        )
Given a value and a divisor validate that the value is divisible by the divisor .
20,530
def validate_minimum(value, minimum, is_exclusive, **kwargs):
    """Validate that ``value`` does not violate its minimum bound.

    With ``is_exclusive`` the value must be strictly greater than
    ``minimum``; otherwise greater-or-equal suffices.
    """
    if is_exclusive:
        comparison_text, compare_fn = "greater than", operator.gt
    else:
        comparison_text, compare_fn = "greater than or equal to", operator.ge
    if not compare_fn(value, minimum):
        raise ValidationError(
            MESSAGES['minimum']['invalid'].format(value, comparison_text, minimum),
        )
Validator function for validating that a value does not violate its minimum allowed value. This validation can be inclusive or exclusive of the minimum depending on the value of is_exclusive.
20,531
def generate_minimum_validator(minimum, exclusiveMinimum=False, **kwargs):
    """Return a callable enforcing the minimum-value constraint."""
    return functools.partial(
        validate_minimum,
        minimum=minimum,
        is_exclusive=exclusiveMinimum,
    )
Generator function returning a callable for minimum value validation .
20,532
def validate_maximum(value, maximum, is_exclusive, **kwargs):
    """Validate that ``value`` does not violate its maximum bound.

    With ``is_exclusive`` the value must be strictly less than ``maximum``;
    otherwise less-or-equal suffices.
    """
    if is_exclusive:
        comparison_text, compare_fn = "less than", operator.lt
    else:
        comparison_text, compare_fn = "less than or equal to", operator.le
    if not compare_fn(value, maximum):
        raise ValidationError(
            MESSAGES['maximum']['invalid'].format(value, comparison_text, maximum),
        )
Validator function for validating that a value does not violate its maximum allowed value. This validation can be inclusive or exclusive of the maximum depending on the value of is_exclusive.
20,533
def generate_maximum_validator(maximum, exclusiveMaximum=False, **kwargs):
    """Return a callable enforcing the maximum-value constraint."""
    return functools.partial(
        validate_maximum,
        maximum=maximum,
        is_exclusive=exclusiveMaximum,
    )
Generator function returning a callable for maximum value validation .
20,534
def validate_min_items(value, minimum, **kwargs):
    """Enforce a minimum number of items for an ARRAY value."""
    count = len(value)
    if count < minimum:
        raise ValidationError(
            MESSAGES['min_items']['invalid'].format(minimum, count),
        )
Validator for ARRAY types to enforce a minimum number of items allowed for the ARRAY to be valid .
20,535
def validate_max_items(value, maximum, **kwargs):
    """Enforce a maximum number of items for an ARRAY value."""
    count = len(value)
    if count > maximum:
        raise ValidationError(
            MESSAGES['max_items']['invalid'].format(maximum, count),
        )
Validator for ARRAY types to enforce a maximum number of items allowed for the ARRAY to be valid .
20,536
def validate_unique_items(value, **kwargs):
    """Enforce that all items in an ARRAY value are unique.

    Items are canonicalised via sorted-key JSON so unhashable values
    (dicts, lists) can still be compared for equality.
    """
    counter = collections.Counter(
        json.dumps(item, sort_keys=True) for item in value
    )
    duplicates = [
        json.loads(serialized)
        for serialized, count in counter.items()
        if count > 1
    ]
    if duplicates:
        raise ValidationError(
            MESSAGES['unique_items']['invalid'].format(repr(duplicates)),
        )
Validator for ARRAY types to enforce that all array items must be unique .
20,537
def validate_object(obj, field_validators=None, non_field_validators=None, schema=None, context=None):
    """Takes a mapping and applies a mapping of validator functions to it,
    collecting and re-raising any validation errors that occur.

    ``field_validators`` override schema-derived validators key-by-key;
    ``non_field_validators`` run against the whole object afterwards.
    Returns ``obj`` unchanged.
    """
    if schema is None:
        schema = {}
    if context is None:
        context = {}
    if field_validators is None:
        field_validators = ValidationDict()
    if non_field_validators is None:
        non_field_validators = ValidationList()
    # Local import to avoid a circular dependency with flex.validation.schema.
    from flex.validation.schema import (
        construct_schema_validators,
    )
    schema_validators = construct_schema_validators(schema, context)
    # NOTE(review): membership is tested on schema_validators but the pop is
    # from field_validators — looks intentional (merge a caller-supplied $ref
    # validator's sub-validators), but confirm; a missing key would raise.
    if '$ref' in schema_validators and hasattr(schema_validators['$ref'], 'validators'):
        ref_ = field_validators.pop('$ref')
        for k, v in ref_.validators.items():
            if k not in schema_validators:
                schema_validators.add_validator(k, v)
    if 'discriminator' in schema:
        # Polymorphism: merge the sub-type requirements, then drop the key so
        # it is not processed as a plain validator (mutates caller's schema).
        schema_validators = add_polymorphism_requirements(obj, schema, context, schema_validators)
        del schema['discriminator']
    schema_validators.update(field_validators)
    schema_validators.validate_object(obj, context=context)
    non_field_validators.validate_object(obj, context=context)
    return obj
Takes a mapping and applies a mapping of validator functions to it collecting and reraising any validation errors that occur .
20,538
def validate_request_method_to_operation(request_method, path_definition):
    """Return the operation definition for ``request_method`` on this path,
    raising ValidationError when the method is not declared."""
    try:
        return path_definition[request_method]
    except KeyError:
        allowed_methods = set(REQUEST_METHODS).intersection(path_definition.keys())
        raise ValidationError(
            MESSAGES['request']['invalid_method'].format(
                request_method, allowed_methods,
            ),
        )
Given a request method validate that the request method is valid for the api path .
20,539
def validate_path_to_api_path(path, paths, basePath='', context=None, **kwargs):
    """Given a request path, find the api_path it matches; lookup failures
    are re-raised as ValidationError."""
    if context is None:
        context = {}
    try:
        return match_path_to_api_path(
            path_definitions=paths,
            target_path=path,
            base_path=basePath,
            context=context,
        )
    except LookupError as err:
        raise ValidationError(str(err))
    except MultiplePathsFound as err:
        raise ValidationError(str(err))
Given a path find the api_path it matches .
20,540
def validate_path_parameters(target_path, api_path, path_parameters, context):
    """Extract the parameter values embedded in a request path and validate
    them against the declared path parameters."""
    prefix = context.get('basePath', '')
    normalized_api_path = re.sub(NORMALIZE_SLASH_REGEX, '/', prefix + api_path)
    values = get_path_parameter_values(
        target_path,
        normalized_api_path,
        path_parameters,
        context,
    )
    validate_parameters(values, path_parameters, context=context)
Helper function for validating a request path
20,541
def construct_parameter_validators(parameter, context):
    """Construct a dictionary of validator functions for the provided
    parameter definition ($ref, mapped keywords, and nested schema)."""
    validators = ValidationDict()
    if '$ref' in parameter:
        validators.add_validator(
            '$ref',
            ParameterReferenceValidator(parameter['$ref'], context),
        )
    for key in parameter:
        if key in validator_mapping:
            validators.add_validator(
                key,
                validator_mapping[key](context=context, **parameter),
            )
    if 'schema' in parameter:
        # Schema-derived validators only fill gaps; explicit ones win.
        nested = construct_schema_validators(parameter['schema'], context=context)
        for key, value in nested.items():
            validators.setdefault(key, value)
    return validators
Constructs a dictionary of validator functions for the provided parameter definition .
20,542
def construct_multi_parameter_validators ( parameters , context ) : validators = ValidationDict ( ) for parameter in parameters : key = parameter [ 'name' ] if key in validators : raise ValueError ( "Duplicate parameter name {0}" . format ( key ) ) parameter_validators = construct_parameter_validators ( parameter , context = context ) validators . add_validator ( key , generate_object_validator ( field_validators = parameter_validators ) , ) return validators
Given an iterable of parameters returns a dictionary of validator functions for each parameter . Note that this expects the parameters to be unique in their name value and throws an error if this is not the case .
20,543
def generate_path_parameters_validator ( api_path , path_parameters , context ) : path_parameter_validator = functools . partial ( validate_path_parameters , api_path = api_path , path_parameters = path_parameters , context = context , ) return path_parameter_validator
Generates a validator function that given a path validates that it against the path parameters
20,544
def escape_regex_special_chars ( api_path ) : def substitute ( string , replacements ) : pattern , repl = replacements return re . sub ( pattern , repl , string ) return functools . reduce ( substitute , REGEX_REPLACEMENTS , api_path )
Turns the non-parametrized path components into strings suitable for use as a regex pattern . This primarily involves escaping special characters so that the actual character is matched in the regex .
20,545
def construct_parameter_pattern ( parameter ) : name = parameter [ 'name' ] type = parameter [ 'type' ] repeated = '[^/]' if type == 'integer' : repeated = '\d' return "(?P<{name}>{repeated}+)" . format ( name = name , repeated = repeated )
Given a parameter definition returns a regex pattern that will match that part of the path .
20,546
def path_to_pattern ( api_path , parameters ) : parts = re . split ( PARAMETER_REGEX , api_path ) pattern = '' . join ( ( process_path_part ( part , parameters ) for part in parts ) ) if not pattern . startswith ( '^' ) : pattern = "^{0}" . format ( pattern ) if not pattern . endswith ( '$' ) : pattern = "{0}$" . format ( pattern ) return pattern
Given an api path, possibly with parameter notation, return a pattern suitable for turning into a regular expression which will match request paths that conform to the parameter definitions and the api path .
20,547
def match_path_to_api_path(path_definitions, target_path, base_path='', context=None):
    """Match a request/response path to exactly one of the api paths.

    Resolution order: exact (normalized) string matches win over regex
    matches; among multiple regex matches the longest api path wins.

    :raises LookupError: when nothing matches.
    :raises MultiplePathsFound: when the match is ambiguous.
    """
    if context is None:
        context = {}
    # BUGFIX: ``collections.Mapping`` was removed in Python 3.10; the ABC
    # has lived in ``collections.abc`` since Python 3.3.
    assert isinstance(context, collections.abc.Mapping)
    if target_path.startswith(base_path):
        normalized_target_path = re.sub(NORMALIZE_SLASH_REGEX, '/', target_path)
        matching_api_paths = list()
        matching_api_paths_regex = list()
        for p, v in path_definitions.items():
            full_path = re.sub(NORMALIZE_SLASH_REGEX, '/', base_path + p)
            r = path_to_regex(
                api_path=full_path,
                path_parameters=extract_path_parameters(v),
                operation_parameters=extract_operation_parameters(v),
                context=context,
            )
            if full_path == normalized_target_path:
                matching_api_paths.append(p)
            elif r.match(normalized_target_path):
                matching_api_paths_regex.append((p, r.match(normalized_target_path)))
        # Strip the base path for use in error messages below.
        target_path = target_path[len(base_path):]
    else:
        matching_api_paths = []
        matching_api_paths_regex = []
    if not matching_api_paths and not matching_api_paths_regex:
        fstr = MESSAGES['path']['no_matching_paths_found'].format(target_path)
        raise LookupError(fstr)
    elif len(matching_api_paths) == 1:
        return matching_api_paths[0]
    elif len(matching_api_paths) > 1:
        raise MultiplePathsFound(
            MESSAGES['path']['multiple_paths_found'].format(
                target_path, [v[0] for v in matching_api_paths],
            )
        )
    elif len(matching_api_paths_regex) == 1:
        return matching_api_paths_regex[0][0]
    elif len(matching_api_paths_regex) > 1:
        # Tie-break ambiguous regex matches by api-path length: a single
        # longest path is considered the most specific match.
        matches_by_path_size = collections.defaultdict(list)
        for path, match in matching_api_paths_regex:
            matches_by_path_size[len(path)].append(path)
        longest_match = max(matches_by_path_size.keys())
        if len(matches_by_path_size[longest_match]) == 1:
            return matches_by_path_size[longest_match][0]
        raise MultiplePathsFound(
            MESSAGES['path']['multiple_paths_found'].format(
                target_path, [v[0] for v in matching_api_paths_regex],
            )
        )
    else:
        # Unreachable: the empty case is handled first; kept for safety.
        return matching_api_paths_regex[0][0]
Match a request or response path to one of the api paths .
20,548
def validate_request ( request , schema ) : with ErrorDict ( ) as errors : try : api_path = validate_path_to_api_path ( path = request . path , context = schema , ** schema ) except ValidationError as err : errors [ 'path' ] . add_error ( err . detail ) return path_definition = schema [ 'paths' ] [ api_path ] or { } if not path_definition : return try : operation_definition = validate_request_method_to_operation ( request_method = request . method , path_definition = path_definition , ) except ValidationError as err : errors [ 'method' ] . add_error ( err . detail ) return if operation_definition is None : return operation_validators = construct_operation_validators ( api_path = api_path , path_definition = path_definition , operation_definition = operation_definition , context = schema , ) try : validate_operation ( request , operation_validators , context = schema ) except ValidationError as err : errors [ 'method' ] . add_error ( err . detail )
Request validation does the following steps .
20,549
def normalize_request ( request ) : if isinstance ( request , Request ) : return request for normalizer in REQUEST_NORMALIZERS : try : return normalizer ( request ) except TypeError : continue raise ValueError ( "Unable to normalize the provided request" )
Given a request normalize it to the internal Request class .
20,550
def normalize_response ( response , request = None ) : if isinstance ( response , Response ) : return response if request is not None and not isinstance ( request , Request ) : request = normalize_request ( request ) for normalizer in RESPONSE_NORMALIZERS : try : return normalizer ( response , request = request ) except TypeError : continue raise ValueError ( "Unable to normalize the provided response" )
Given a response normalize it to the internal Response class . This also involves normalizing the associated request object .
20,551
def generate_header_validator ( headers , context , ** kwargs ) : validators = ValidationDict ( ) for header_definition in headers : header_processor = generate_value_processor ( context = context , ** header_definition ) header_validator = generate_object_validator ( field_validators = construct_header_validators ( header_definition , context = context ) , ) validators . add_property_validator ( header_definition [ 'name' ] , chain_reduce_partial ( header_processor , header_validator , ) , ) return generate_object_validator ( field_validators = validators )
Generates a validation function that will validate a dictionary of headers .
20,552
def generate_parameters_validator ( api_path , path_definition , parameters , context , ** kwargs ) : validators = ValidationDict ( ) path_level_parameters = dereference_parameter_list ( path_definition . get ( 'parameters' , [ ] ) , context , ) operation_level_parameters = dereference_parameter_list ( parameters , context , ) all_parameters = merge_parameter_lists ( path_level_parameters , operation_level_parameters , ) in_path_parameters = filter_parameters ( all_parameters , in_ = PATH ) validators . add_validator ( 'path' , chain_reduce_partial ( attrgetter ( 'path' ) , generate_path_parameters_validator ( api_path , in_path_parameters , context ) , ) , ) in_query_parameters = filter_parameters ( all_parameters , in_ = QUERY ) validators . add_validator ( 'query' , chain_reduce_partial ( attrgetter ( 'query_data' ) , functools . partial ( validate_query_parameters , query_parameters = in_query_parameters , context = context , ) , ) , ) in_header_parameters = filter_parameters ( all_parameters , in_ = HEADER ) validators . add_validator ( 'headers' , chain_reduce_partial ( attrgetter ( 'headers' ) , generate_header_validator ( in_header_parameters , context ) , ) , ) in_request_body_parameters = filter_parameters ( all_parameters , in_ = BODY ) validators . add_validator ( 'request_body' , chain_reduce_partial ( attrgetter ( 'data' ) , generate_request_body_validator ( in_request_body_parameters , context ) , ) ) return generate_object_validator ( field_validators = validators )
Generates a validator function to validate .
20,553
def partial_safe_wraps ( wrapped_func , * args , ** kwargs ) : if isinstance ( wrapped_func , functools . partial ) : return partial_safe_wraps ( wrapped_func . func ) else : return functools . wraps ( wrapped_func )
A version of functools . wraps that is safe to wrap a partial in .
20,554
def skip_if_empty ( func ) : @ partial_safe_wraps ( func ) def inner ( value , * args , ** kwargs ) : if value is EMPTY : return else : return func ( value , * args , ** kwargs ) return inner
Decorator for validation functions which makes them pass if the value passed in is the EMPTY sentinal value .
20,555
def rewrite_reserved_words ( func ) : @ partial_safe_wraps ( func ) def inner ( * args , ** kwargs ) : for word in RESERVED_WORDS : key = "{0}_" . format ( word ) if key in kwargs : kwargs [ word ] = kwargs . pop ( key ) return func ( * args , ** kwargs ) return inner
Given a function whose kwargs need to contain a reserved word such as in, allow calling that function with the keyword spelled as in_ such that the function kwargs are rewritten to use the reserved word .
20,556
def any_validator ( obj , validators , ** kwargs ) : if not len ( validators ) > 1 : raise ValueError ( "any_validator requires at least 2 validator. Only got " "{0}" . format ( len ( validators ) ) ) errors = ErrorDict ( ) for key , validator in validators . items ( ) : try : validator ( obj , ** kwargs ) except ValidationError as err : errors [ key ] = err . detail else : break else : if len ( errors ) == 1 : error = errors . values ( ) [ 0 ] raise ValidationError ( error ) else : errors . raise_ ( )
Attempt multiple validators on an object .
20,557
def _extract_to_tempdir ( archive_filename ) : if not os . path . exists ( archive_filename ) : raise Exception ( "Archive '%s' does not exist" % ( archive_filename ) ) tempdir = tempfile . mkdtemp ( prefix = "metaextract_" ) current_cwd = os . getcwd ( ) try : if tarfile . is_tarfile ( archive_filename ) : with tarfile . open ( archive_filename ) as f : f . extractall ( tempdir ) elif zipfile . is_zipfile ( archive_filename ) : with zipfile . ZipFile ( archive_filename ) as f : f . extractall ( tempdir ) else : raise Exception ( "Can not extract '%s'. " "Not a tar or zip file" % archive_filename ) os . chdir ( tempdir ) yield tempdir finally : os . chdir ( current_cwd ) shutil . rmtree ( tempdir )
extract the given tarball or zipfile to a tempdir and change the cwd to the new tempdir . Delete the tempdir at the end
20,558
def _enter_single_subdir ( root_dir ) : current_cwd = os . getcwd ( ) try : dest_dir = root_dir dir_list = os . listdir ( root_dir ) if len ( dir_list ) == 1 : first = os . path . join ( root_dir , dir_list [ 0 ] ) if os . path . isdir ( first ) : dest_dir = first else : dest_dir = root_dir os . chdir ( dest_dir ) yield dest_dir finally : os . chdir ( current_cwd )
if the given directory has just a single subdir enter that
20,559
def _set_file_encoding_utf8 ( filename ) : with open ( filename , 'r+' ) as f : content = f . read ( ) f . seek ( 0 , 0 ) f . write ( "# -*- coding: utf-8 -*-\n" + content )
set an encoding header as suggested in PEP - 0263 . This is not entirely correct because we don't know the encoding of the given file, but it's at least a chance to get metadata from the setup . py
20,560
def _setup_py_run_from_dir ( root_dir , py_interpreter ) : data = { } with _enter_single_subdir ( root_dir ) as single_subdir : if not os . path . exists ( "setup.py" ) : raise Exception ( "'setup.py' does not exist in '%s'" % ( single_subdir ) ) output_json = tempfile . NamedTemporaryFile ( ) cmd = "%s setup.py -q --command-packages metaextract " "metaextract -o %s " % ( py_interpreter , output_json . name ) try : subprocess . check_output ( cmd , stderr = subprocess . STDOUT , shell = True ) except subprocess . CalledProcessError : _set_file_encoding_utf8 ( "setup.py" ) subprocess . check_output ( cmd , shell = True ) with open ( output_json . name , "r" ) as f : data = json . loads ( f . read ( ) ) for key in [ 'data_files' , 'entry_points' , 'extras_require' , 'install_requires' , 'setup_requires' , 'scripts' , 'tests_require' , 'tests_suite' ] : if key in data [ 'data' ] and isinstance ( data [ 'data' ] [ key ] , list ) : data [ 'data' ] [ key ] = sorted ( data [ 'data' ] [ key ] ) return data
run the extractmeta command via the setup . py in the given root_dir . the output of extractmeta is json and is stored in a tempfile which is then read in and returned as data
20,561
def from_archive ( archive_filename , py_interpreter = sys . executable ) : with _extract_to_tempdir ( archive_filename ) as root_dir : data = _setup_py_run_from_dir ( root_dir , py_interpreter ) return data
extract metadata from a given sdist archive file
20,562
def xmlns ( source ) : namespaces = { } events = ( "end" , "start-ns" , "end-ns" ) for ( event , elem ) in iterparse ( source , events ) : if event == "start-ns" : prefix , ns = elem namespaces [ prefix ] = ns elif event == "end" : break if hasattr ( source , "seek" ) : source . seek ( 0 ) return namespaces
Returns a map of prefix to namespace for the given XML file .
20,563
def create_block ( mc , block_id , subtype = None ) : ptx , pty , ptz = mc . player . getTilePos ( ) px , py , pz = mc . player . getPos ( ) if subtype is None : mc . setBlock ( ptx , pty , ptz , block_id ) else : mc . setBlock ( ptx , pty , ptz , block_id , subtype ) mc . player . setPos ( px , py + 1 , pz )
Build a block with the specified id and subtype under the player in the Minecraft world . Subtype is optional and can be specified as None to use the default subtype for the block .
20,564
def _busy_wait_ms ( self , ms ) : start = time . time ( ) delta = ms / 1000.0 while ( time . time ( ) - start ) <= delta : pass
Busy wait for the specified number of milliseconds .
20,565
def _write_frame ( self , data ) : assert data is not None and 0 < len ( data ) < 255 , 'Data must be array of 1 to 255 bytes.' length = len ( data ) frame = bytearray ( length + 8 ) frame [ 0 ] = PN532_SPI_DATAWRITE frame [ 1 ] = PN532_PREAMBLE frame [ 2 ] = PN532_STARTCODE1 frame [ 3 ] = PN532_STARTCODE2 frame [ 4 ] = length & 0xFF frame [ 5 ] = self . _uint8_add ( ~ length , 1 ) frame [ 6 : - 2 ] = data checksum = reduce ( self . _uint8_add , data , 0xFF ) frame [ - 2 ] = ~ checksum & 0xFF frame [ - 1 ] = PN532_POSTAMBLE logger . debug ( 'Write frame: 0x{0}' . format ( binascii . hexlify ( frame ) ) ) self . _gpio . set_low ( self . _cs ) self . _busy_wait_ms ( 2 ) self . _spi . write ( frame ) self . _gpio . set_high ( self . _cs )
Write a frame to the PN532 with the specified data bytearray .
20,566
def _read_data ( self , count ) : frame = bytearray ( count ) frame [ 0 ] = PN532_SPI_DATAREAD self . _gpio . set_low ( self . _cs ) self . _busy_wait_ms ( 2 ) response = self . _spi . transfer ( frame ) self . _gpio . set_high ( self . _cs ) return response
Read a specified count of bytes from the PN532 .
20,567
def _read_frame ( self , length ) : response = self . _read_data ( length + 8 ) logger . debug ( 'Read frame: 0x{0}' . format ( binascii . hexlify ( response ) ) ) if response [ 0 ] != 0x01 : raise RuntimeError ( 'Response frame does not start with 0x01!' ) offset = 1 while response [ offset ] == 0x00 : offset += 1 if offset >= len ( response ) : raise RuntimeError ( 'Response frame preamble does not contain 0x00FF!' ) if response [ offset ] != 0xFF : raise RuntimeError ( 'Response frame preamble does not contain 0x00FF!' ) offset += 1 if offset >= len ( response ) : raise RuntimeError ( 'Response contains no data!' ) frame_len = response [ offset ] if ( frame_len + response [ offset + 1 ] ) & 0xFF != 0 : raise RuntimeError ( 'Response length checksum did not match length!' ) checksum = reduce ( self . _uint8_add , response [ offset + 2 : offset + 2 + frame_len + 1 ] , 0 ) if checksum != 0 : raise RuntimeError ( 'Response checksum did not match expected value!' ) return response [ offset + 2 : offset + 2 + frame_len ]
Read a response frame from the PN532 of at most length bytes in size . Returns the data inside the frame if found otherwise raises an exception if there is an error parsing the frame . Note that less than length bytes might be returned!
20,568
def _wait_ready ( self , timeout_sec = 1 ) : start = time . time ( ) self . _gpio . set_low ( self . _cs ) self . _busy_wait_ms ( 2 ) response = self . _spi . transfer ( [ PN532_SPI_STATREAD , 0x00 ] ) self . _gpio . set_high ( self . _cs ) while response [ 1 ] != PN532_SPI_READY : if time . time ( ) - start >= timeout_sec : return False time . sleep ( 0.01 ) self . _gpio . set_low ( self . _cs ) self . _busy_wait_ms ( 2 ) response = self . _spi . transfer ( [ PN532_SPI_STATREAD , 0x00 ] ) self . _gpio . set_high ( self . _cs ) return True
Wait until the PN532 is ready to receive commands . At most wait timeout_sec seconds for the PN532 to be ready . If the PN532 is ready before the timeout is exceeded then True will be returned otherwise False is returned when the timeout is exceeded .
20,569
def call_function ( self , command , response_length = 0 , params = [ ] , timeout_sec = 1 ) : data = bytearray ( 2 + len ( params ) ) data [ 0 ] = PN532_HOSTTOPN532 data [ 1 ] = command & 0xFF data [ 2 : ] = params self . _write_frame ( data ) if not self . _wait_ready ( timeout_sec ) : return None response = self . _read_data ( len ( PN532_ACK ) ) if response != PN532_ACK : raise RuntimeError ( 'Did not receive expected ACK from PN532!' ) if not self . _wait_ready ( timeout_sec ) : return None response = self . _read_frame ( response_length + 2 ) if not ( response [ 0 ] == PN532_PN532TOHOST and response [ 1 ] == ( command + 1 ) ) : raise RuntimeError ( 'Received unexpected command response!' ) return response [ 2 : ]
Send specified command to the PN532 and expect up to response_length bytes back in a response . Note that less than the expected bytes might be returned! Params can optionally specify an array of bytes to send as parameters to the function call . Will wait up to timeout_secs seconds for a response and return a bytearray of response bytes or None if no response is available within the timeout .
20,570
def begin ( self ) : self . _gpio . set_low ( self . _cs ) time . sleep ( 1.0 ) self . get_firmware_version ( ) self . _gpio . set_high ( self . _cs )
Initialize communication with the PN532 . Must be called before any other calls are made against the PN532 .
20,571
def get_firmware_version ( self ) : response = self . call_function ( PN532_COMMAND_GETFIRMWAREVERSION , 4 ) if response is None : raise RuntimeError ( 'Failed to detect the PN532! Make sure there is sufficient power (use a 1 amp or greater power supply), the PN532 is wired correctly to the device, and the solder joints on the PN532 headers are solidly connected.' ) return ( response [ 0 ] , response [ 1 ] , response [ 2 ] , response [ 3 ] )
Call PN532 GetFirmwareVersion function and return a tuple with the IC Ver Rev and Support values .
20,572
def read_passive_target ( self , card_baud = PN532_MIFARE_ISO14443A , timeout_sec = 1 ) : response = self . call_function ( PN532_COMMAND_INLISTPASSIVETARGET , params = [ 0x01 , card_baud ] , response_length = 17 ) if response is None : return None if response [ 0 ] != 0x01 : raise RuntimeError ( 'More than one card detected!' ) if response [ 5 ] > 7 : raise RuntimeError ( 'Found card with unexpectedly long UID!' ) return response [ 6 : 6 + response [ 5 ] ]
Wait for a MiFare card to be available and return its UID when found . Will wait up to timeout_sec seconds and return None if no card is found otherwise a bytearray with the UID of the found card is returned .
20,573
def mifare_classic_read_block ( self , block_number ) : response = self . call_function ( PN532_COMMAND_INDATAEXCHANGE , params = [ 0x01 , MIFARE_CMD_READ , block_number & 0xFF ] , response_length = 17 ) if response [ 0 ] != 0x00 : return None return response [ 1 : ]
Read a block of data from the card . Block number should be the block to read . If the block is successfully read a bytearray of length 16 with data starting at the specified block will be returned . If the block is not read then None will be returned .
20,574
def mifare_classic_write_block ( self , block_number , data ) : assert data is not None and len ( data ) == 16 , 'Data must be an array of 16 bytes!' params = bytearray ( 19 ) params [ 0 ] = 0x01 params [ 1 ] = MIFARE_CMD_WRITE params [ 2 ] = block_number & 0xFF params [ 3 : ] = data response = self . call_function ( PN532_COMMAND_INDATAEXCHANGE , params = params , response_length = 1 ) return response [ 0 ] == 0x00
Write a block of data to the card . Block number should be the block to write and data should be a byte array of length 16 with the data to write . If the data is successfully written then True is returned otherwise False is returned .
20,575
def _dirmatch ( path , matchwith ) : matchlen = len ( matchwith ) if ( path . startswith ( matchwith ) and path [ matchlen : matchlen + 1 ] in [ os . sep , '' ] ) : return True return False
Check if path is within matchwith s tree .
20,576
def _virtualenv_sys ( venv_path ) : "obtain version and path info from a virtualenv." executable = os . path . join ( venv_path , env_bin_dir , 'python' ) p = subprocess . Popen ( [ executable , '-c' , 'import sys;' 'print (sys.version[:3]);' 'print ("\\n".join(sys.path));' ] , env = { } , stdout = subprocess . PIPE ) stdout , err = p . communicate ( ) assert not p . returncode and stdout lines = stdout . decode ( 'utf-8' ) . splitlines ( ) return lines [ 0 ] , list ( filter ( bool , lines [ 1 : ] ) )
obtain version and path info from a virtualenv .
20,577
def int_to_ef ( n ) : flags = { } for name , value in libarchive . constants . archive_entry . FILETYPES . items ( ) : flags [ name ] = ( n & value ) > 0 return ENTRY_FILETYPE ( ** flags )
This is here for testing support but in practice this isn t very useful as many of the flags are just combinations of other flags . The relationships are defined by the OS in ways that aren t semantically intuitive to this project .
20,578
def _enumerator ( opener , entry_cls , format_code = None , filter_code = None ) : archive_res = _archive_read_new ( ) try : r = _set_read_context ( archive_res , format_code , filter_code ) opener ( archive_res ) def it ( ) : while 1 : with _archive_read_next_header ( archive_res ) as entry_res : if entry_res is None : break e = entry_cls ( archive_res , entry_res ) yield e if e . is_consumed is False : _archive_read_data_skip ( archive_res ) yield it ( ) finally : _archive_read_free ( archive_res )
Return an archive enumerator from a user - defined source using a user - defined entry type .
20,579
def file_enumerator ( filepath , block_size = 10240 , * args , ** kwargs ) : _LOGGER . debug ( "Enumerating through archive file: %s" , filepath ) def opener ( archive_res ) : _LOGGER . debug ( "Opening from file (file_enumerator): %s" , filepath ) _archive_read_open_filename ( archive_res , filepath , block_size ) if 'entry_cls' not in kwargs : kwargs [ 'entry_cls' ] = _ArchiveEntryItReadable return _enumerator ( opener , * args , ** kwargs )
Return an enumerator that knows how to read a physical file .
20,580
def memory_enumerator ( buffer_ , * args , ** kwargs ) : _LOGGER . debug ( "Enumerating through (%d) bytes of archive data." , len ( buffer_ ) ) def opener ( archive_res ) : _LOGGER . debug ( "Opening from (%d) bytes (memory_enumerator)." , len ( buffer_ ) ) _archive_read_open_memory ( archive_res , buffer_ ) if 'entry_cls' not in kwargs : kwargs [ 'entry_cls' ] = _ArchiveEntryItReadable return _enumerator ( opener , * args , ** kwargs )
Return an enumerator that knows how to read raw memory .
20,581
def _pour ( opener , flags = 0 , * args , ** kwargs ) : with _enumerator ( opener , * args , entry_cls = _ArchiveEntryItState , ** kwargs ) as r : ext = libarchive . calls . archive_write . c_archive_write_disk_new ( ) libarchive . calls . archive_write . c_archive_write_disk_set_options ( ext , flags ) for state in r : yield state if state . selected is False : continue r = libarchive . calls . archive_write . c_archive_write_header ( ext , state . entry_res ) buff = ctypes . c_void_p ( ) size = ctypes . c_size_t ( ) offset = ctypes . c_longlong ( ) while 1 : r = libarchive . calls . archive_read . c_archive_read_data_block ( state . reader_res , ctypes . byref ( buff ) , ctypes . byref ( size ) , ctypes . byref ( offset ) ) if r == libarchive . constants . archive . ARCHIVE_EOF : break elif r != libarchive . constants . archive . ARCHIVE_OK : message = c_archive_error_string ( state . reader_res ) raise libarchive . exception . ArchiveError ( "Pour failed: (%d) [%s]" % ( r , message ) ) r = libarchive . calls . archive_write . c_archive_write_data_block ( ext , buff , size , offset ) r = libarchive . calls . archive_write . c_archive_write_finish_entry ( ext )
A flexible pouring facility that knows how to enumerate entry data .
20,582
def file_pour ( filepath , block_size = 10240 , * args , ** kwargs ) : def opener ( archive_res ) : _LOGGER . debug ( "Opening from file (file_pour): %s" , filepath ) _archive_read_open_filename ( archive_res , filepath , block_size ) return _pour ( opener , * args , flags = 0 , ** kwargs )
Write physical files from entries .
20,583
def memory_pour ( buffer_ , * args , ** kwargs ) : def opener ( archive_res ) : _LOGGER . debug ( "Opening from (%d) bytes (memory_pour)." , len ( buffer_ ) ) _archive_read_open_memory ( archive_res , buffer_ ) return _pour ( opener , * args , flags = 0 , ** kwargs )
Yield data from entries .
20,584
def _archive_write_data ( archive , data ) : n = libarchive . calls . archive_write . c_archive_write_data ( archive , ctypes . cast ( ctypes . c_char_p ( data ) , ctypes . c_void_p ) , len ( data ) ) if n == 0 : message = c_archive_error_string ( archive ) raise ValueError ( "No bytes were written. Error? [%s]" % ( message ) )
Write data to archive . This will only be called with a non - empty string .
20,585
def _write_ctrl_meas ( self ) : self . _write_register_byte ( _BME280_REGISTER_CTRL_HUM , self . overscan_humidity ) self . _write_register_byte ( _BME280_REGISTER_CTRL_MEAS , self . _ctrl_meas )
Write the values to the ctrl_meas and ctrl_hum registers in the device ctrl_meas sets the pressure and temperature data acquistion options ctrl_hum sets the humidty oversampling and must be written to first
20,586
def _write_config ( self ) : normal_flag = False if self . _mode == MODE_NORMAL : normal_flag = True self . mode = MODE_SLEEP self . _write_register_byte ( _BME280_REGISTER_CONFIG , self . _config ) if normal_flag : self . mode = MODE_NORMAL
Write the value to the config register in the device
20,587
def _config ( self ) : config = 0 if self . mode == MODE_NORMAL : config += ( self . _t_standby << 5 ) if self . _iir_filter : config += ( self . _iir_filter << 2 ) return config
Value to be written to the device s config register
20,588
def _ctrl_meas ( self ) : ctrl_meas = ( self . overscan_temperature << 5 ) ctrl_meas += ( self . overscan_pressure << 2 ) ctrl_meas += self . mode return ctrl_meas
Value to be written to the device s ctrl_meas register
20,589
def measurement_time_typical ( self ) : meas_time_ms = 1.0 if self . overscan_temperature != OVERSCAN_DISABLE : meas_time_ms += ( 2 * _BME280_OVERSCANS . get ( self . overscan_temperature ) ) if self . overscan_pressure != OVERSCAN_DISABLE : meas_time_ms += ( 2 * _BME280_OVERSCANS . get ( self . overscan_pressure ) + 0.5 ) if self . overscan_humidity != OVERSCAN_DISABLE : meas_time_ms += ( 2 * _BME280_OVERSCANS . get ( self . overscan_humidity ) + 0.5 ) return meas_time_ms
Typical time in milliseconds required to complete a measurement in normal mode
20,590
def pressure ( self ) : self . _read_temperature ( ) adc = self . _read24 ( _BME280_REGISTER_PRESSUREDATA ) / 16 var1 = float ( self . _t_fine ) / 2.0 - 64000.0 var2 = var1 * var1 * self . _pressure_calib [ 5 ] / 32768.0 var2 = var2 + var1 * self . _pressure_calib [ 4 ] * 2.0 var2 = var2 / 4.0 + self . _pressure_calib [ 3 ] * 65536.0 var3 = self . _pressure_calib [ 2 ] * var1 * var1 / 524288.0 var1 = ( var3 + self . _pressure_calib [ 1 ] * var1 ) / 524288.0 var1 = ( 1.0 + var1 / 32768.0 ) * self . _pressure_calib [ 0 ] if var1 == 0 : return 0 if var1 : pressure = 1048576.0 - adc pressure = ( ( pressure - var2 / 4096.0 ) * 6250.0 ) / var1 var1 = self . _pressure_calib [ 8 ] * pressure * pressure / 2147483648.0 var2 = pressure * self . _pressure_calib [ 7 ] / 32768.0 pressure = pressure + ( var1 + var2 + self . _pressure_calib [ 6 ] ) / 16.0 pressure /= 100 if pressure < _BME280_PRESSURE_MIN_HPA : return _BME280_PRESSURE_MIN_HPA if pressure > _BME280_PRESSURE_MAX_HPA : return _BME280_PRESSURE_MAX_HPA return pressure else : return _BME280_PRESSURE_MIN_HPA
The compensated pressure in hectoPascals . returns None if pressure measurement is disabled
20,591
def humidity ( self ) : self . _read_temperature ( ) hum = self . _read_register ( _BME280_REGISTER_HUMIDDATA , 2 ) adc = float ( hum [ 0 ] << 8 | hum [ 1 ] ) var1 = float ( self . _t_fine ) - 76800.0 var2 = ( self . _humidity_calib [ 3 ] * 64.0 + ( self . _humidity_calib [ 4 ] / 16384.0 ) * var1 ) var3 = adc - var2 var4 = self . _humidity_calib [ 1 ] / 65536.0 var5 = ( 1.0 + ( self . _humidity_calib [ 2 ] / 67108864.0 ) * var1 ) var6 = 1.0 + ( self . _humidity_calib [ 5 ] / 67108864.0 ) * var1 * var5 var6 = var3 * var4 * ( var5 * var6 ) humidity = var6 * ( 1.0 - self . _humidity_calib [ 0 ] * var6 / 524288.0 ) if humidity > _BME280_HUMIDITY_MAX : return _BME280_HUMIDITY_MAX if humidity < _BME280_HUMIDITY_MIN : return _BME280_HUMIDITY_MIN return humidity
The relative humidity in RH % returns None if humidity measurement is disabled
20,592
def _read_coefficients ( self ) : coeff = self . _read_register ( _BME280_REGISTER_DIG_T1 , 24 ) coeff = list ( struct . unpack ( '<HhhHhhhhhhhh' , bytes ( coeff ) ) ) coeff = [ float ( i ) for i in coeff ] self . _temp_calib = coeff [ : 3 ] self . _pressure_calib = coeff [ 3 : ] self . _humidity_calib = [ 0 ] * 6 self . _humidity_calib [ 0 ] = self . _read_byte ( _BME280_REGISTER_DIG_H1 ) coeff = self . _read_register ( _BME280_REGISTER_DIG_H2 , 7 ) coeff = list ( struct . unpack ( '<hBBBBb' , bytes ( coeff ) ) ) self . _humidity_calib [ 1 ] = float ( coeff [ 0 ] ) self . _humidity_calib [ 2 ] = float ( coeff [ 1 ] ) self . _humidity_calib [ 3 ] = float ( ( coeff [ 2 ] << 4 ) | ( coeff [ 3 ] & 0xF ) ) self . _humidity_calib [ 4 ] = float ( ( coeff [ 4 ] << 4 ) | ( coeff [ 3 ] >> 4 ) ) self . _humidity_calib [ 5 ] = float ( coeff [ 5 ] )
Read & save the calibration coefficients
20,593
def _read24 ( self , register ) : ret = 0.0 for b in self . _read_register ( register , 3 ) : ret *= 256.0 ret += float ( b & 0xFF ) return ret
Read an unsigned 24 - bit value as a floating point and return it .
20,594
def _create ( self , postData ) : if self . infos is None : r = self . connection . session . post ( self . indexesURL , params = { "collection" : self . collection . name } , data = json . dumps ( postData , default = str ) ) data = r . json ( ) if ( r . status_code >= 400 ) or data [ 'error' ] : raise CreationError ( data [ 'errorMessage' ] , data ) self . infos = data
Creates an index of any type according to postData
20,595
def createVertex(self, collectionName, docAttributes, waitForSync=False):
    """Add a vertex with the given attributes to the graph and return the
    stored document.

    The attributes are validated against the collection's field validators
    before being posted; raises CreationError if the server rejects them.
    """
    collection = self.database[collectionName]
    # Run the collection's validators over the raw attributes first so a
    # bad document never reaches the server.
    store = DOC.DocumentStore(collection, validators=collection._fields, initDct=docAttributes)
    store.validate()
    response = self.connection.session.post(
        "%s/vertex/%s" % (self.URL, collectionName),
        data=json.dumps(docAttributes, default=str),
        params={'waitForSync': waitForSync},
    )
    payload = response.json()
    if response.status_code in (201, 202):
        # Fetch the freshly created document back through the collection.
        return collection[payload["vertex"]["_key"]]
    raise CreationError("Unable to create vertice, %s" % payload["errorMessage"], payload)
adds a vertex to the graph and returns it
20,596
def deleteVertex(self, document, waitForSync=False):
    """Remove the given vertex document from the graph.

    Returns True on success; raises DeletionError otherwise.
    """
    response = self.connection.session.delete(
        "%s/vertex/%s" % (self.URL, document._id),
        params={'waitForSync': waitForSync},
    )
    data = response.json()
    if response.status_code in (200, 202):
        return True
    raise DeletionError("Unable to delete vertice, %s" % document._id, data)
deletes a vertex from the graph as well as all linked edges
20,597
def createEdge(self, collectionName, _fromId, _toId, edgeAttributes, waitForSync=False):
    """Create an edge in 'collectionName' linking _fromId to _toId and
    return the stored edge document.

    Raises ValueError for an empty endpoint id, KeyError if the collection
    is not part of the graph's edge definitions, and CreationError if the
    server rejects the edge.
    """
    # Guard clauses: both endpoints must be non-empty and the edge
    # collection must belong to this graph's definitions.
    if not _fromId:
        raise ValueError("Invalid _fromId: %s" % _fromId)
    if not _toId:
        raise ValueError("Invalid _toId: %s" % _toId)
    if collectionName not in self.definitions:
        raise KeyError("'%s' is not among the edge definitions" % collectionName)
    url = "%s/edge/%s" % (self.URL, collectionName)
    edgeCollection = self.database[collectionName]
    edgeCollection.validatePrivate("_from", _fromId)
    edgeCollection.validatePrivate("_to", _toId)
    # Validate the attributes through a throwaway edge object, then send
    # its store plus the two endpoints.
    edge = edgeCollection.createEdge()
    edge.set(edgeAttributes)
    edge.validate()
    payload = edge.getStore()
    payload["_from"] = _fromId
    payload["_to"] = _toId
    response = self.connection.session.post(
        url,
        data=json.dumps(payload, default=str),
        params={'waitForSync': waitForSync},
    )
    data = response.json()
    if response.status_code in (201, 202):
        return edgeCollection[data["edge"]["_key"]]
    raise CreationError("Unable to create edge, %s" % response.json()["errorMessage"], data)
creates an edge between two documents
20,598
def link(self, definition, doc1, doc2, edgeAttributes, waitForSync=False):
    """A shorthand for createEdge that takes two documents (or raw document
    id strings) as input.

    Each document is saved first if it has no _id yet.  Uses isinstance
    rather than an exact type() comparison so that subclasses of
    DOC.Document are also recognized as documents instead of being passed
    through unconverted as if they were id strings.
    """
    def _to_id(doc):
        # Resolve a Document (saving it if needed) or pass an id through.
        if isinstance(doc, DOC.Document):
            if not doc._id:
                doc.save()
            return doc._id
        return doc
    return self.createEdge(definition, _to_id(doc1), _to_id(doc2), edgeAttributes, waitForSync)
A shorthand for createEdge that takes two documents as input
20,599
def unlink(self, definition, doc1, doc2):
    """Delete every edge of type 'definition' going from doc1 to doc2."""
    example = {"_from": doc1._id, "_to": doc2._id}
    for edge in self.database[definition].fetchByExample(example, batchSize=100):
        self.deleteEdge(edge)
deletes all links between doc1 and doc2