idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
39,200
def public(self, username=None):
    """Return all public repositories for a user.

    Falls back to the authenticated account's own public repositories when
    *username* is not given. On success returns ``(status, repositories)``;
    on a non-indexable response the raw response is returned unchanged.
    """
    user = username or self.bitbucket.username or ''
    endpoint = self.bitbucket.url('GET_USER', username=user)
    result = self.bitbucket.dispatch('GET', endpoint)
    try:
        return (result[0], result[1]['repositories'])
    except TypeError:
        # response was not an indexable (status, payload) pair
        pass
    return result
Returns all public repositories from an user . If username is not defined tries to return own public repos .
39,201
def all(self):
    """Return the authenticated user's own repositories.

    On success returns ``(status, repositories)``; otherwise the raw
    response is returned unchanged.
    """
    endpoint = self.bitbucket.url('GET_USER', username=self.bitbucket.username)
    result = self.bitbucket.dispatch('GET', endpoint, auth=self.bitbucket.auth)
    try:
        return (result[0], result[1]['repositories'])
    except TypeError:
        # response was not an indexable (status, payload) pair
        pass
    return result
Return own repositories .
39,202
def create(self, repo_name, scm='git', private=True, **kwargs):
    """Create a new repository on the authenticated Bitbucket account.

    Extra keyword arguments are forwarded to the API call. Returns the
    dispatch result.
    """
    endpoint = self.bitbucket.url('CREATE_REPO')
    return self.bitbucket.dispatch(
        'POST', endpoint, auth=self.bitbucket.auth,
        name=repo_name, scm=scm, is_private=private, **kwargs)
Creates a new repository on own Bitbucket account and return it .
39,203
def archive(self, repo_slug=None, format='zip', prefix=''):
    """Download one of your repositories and compress it as a zip archive.

    Returns ``(True, path_to_archive)`` on success, or
    ``(False, error_message)`` when no repository tree was fetched.

    NOTE(review): *format* is accepted but only a zip archive is ever
    produced — confirm whether other formats were intended.
    """
    # FIX: the original computed "'%s'.lstrip('/') % prefix", which strips
    # the *format string* (a no-op) instead of the prefix. Strip leading
    # slashes from the actual prefix value.
    prefix = ('%s' % prefix).lstrip('/')
    self._get_files_in_dir(repo_slug=repo_slug, dir='/')
    if self.bitbucket.repo_tree:
        with NamedTemporaryFile(delete=False) as archive:
            with ZipFile(archive, 'w') as zip_archive:
                for name, file in self.bitbucket.repo_tree.items():
                    # NOTE(review): these per-entry temp files are never
                    # deleted (delete=False) — confirm whether cleanup is
                    # intended here.
                    with NamedTemporaryFile(delete=False) as temp_file:
                        temp_file.write(file.encode('utf-8'))
                    # write after the with-block so the content is flushed
                    zip_archive.write(temp_file.name, prefix + name)
        return (True, archive.name)
    return (False, 'Could not archive your project.')
Get one of your repositories and compress it as an archive . Return the path of the archive .
39,204
def create(self, issue_id=None, repo_slug=None, **kwargs):
    """Add a comment to an issue in one of your repositories.

    Falls back to ``self.issue_id`` / the client's default repo slug when
    the arguments are omitted. Extra keyword arguments become the comment
    payload.
    """
    issue_id = issue_id or self.issue_id
    repo_slug = repo_slug or self.bitbucket.repo_slug or ''
    endpoint = self.bitbucket.url(
        'CREATE_COMMENT',
        username=self.bitbucket.username,
        repo_slug=repo_slug,
        issue_id=issue_id)
    return self.bitbucket.dispatch('POST', endpoint, auth=self.bitbucket.auth, **kwargs)
Add an issue comment to one of your repositories. Each issue comment requires only the content data field; the system autopopulates the rest.
39,205
def delete(self, comment_id, issue_id=None, repo_slug=None):
    """Delete an issue comment from one of your repositories.

    Falls back to ``self.issue_id`` / the client's default repo slug when
    the arguments are omitted.
    """
    issue_id = issue_id or self.issue_id
    repo_slug = repo_slug or self.bitbucket.repo_slug or ''
    endpoint = self.bitbucket.url(
        'DELETE_COMMENT',
        username=self.bitbucket.username,
        repo_slug=repo_slug,
        issue_id=issue_id,
        comment_id=comment_id)
    return self.bitbucket.dispatch('DELETE', endpoint, auth=self.bitbucket.auth)
Delete an issue comment from one of your repositories.
39,206
def all(self):
    """Return every SSH key associated with the authenticated account."""
    endpoint = self.bitbucket.url('GET_SSH_KEYS')
    return self.bitbucket.dispatch('GET', endpoint, auth=self.bitbucket.auth)
Get all ssh keys associated with your account .
39,207
def get(self, key_id=None):
    """Return a single SSH key, identified by *key_id*."""
    endpoint = self.bitbucket.url('GET_SSH_KEY', key_id=key_id)
    return self.bitbucket.dispatch('GET', endpoint, auth=self.bitbucket.auth)
Get one of the ssh keys associated with your account .
39,208
def create(self, key=None, label=None):
    """Associate an SSH key with the account and return it."""
    key = '%s' % key  # coerce to str before sending
    endpoint = self.bitbucket.url('SET_SSH_KEY')
    return self.bitbucket.dispatch('POST', endpoint, auth=self.bitbucket.auth,
                                   key=key, label=label)
Associate an ssh key with your account and return it .
39,209
def delete(self, key_id=None):
    """Delete the SSH key identified by *key_id* (no confirmation, no undo)."""
    endpoint = self.bitbucket.url('DELETE_SSH_KEY', key_id=key_id)
    return self.bitbucket.dispatch('DELETE', endpoint, auth=self.bitbucket.auth)
Delete one of the ssh keys associated with your account. Please use with caution as there is NO confirmation and NO undo.
39,210
def make_file_cm(filename, mode='a'):
    """Build a context-manager factory over *filename*.

    The returned zero-argument callable yields an open file handle for
    *filename* (opened with *mode*, append by default) and closes it on
    exit — useful e.g. for capturing subprocess output.
    """
    @contextlib.contextmanager
    def file_cm():
        with open(filename, mode=mode) as handle:
            yield handle
    return file_cm
Open a file for appending and yield the open filehandle . Close the filehandle after yielding it . This is useful for creating a context manager for logging the output of a Vagrant instance .
39,211
def halt(self, vm_name=None, force=False):
    """Halt the Vagrant box, optionally with ``--force``.

    Invalidates the cached ssh config for *vm_name*.
    """
    flag = '--force' if force else None
    self._call_vagrant_command(['halt', vm_name, flag])
    self._cached_conf[vm_name] = None
Halt the Vagrant box .
39,212
def _parse_status(self, output):
    """Build Status records from machine-readable ``vagrant status`` output.

    Groups the parsed (timestamp, target, kind, data) tuples by target VM
    and extracts each VM's state and provider.
    """
    parsed = self._parse_machine_readable_output(output)
    statuses = []
    for target, entries in itertools.groupby(parsed, lambda entry: entry[1]):
        details = {kind: data for timestamp, _, kind, data in entries}
        statuses.append(Status(name=target,
                               state=details.get('state'),
                               provider=details.get('provider-name')))
    return statuses
Unit testing is so much easier when Vagrant is removed from the equation .
39,213
def box_add(self, name, url, provider=None, force=False):
    """Add a box with the given *name* from the given *url*.

    Passes ``--provider`` and ``--force`` through to the CLI when set.
    """
    cmd = ['box', 'add', name, url, '--force' if force else None]
    if provider is not None:
        cmd.extend(['--provider', provider])
    self._call_vagrant_command(cmd)
Adds a box with given name from given url .
39,214
def package(self, vm_name=None, base=None, output=None, vagrantfile=None):
    """Package a running vagrant environment into a box.

    NOTE(review): *base* is accepted for signature compatibility but is
    never used by this implementation — confirm against the vagrant CLI.
    """
    cmd = ['package', vm_name]
    if output is not None:
        cmd.extend(['--output', output])
    if vagrantfile is not None:
        cmd.extend(['--vagrantfile', vagrantfile])
    self._call_vagrant_command(cmd)
Packages a running vagrant environment into a box .
39,215
def snapshot_list(self):
    """List all snapshot names, or ``[]`` when none have been taken."""
    sentinel = 'No snapshots have been taken yet!'
    output = self._run_vagrant_command(['snapshot', 'list'])
    return [] if sentinel in output else output.splitlines()
This command will list all the snapshots taken .
39,216
def _parse_box_list(self, output):
    """Parse machine-readable ``vagrant box list`` output into Box records.

    A ``box-name`` entry starts a new box; provider/version entries attach
    to the box currently being built.
    """
    boxes = []
    name = provider = version = None
    for timestamp, target, kind, data in self._parse_machine_readable_output(output):
        if kind == 'box-name':
            # a new box begins: flush the previous one, if any
            if name is not None:
                boxes.append(Box(name=name, provider=provider, version=version))
            name, provider, version = data, None, None
        elif kind == 'box-provider':
            provider = data
        elif kind == 'box-version':
            version = data
    # flush the trailing box
    if name is not None:
        boxes.append(Box(name=name, provider=provider, version=version))
    return boxes
Remove Vagrant usage for unit testing
39,217
def _parse_plugin_list(self, output):
    """Parse machine-readable ``vagrant plugin list`` output into Plugin records.

    A ``plugin-name`` entry starts a new plugin; a ``plugin-version`` entry
    may embed ", system" after Vagrant's encoded comma to flag a system
    plugin.
    """
    ENCODED_COMMA = '%!(VAGRANT_COMMA)'
    plugins = []
    name, version, system = None, None, False
    for timestamp, target, kind, data in self._parse_machine_readable_output(output):
        if kind == 'plugin-name':
            # a new plugin begins: flush the previous one, if any
            if name is not None:
                plugins.append(Plugin(name=name, version=version, system=system))
            name, version, system = data, None, False
        elif kind == 'plugin-version':
            if ENCODED_COMMA in data:
                version, etc = data.split(ENCODED_COMMA)
                system = (etc.strip().lower() == 'system')
            else:
                version, system = data, False
    # flush the trailing plugin
    if name is not None:
        plugins.append(Plugin(name=name, version=version, system=system))
    return plugins
Remove Vagrant from the equation for unit testing .
39,218
def _stream_vagrant_command(self, args):
    """Execute a vagrant command, yielding its stdout line by line.

    The caller must consume the entire generator, otherwise the subprocess
    is never waited on and may hang. Raises CalledProcessError on a
    non-zero exit status.
    """
    # NOTE(review): py3 is computed but never used — presumably left over
    # from a 2/3 compatibility branch.
    py3 = sys.version_info > (3, 0)
    command = self._make_vagrant_command(args)
    with self.err_cm() as err_fh:
        # bufsize=1 requests line buffering on the pipe
        sp_args = dict(args=command, cwd=self.root, env=self.env,
                       stdout=subprocess.PIPE, stderr=err_fh, bufsize=1)
        p = subprocess.Popen(**sp_args)
        # closing p.stdout when the iteration ends lets the child see EPIPE
        with p.stdout:
            for line in iter(p.stdout.readline, b''):
                yield compat.decode(line)
        # reap the child only after stdout is drained to avoid a deadlock
        p.wait()
        if p.returncode != 0:
            raise subprocess.CalledProcessError(p.returncode, command)
Execute a vagrant command, returning a generator of the output lines. The caller should consume the entire generator to avoid hanging the subprocess.
39,219
def sandbox_status(self, vm_name=None):
    """Return the current sahara sandbox status for *vm_name*."""
    raw_output = self._run_sandbox_command(['status', vm_name])
    return self._parse_vagrant_sandbox_status(raw_output)
Returns the status of the sandbox mode .
39,220
def _parse_vagrant_sandbox_status ( self , vagrant_output ) : tokens = [ token . strip ( ) for token in vagrant_output . split ( ' ' ) ] if tokens [ 0 ] == 'Usage:' : sahara_status = 'not installed' elif "{} {}" . format ( tokens [ - 2 ] , tokens [ - 1 ] ) == 'not created' : sahara_status = 'unknown' else : sahara_status = tokens [ - 1 ] return sahara_status
Returns the status of the sandbox mode given output from vagrant sandbox status .
39,221
def deserialize(obj):
    """Convert a JSON dict produced by ``serialize`` back into an object.

    Dicts tagged with ``'__class__': 'datetime'`` become timezone-aware
    datetimes; ``'StreamingBody'`` becomes a StringIO over the saved body.
    Anything else is returned unchanged.
    """
    target = dict(obj)
    class_name = target.pop('__class__', None)
    # FIX: '__module__' was previously popped from *obj* (mutating the
    # caller's dict) while remaining inside *target*, so the
    # datetime.datetime(**target) call below blew up with an unexpected
    # '__module__' keyword. Remove it from the working copy instead.
    target.pop('__module__', None)
    if class_name == 'datetime':
        return datetime.datetime(tzinfo=utc, **target)
    if class_name == 'StreamingBody':
        return StringIO(target['body'])
    return obj
Convert JSON dicts back into objects .
39,222
def serialize ( obj ) : result = { '__class__' : obj . __class__ . __name__ } try : result [ '__module__' ] = obj . __module__ except AttributeError : pass if isinstance ( obj , datetime . datetime ) : result [ 'year' ] = obj . year result [ 'month' ] = obj . month result [ 'day' ] = obj . day result [ 'hour' ] = obj . hour result [ 'minute' ] = obj . minute result [ 'second' ] = obj . second result [ 'microsecond' ] = obj . microsecond return result if isinstance ( obj , StreamingBody ) : result [ 'body' ] = obj . read ( ) obj . _raw_stream = StringIO ( result [ 'body' ] ) obj . _amount_read = 0 return result raise TypeError ( "Type not serializable" )
Convert objects into JSON structures .
39,223
def _serialize_json(obj, fp):
    """Write *obj* to the stream *fp* as indented JSON.

    Non-JSON-native values are encoded through the module's ``serialize``
    hook.
    """
    json.dump(obj, fp, default=serialize, indent=4)
Serialize obj as a JSON formatted stream to fp
39,224
def get_serializer(serializer_format):
    """Return the serializer function for *serializer_format*.

    Returns None for unrecognised formats (matching the original
    fall-through behavior).
    """
    dispatch = {
        Format.JSON: _serialize_json,
        Format.PICKLE: _serialize_pickle,
    }
    return dispatch.get(serializer_format)
Get the serializer for a specific format
39,225
def get_deserializer(serializer_format):
    """Return the deserializer function for *serializer_format*.

    Returns None for unrecognised formats (matching the original
    fall-through behavior).
    """
    dispatch = {
        Format.JSON: _deserialize_json,
        Format.PICKLE: _deserialize_pickle,
    }
    return dispatch.get(serializer_format)
Get the deserializer for a specific format
39,226
def get_next_file_path(self, service, operation):
    """Return ``(path, format)`` of the next canned-response file to replay.

    Response files are named ``[prefix.]service.operation_N``; a per-base
    counter in ``self._index`` advances on every hit and wraps back to 1
    when file N does not exist, so responses replay in a loop. Raises
    IOError when not even file 1 exists.
    """
    base_name = '{0}.{1}'.format(service, operation)
    if self.prefix:
        base_name = '{0}.{1}'.format(self.prefix, base_name)
    LOG.debug('get_next_file_path: %s', base_name)
    next_file = None
    serializer_format = None
    # setdefault: first lookup for this base starts the counter at 1
    index = self._index.setdefault(base_name, 1)
    while not next_file:
        file_name = os.path.join(self._data_path, base_name + '_{0}'.format(index))
        next_file, serializer_format = self.find_file_format(file_name)
        if next_file:
            # found: advance the counter for the next call
            self._index[base_name] += 1
        elif index != 1:
            # ran past the last numbered file: wrap around and retry at 1
            index = 1
            self._index[base_name] = 1
        else:
            # even _1 is missing in every allowed format: give up
            raise IOError('response file ({0}.[{1}]) not found'.format(file_name, "|".join(Format.ALLOWED)))
    return next_file, serializer_format
Returns a tuple with the next file to read and the serializer format used
39,227
def _mock_request(self, **kwargs):
    """Stand-in for make_request that replays recorded responses.

    Bypasses all network calls: the service and operation are read off the
    supplied operation model and routed to ``load_response``.
    """
    model = kwargs.get('model')
    service = model.service_model.endpoint_prefix
    operation = model.name
    LOG.debug('_make_request: %s.%s', service, operation)
    return self.load_response(service, operation)
A mocked out make_request call that bypasses all network calls and simply returns any mocked responses defined .
39,228
def parse_authentication_request(self, request_body, http_headers=None):
    """Parse and validate an OIDC authentication request body.

    Runs every registered request validator over the parsed request and
    returns the resulting AuthorizationRequest.
    """
    parsed = AuthorizationRequest().deserialize(request_body)
    for check in self.authentication_request_validators:
        check(parsed)
    logger.debug('parsed authentication_request: %s', parsed)
    return parsed
Parses and verifies an authentication request .
39,229
def authorize(self, authentication_request, user_id, extra_id_token_claims=None):
    """Create an Authorization Response for the given request and user.

    Depending on the requested ``response_type``, the response may contain
    an authorization code, an access token and/or a signed ID token; the
    request's ``state`` is echoed back when present.
    """
    # honour a caller-provided subject identifier ('sub') if the userinfo
    # store defines one, otherwise derive a pairwise/public identifier
    custom_sub = self.userinfo[user_id].get('sub')
    if custom_sub:
        self.authz_state.subject_identifiers[user_id] = {'public': custom_sub}
        sub = custom_sub
    else:
        sub = self._create_subject_identifier(user_id, authentication_request['client_id'], authentication_request['redirect_uri'])
    self._check_subject_identifier_matches_requested(authentication_request, sub)
    response = AuthorizationResponse()
    authz_code = None
    if 'code' in authentication_request['response_type']:
        authz_code = self.authz_state.create_authorization_code(authentication_request, sub)
        response['code'] = authz_code
    access_token_value = None
    if 'token' in authentication_request['response_type']:
        access_token = self.authz_state.create_access_token(authentication_request, sub)
        access_token_value = access_token.value
        self._add_access_token_to_response(response, access_token)
    if 'id_token' in authentication_request['response_type']:
        # extra claims may be given as a dict or as a factory callable
        if extra_id_token_claims is None:
            extra_id_token_claims = {}
        elif callable(extra_id_token_claims):
            extra_id_token_claims = extra_id_token_claims(user_id, authentication_request['client_id'])
        requested_claims = self._get_requested_claims_in(authentication_request, 'id_token')
        # response_type == 'id_token' alone: no userinfo endpoint access,
        # so scope-derived claims must go directly into the ID token
        if len(authentication_request['response_type']) == 1:
            requested_claims.update(scope2claims(authentication_request['scope'], extra_scope_dict=self.extra_scopes))
        user_claims = self.userinfo.get_claims_for(user_id, requested_claims)
        response['id_token'] = self._create_signed_id_token(authentication_request['client_id'], sub, user_claims, authentication_request.get('nonce'), authz_code, access_token_value, extra_id_token_claims)
        logger.debug('issued id_token=%s from requested_claims=%s userinfo=%s extra_claims=%s', response['id_token'], requested_claims, user_claims, extra_id_token_claims)
    if 'state' in authentication_request:
        response['state'] = authentication_request['state']
    return response
Creates an Authentication Response for the specified authentication request and local identifier of the authenticated user .
39,230
def _add_access_token_to_response ( self , response , access_token ) : response [ 'access_token' ] = access_token . value response [ 'token_type' ] = access_token . type response [ 'expires_in' ] = access_token . expires_in
Adds the Access Token and the associated parameters to the Token Response .
39,231
def _format_mongodb_uri ( parsed_uri ) : user_pass = '' if parsed_uri . get ( 'username' ) and parsed_uri . get ( 'password' ) : user_pass = '{username!s}:{password!s}@' . format ( ** parsed_uri ) _nodes = [ ] for host , port in parsed_uri . get ( 'nodelist' ) : if ':' in host and not host . endswith ( ']' ) : host = '[{!s}]' . format ( host ) if port == 27017 : _nodes . append ( host ) else : _nodes . append ( '{!s}:{!s}' . format ( host , port ) ) nodelist = ',' . join ( _nodes ) options = '' if parsed_uri . get ( 'options' ) : _opt_list = [ ] for key , value in parsed_uri . get ( 'options' ) . items ( ) : if isinstance ( value , bool ) : value = str ( value ) . lower ( ) _opt_list . append ( '{!s}={!s}' . format ( key , value ) ) options = '?' + '&' . join ( _opt_list ) db_name = parsed_uri . get ( 'database' ) or '' res = "mongodb://{user_pass!s}{nodelist!s}/{db_name!s}{options!s}" . format ( user_pass = user_pass , nodelist = nodelist , db_name = db_name , options = options ) return res
Painstakingly reconstruct a MongoDB URI parsed using pymongo . uri_parser . parse_uri .
39,232
def sanitized_uri(self):
    """Return a log-safe version of the database URI (cached).

    The password is masked and the node list collapsed to its first host.
    """
    if self._sanitized_uri is None:
        redacted = copy.copy(self._parsed_uri)
        if 'username' in redacted:
            redacted['password'] = 'secret'
        redacted['nodelist'] = [redacted['nodelist'][0]]
        self._sanitized_uri = _format_mongodb_uri(redacted)
    return self._sanitized_uri
Return the database URI we're using in a format sensible for logging etc.
39,233
def get_database(self, database_name=None, username=None, password=None):
    """Return a pymongo database handle, authenticating first.

    Explicit *username*/*password* win; otherwise credentials from the
    parsed connection URI are used (respecting an 'authSource' option).
    Raises ValueError when no database name can be determined.
    """
    if database_name is None:
        database_name = self._database_name
    if database_name is None:
        raise ValueError('No database_name supplied, and no default provided to __init__')
    db = self._connection[database_name]
    if username and password:
        db.authenticate(username, password)
    elif self._parsed_uri.get("username", None):
        # fall back to URI credentials; honour authSource when configured
        if 'authSource' in self._options and self._options['authSource'] is not None:
            db.authenticate(self._parsed_uri.get("username", None), self._parsed_uri.get("password", None), source=self._options['authSource'])
        else:
            db.authenticate(self._parsed_uri.get("username", None), self._parsed_uri.get("password", None))
    return db
Get a pymongo database handle after authenticating .
39,234
def get_collection(self, collection, database_name=None, username=None, password=None):
    """Return a pymongo collection handle (authenticating via get_database)."""
    database = self.get_database(database_name, username, password)
    return database[collection]
Get a pymongo collection handle .
39,235
def balanced_binary_tree(n_leaves):
    """Create a balanced binary tree over ``n_leaves`` leaves.

    Leaves are the integers ``0..n_leaves-1``; internal nodes are 2-tuples
    produced by repeatedly halving the leaf range.
    """
    def build(ids):
        if len(ids) == 1:
            return ids[0]
        if len(ids) == 2:
            return (ids[0], ids[1])
        mid = len(ids) // 2
        return (build(ids[:mid]), build(ids[mid:]))
    return build(np.arange(n_leaves))
Create a balanced binary tree
39,236
def decision_list(n_leaves):
    """Create a decision list (caterpillar tree): ``(0, (1, (2, ...)))``.

    Leaves are the integers ``0..n_leaves-1``.

    FIX: the original recursion had no base case for a single remaining
    leaf, so ``decision_list(1)`` crashed with an IndexError; a one-leaf
    input now returns that leaf directly.
    """
    def build(ids):
        if len(ids) == 1:
            return ids[0]
        if len(ids) == 2:
            return (ids[0], ids[1])
        return (ids[0], build(ids[1:]))
    return build(np.arange(n_leaves))
Create a decision list
39,237
def random_tree(n_leaves):
    """Create a random binary tree over ``n_leaves`` leaves.

    Each internal node splits its contiguous leaf range at a uniformly
    random interior point.
    """
    def build(ids):
        if len(ids) == 1:
            return ids[0]
        if len(ids) == 2:
            return (ids[0], ids[1])
        cut = npr.randint(1, len(ids) - 1)
        return (build(ids[:cut]), build(ids[cut:]))
    return build(np.arange(n_leaves))
Randomly partition the nodes
39,238
def leaves(tree):
    """Collect the leaf labels of *tree* in left-to-right order.

    A leaf is any scalar; internal nodes must be 2-tuples. Raises for
    anything else.
    """
    found = []
    def visit(node):
        if np.isscalar(node):
            found.append(node)
        elif isinstance(node, tuple) and len(node) == 2:
            visit(node[0])
            visit(node[1])
        else:
            raise Exception("Not a tree!")
    visit(tree)
    return found
Return the leaves in this subtree .
39,239
def choices(tree):
    """Return each leaf's address as internal-node left/right choices.

    The result is an ``(n_leaves, n_leaves - 1)`` array where entry
    ``[leaf, node]`` is 0/1 for a left/right branch taken at that internal
    node, and NaN where the node is not on the leaf's path. Internal nodes
    are numbered in pre-order.
    """
    n = len(leaves(tree))
    addr = np.nan * np.ones((n, n - 1))

    def walk(node, index, path):
        if np.isscalar(node):
            # record the left/right decision at every node along the path
            for node_idx, direction in path:
                addr[node, node_idx] = direction
            return index
        elif isinstance(node, tuple) and len(node) == 2:
            nxt = walk(node[0], index + 1, path + [(index, 0)])
            nxt = walk(node[1], nxt, path + [(index, 1)])
            return nxt
        else:
            raise Exception("Not a tree!")

    walk(tree, 0, [])
    return addr
Get the address of each leaf node in terms of internal node choices
39,240
def max_likelihood(self, data, weights=None, stats=None, lmbda=0.1):
    """Fit the regression weights by penalized maximum likelihood.

    Alternative to MCMC with Polya-gamma augmentation: each output
    dimension's logistic-regression weights are optimized independently
    with Newton-CG via autograd. *weights*/*stats* are unsupported and
    must be None; *lmbda* is an L2 penalty strength.
    """
    import autograd.numpy as anp
    from autograd import value_and_grad, hessian_vector_product
    from scipy.optimize import minimize
    assert weights is None
    assert stats is None
    # normalize a single (x, y) pair into a one-element list
    if not isinstance(data, list):
        assert isinstance(data, tuple) and len(data) == 2
        data = [data]

    def loglogistic(psi):
        # log sigmoid(psi), written so autograd can differentiate it
        return psi - anp.log(1 + anp.exp(psi))

    # one independent logistic regression per output dimension
    for n in range(self.D_out):
        def nll(abn):
            # abn packs the weight vector a_n and bias b_n
            an, bn = abn[:-1], abn[-1]
            T = 0
            ll = 0
            for (x, y) in data:
                T += x.shape[0]
                yn = y[:, n]
                psi = anp.dot(x, an) + bn
                ll += anp.sum(yn * loglogistic(psi))
                ll += anp.sum((1 - yn) * loglogistic(-1. * psi))
            # L2 penalty, scaled by the total number of datapoints
            ll -= lmbda * T * anp.sum(an ** 2)
            ll -= lmbda * T * bn ** 2
            # negative *average* log likelihood
            return -1 * ll / T

        abn0 = np.concatenate((self.A[n], self.b[n]))
        res = minimize(value_and_grad(nll), abn0,
                       tol=1e-3,
                       method="Newton-CG",
                       jac=True,
                       hessp=hessian_vector_product(nll))
        assert res.success
        self.A[n] = res.x[:-1]
        self.b[n] = res.x[-1]
As an alternative to MCMC with Polya - gamma augmentation we also implement maximum likelihood learning via gradient descent with autograd . This follows the pybasicbayes convention .
39,241
def resample(self, data, mask=None, omega=None):
    """Gibbs-resample the regression weights given Polya-gamma variables.

    Multinomial regression is somewhat special: the kappa function must be
    computed over the entire dataset, not one column at a time. *omega*
    holds the PG auxiliary variables (resampled here when None); *mask*
    marks observed entries of y.
    """
    if not isinstance(data, list):
        assert isinstance(data, tuple) and len(data) == 2, "datas must be an (x,y) tuple or a list of such tuples"
        data = [data]
    if mask is None:
        # default: everything observed
        mask = [np.ones(y.shape, dtype=bool) for x, y in data]
    if omega is None:
        omega = self._resample_auxiliary_variables(data)
    # copy so the update does not alias shared parameter arrays
    self.A = self.A.copy()
    self.b = self.b.copy()
    D = self.D_in
    for n in range(self.D_out):
        # Gaussian prior over (a_n, b_n) in information form (J, h)
        prior_Sigma = np.zeros((D + 1, D + 1))
        prior_Sigma[:D, :D] = self.sigmasq_A[n]
        prior_Sigma[D, D] = self.sigmasq_b[n]
        prior_J = np.linalg.inv(prior_Sigma)
        prior_h = prior_J.dot(np.concatenate((self.mu_A[n], [self.mu_b[n]])))
        lkhd_h = np.zeros(D + 1)
        lkhd_J = np.zeros((D + 1, D + 1))
        for d, m, o in zip(data, mask, omega):
            if isinstance(d, tuple):
                x, y = d
            else:
                # packed array: first D columns are x, the rest are y
                x, y = d[:, :D], d[:, D:]
            # augment x with a ones column so the bias is sampled jointly
            augx = np.hstack((x, np.ones((x.shape[0], 1))))
            J = o * m
            h = self.kappa_func(y) * m
            lkhd_J += (augx * J[:, n][:, None]).T.dot(augx)
            lkhd_h += h[:, n].T.dot(augx)
        post_h = prior_h + lkhd_h
        post_J = prior_J + lkhd_J
        joint_sample = sample_gaussian(J=post_J, h=post_h)
        self.A[n, :] = joint_sample[:D]
        self.b[n] = joint_sample[D]
Multinomial regression is somewhat special . We have to compute the kappa functions for the entire dataset not just for one column of the data at a time .
39,242
def create_authorization_code(self, authorization_request, subject_identifier, scope=None):
    """Create an authorization code bound to the request and user.

    Raises InvalidSubjectIdentifier for unknown subjects. *scope*
    overrides the request's scope when given.
    """
    if not self._is_valid_subject_identifier(subject_identifier):
        raise InvalidSubjectIdentifier('{} unknown'.format(subject_identifier))
    granted = ' '.join(scope or authorization_request['scope'])
    logger.debug('creating authz code for scope=%s', granted)
    code = rand_str()
    info = {
        'used': False,
        'exp': int(time.time()) + self.authorization_code_lifetime,
        'sub': subject_identifier,
        'granted_scope': granted,
        self.KEY_AUTHORIZATION_REQUEST: authorization_request.to_dict(),
    }
    self.authorization_codes[code] = info
    logger.debug('new authz_code=%s to client_id=%s for sub=%s valid_until=%s',
                 code, authorization_request['client_id'],
                 subject_identifier, info['exp'])
    return code
Creates an authorization code bound to the authorization request and the authenticated user identified by the subject identifier .
39,243
def create_access_token(self, authorization_request, subject_identifier, scope=None):
    """Create an access token bound to the request and user.

    Raises InvalidSubjectIdentifier for unknown subjects. *scope*
    overrides the request's scope when given.
    """
    if not self._is_valid_subject_identifier(subject_identifier):
        raise InvalidSubjectIdentifier('{} unknown'.format(subject_identifier))
    granted = scope or authorization_request['scope']
    return self._create_access_token(subject_identifier,
                                     authorization_request.to_dict(),
                                     ' '.join(granted))
Creates an access token bound to the authentication request and the authenticated user identified by the subject identifier .
39,244
def _create_access_token(self, subject_identifier, auth_req, granted_scope, current_scope=None):
    """Mint an access token bound to subject, client and scope.

    *current_scope* (when given) is the effective scope of this token;
    *granted_scope* records what was originally granted.
    """
    access_token = AccessToken(rand_str(), self.access_token_lifetime)
    effective_scope = current_scope or granted_scope
    logger.debug('creating access token for scope=%s', effective_scope)
    info = {
        'iat': int(time.time()),
        'exp': int(time.time()) + self.access_token_lifetime,
        'sub': subject_identifier,
        'client_id': auth_req['client_id'],
        'aud': [auth_req['client_id']],
        'scope': effective_scope,
        'granted_scope': granted_scope,
        'token_type': access_token.BEARER_TOKEN_TYPE,
        self.KEY_AUTHORIZATION_REQUEST: auth_req,
    }
    self.access_tokens[access_token.value] = info
    logger.debug('new access_token=%s to client_id=%s for sub=%s valid_until=%s',
                 access_token.value, auth_req['client_id'],
                 subject_identifier, info['exp'])
    return access_token
Creates an access token bound to the subject identifier client id and requested scope .
39,245
def exchange_code_for_token(self, authorization_code):
    """Exchange an authorization code for an access token.

    Raises InvalidAuthorizationCode when the code is unknown, already
    used, or expired. A successfully exchanged code is marked used so it
    cannot be replayed.
    """
    if authorization_code not in self.authorization_codes:
        raise InvalidAuthorizationCode('{} unknown'.format(authorization_code))
    authz_info = self.authorization_codes[authorization_code]
    # replay protection: a code may only be exchanged once
    if authz_info['used']:
        logger.debug('detected already used authz_code=%s', authorization_code)
        raise InvalidAuthorizationCode('{} has already been used'.format(authorization_code))
    elif authz_info['exp'] < int(time.time()):
        logger.debug('detected expired authz_code=%s, now=%s > exp=%s ', authorization_code, int(time.time()), authz_info['exp'])
        raise InvalidAuthorizationCode('{} has expired'.format(authorization_code))
    # mark used *before* minting the token
    authz_info['used'] = True
    access_token = self._create_access_token(authz_info['sub'], authz_info[self.KEY_AUTHORIZATION_REQUEST], authz_info['granted_scope'])
    logger.debug('authz_code=%s exchanged to access_token=%s', authorization_code, access_token.value)
    return access_token
Exchanges an authorization code for an access token .
39,246
def create_refresh_token(self, access_token_value):
    """Create a refresh token bound to the given access token.

    Returns None when refresh tokens are disabled (zero/unset lifetime).
    Raises InvalidAccessToken for an unknown access token value.
    """
    if access_token_value not in self.access_tokens:
        raise InvalidAccessToken('{} unknown'.format(access_token_value))
    if not self.refresh_token_lifetime:
        # FIX: log message previously read "issued for for" (doubled word)
        logger.debug('no refresh token issued for access_token=%s', access_token_value)
        return None
    refresh_token = rand_str()
    authz_info = {
        'access_token': access_token_value,
        'exp': int(time.time()) + self.refresh_token_lifetime,
    }
    self.refresh_tokens[refresh_token] = authz_info
    logger.debug('issued refresh_token=%s expiring=%d for access_token=%s',
                 refresh_token, authz_info['exp'], access_token_value)
    return refresh_token
Creates a refresh token bound to the specified access token.
39,247
def _psi_n ( x , n , b ) : return 2 ** ( b - 1 ) / gamma ( b ) * ( - 1 ) ** n * np . exp ( gammaln ( n + b ) - gammaln ( n + 1 ) + np . log ( 2 * n + b ) - 0.5 * np . log ( 2 * np . pi * x ** 3 ) - ( 2 * n + b ) ** 2 / ( 8. * x ) )
Compute the n - th term in the infinite sum of the Jacobi density .
39,248
def _tilt ( omega , b , psi ) : return np . cosh ( psi / 2.0 ) ** b * np . exp ( - psi ** 2 / 2.0 * omega )
Compute the tilt of the PG density for value omega and tilt psi .
39,249
def run_from_argv(self, argv):
    """Install the Gherkin test runner before argv parsing.

    Setting ``self.test_runner`` here lets the runner contribute its own
    command-line options during option parsing.
    """
    self.test_runner = test_runner_class
    super(Command, self).run_from_argv(argv)
Set the default Gherkin test runner for its options to be parsed .
39,250
def handle(self, *test_labels, **options):
    """Run the test command, defaulting to the Gherkin test runner."""
    # only override when no (truthy) runner was supplied on the CLI
    if not options.get('testrunner'):
        options['testrunner'] = test_runner_class
    return super(Command, self).handle(*test_labels, **options)
Set the default Gherkin test runner .
39,251
def django_url(step, url=None):
    """Return the absolute URL on the live test server for *url*.

    With no *url*, the server root is returned.
    """
    root = step.test.live_server_url
    return urljoin(root, url) if url else root
The URL for a page from the test server .
39,252
def namespace(self, namespace, to=None):
    """Filter the queryset by apphook-config namespace.

    Tries to guess which related field to use for the lookup; pass *to*
    to pick one explicitly when the model has several ApphookConfig
    relations. Raises ValueError when no relation exists, *to* is not a
    valid relation, or the relation is ambiguous.
    """
    fields = get_apphook_field_names(self.model)
    if not fields:
        raise ValueError(ugettext('Can\'t find any relation to an ApphookConfig model in {0}').format(self.model.__name__))
    if to and to not in fields:
        raise ValueError(ugettext('Can\'t find relation to ApphookConfig model named ' '"{0}" in "{1}"').format(to, self.model.__name__))
    if len(fields) > 1 and to not in fields:
        # more than one candidate and no explicit choice: refuse to guess
        raise ValueError(ugettext('"{0}" has {1} relations to an ApphookConfig model.' ' Please, specify which one to use in argument "to".' ' Choices are: {2}').format(self.model.__name__, len(fields), ', '.join(fields)))
    else:
        if not to:
            # unambiguous: default to the single relation found
            to = fields[0]
    lookup = '{0}__namespace'.format(to)
    kwargs = {lookup: namespace}
    return self.filter(**kwargs)
Filter by namespace. Tries to guess which field to use in the lookup. Accepts a `to` argument if you need to specify the field explicitly.
39,253
def _app_config_select(self, request, obj):
    """Resolve which apphook config applies to the admin request.

    Tri-state return: a config instance when one can be determined (the
    only existing config, the object's own config, or the one named in the
    querystring); None when a choice still has to be made; False when
    nothing applies.
    """
    if not obj and not request.GET.get(self.app_config_attribute, False):
        # no object and nothing requested: auto-pick iff exactly one exists
        config_model = get_apphook_model(self.model, self.app_config_attribute)
        if config_model.objects.count() == 1:
            return config_model.objects.first()
        return None
    elif obj and getattr(obj, self.app_config_attribute, False):
        # editing an existing object: use its configured apphook
        return getattr(obj, self.app_config_attribute)
    elif request.GET.get(self.app_config_attribute, False):
        # config pre-selected via querystring parameter (its pk)
        config_model = get_apphook_model(self.model, self.app_config_attribute)
        return config_model.objects.get(pk=int(request.GET.get(self.app_config_attribute, False)))
    return False
Return the select value for apphook configs
39,254
def _set_config_defaults(self, request, form, obj=None):
    """Seed form-field initial values from the current apphook config.

    Walks ``app_config_values`` (config option -> form field name) and
    copies each option's value into the matching base field's initial.
    """
    for option, field_name in self.app_config_values.items():
        if field_name in form.base_fields:
            form.base_fields[field_name].initial = self.get_config_data(request, obj, option)
    return form
Cycle through app_config_values and sets the form value according to the options in the current apphook config .
39,255
def get_config_data(self, request, obj, name):
    """Return option *name* from the relevant AppHookConfig instance.

    The config is taken from the object being edited when possible, else
    from the pk in the request's querystring; returns None when no config
    can be resolved.
    """
    config = None
    if obj:
        try:
            config = getattr(obj, self.app_config_attribute, False)
        except ObjectDoesNotExist:
            pass
    if not config and self.app_config_attribute in request.GET:
        config_model = get_apphook_model(self.model, self.app_config_attribute)
        try:
            config = config_model.objects.get(pk=request.GET[self.app_config_attribute])
        except config_model.DoesNotExist:
            pass
    return getattr(config, name) if config else None
Method that retrieves a configuration option for a specific AppHookConfig instance
39,256
def get_form(self, request, obj=None, **kwargs):
    """Build the admin form, wiring in the apphook config selection.

    When a config can be resolved it becomes the field's initial value
    (and is injected into request.GET for downstream code); when the
    choice is still open on a GET, the form is narrowed to just the
    config selector so the user must pick one first.
    """
    form = super(ModelAppHookConfig, self).get_form(request, obj, **kwargs)
    if self.app_config_attribute not in form.base_fields:
        return form
    app_config_default = self._app_config_select(request, obj)
    if app_config_default:
        form.base_fields[self.app_config_attribute].initial = app_config_default
        # mirror the selection into GET so later lookups see it;
        # request.GET is immutable, hence the copy
        get = copy.copy(request.GET)
        get[self.app_config_attribute] = app_config_default.pk
        request.GET = get
    elif app_config_default is None and request.method == 'GET':
        # ambiguous config on a plain GET: show only the selector field
        class InitialForm(form):
            class Meta(form.Meta):
                fields = (self.app_config_attribute,)
        form = InitialForm
    form = self._set_config_defaults(request, form, obj)
    return form
Provides a flexible way to get the right form according to the context
39,257
def _models_generator():
    """Yield (verbose name, model) pairs for every installed model.

    Both singular and plural verbose names are yielded, lower-cased, so a
    hash of names to models can be built from this generator.
    """
    for app_config in apps.get_app_configs():
        for model in app_config.get_models():
            meta = model._meta
            yield (str(meta.verbose_name).lower(), model)
            yield (str(meta.verbose_name_plural).lower(), model)
Build a hash of model verbose names to models
39,258
def get_model(name):
    """Look up a model class by its (case-insensitive) verbose name.

    This lets steps refer to models by their verbose names.
    NOTE(review): assert is stripped under ``python -O``; the exception
    type (AssertionError) is preserved here since callers may rely on it.
    """
    found = MODELS.get(name.lower(), None)
    assert found, "Could not locate model by name '%s'" % name
    return found
Convert a model's verbose name to the model class. This allows us to use the model's verbose name in steps.
39,259
def reset_sequence(model):
    """Reset the primary-key sequence for *model*.

    Useful after inserting rows with explicit ids so subsequent inserts
    do not collide.
    """
    statements = connection.ops.sequence_reset_sql(no_style(), [model])
    for statement in statements:
        connection.cursor().execute(statement)
Reset the ID sequence for a model .
39,260
def _dump_model ( model , attrs = None ) : fields = [ ] for field in model . _meta . fields : fields . append ( ( field . name , str ( getattr ( model , field . name ) ) ) ) if attrs is not None : for attr in attrs : fields . append ( ( attr , str ( getattr ( model , attr ) ) ) ) for field in model . _meta . many_to_many : vals = getattr ( model , field . name ) fields . append ( ( field . name , '{val} ({count})' . format ( val = ', ' . join ( map ( str , vals . all ( ) ) ) , count = vals . count ( ) , ) ) ) print ( ', ' . join ( '{0}={1}' . format ( field , value ) for field , value in fields ) )
Dump the model fields for debugging .
39,261
def _model_exists_step(self, model, should_exist):
    """Assert rows matching the step table do (or do not) exist.

    model: verbose model name from the step text.
    should_exist: truthy to require matches, falsy to require absence.

    On any failure the current DB rows are dumped to aid debugging and an
    AssertionError summarising the failing row count is raised.
    """
    model = get_model(model)
    data = guess_types(self.hashes)
    queryset = model.objects
    try:
        # Allow a project-registered custom existence check per model.
        existence_check = _TEST_MODEL[model]
    except KeyError:
        existence_check = test_existence
    failed = 0
    # Check every row and count failures. Previously the try/except wrapped
    # the whole loop, so the first failure aborted checking and `failed`
    # could never exceed 1 despite the "%i rows" message.
    for hash_ in data:
        try:
            match = existence_check(queryset, hash_)
            if should_exist:
                assert match, "%s does not exist: %s" % (model.__name__, hash_)
            else:
                assert not match, "%s exists: %s" % (model.__name__, hash_)
        except AssertionError as exc:
            print(exc)
            failed += 1
    if failed:
        print("Rows in DB are:")
        for existing_model in queryset.all():
            _dump_model(existing_model,
                        attrs=[k[1:] for k in data[0].keys()
                               if k.startswith('@')])
        if should_exist:
            raise AssertionError("%i rows missing" % failed)
        else:
            raise AssertionError("%i rows found" % failed)
Test for the existence of a model matching the given data .
39,262
def _write_models_step(self, model, field=None):
    """Create or update rows of *model* from the step's table data.

    A project-registered writer for the model is preferred; otherwise the
    generic write_models helper is used.
    """
    model_cls = get_model(model)
    rows = guess_types(self.hashes)
    try:
        writer = _WRITE_MODEL[model_cls]
    except KeyError:
        writer = partial(write_models, model_cls)
    writer(rows, field)
Write or update a model .
39,263
def _create_models_for_relation_step(self, rel_model_name, rel_key,
                                     rel_value, model):
    """Create rows of *model*, each linked to one existing related instance.

    The related instance is looked up on the model named *rel_model_name*
    by ``rel_key == rel_value``; a column named after the related model is
    injected into every row before writing.
    """
    target = get_model(model)
    related = get_model(rel_model_name).objects.get(**{rel_key: rel_value})
    rows = guess_types(self.hashes)
    for row in rows:
        # rel_model_name comes from the step text and is already a string.
        row[rel_model_name] = related
    try:
        writer = _WRITE_MODEL[target]
    except KeyError:
        writer = partial(write_models, target)
    writer(rows, None)
Create a new model linked to the given model .
39,264
def _create_m2m_links_step(self, rel_model_name, rel_key, rel_value, relation_name):
    """Link rows to an existing instance through a many-to-many relation.

    rel_model_name/rel_key/rel_value identify the existing instance;
    relation_name names its M2M relation, matched by field name or verbose
    name, with a plain-attribute fallback (e.g. a reverse accessor). Each
    row in the step table must uniquely identify an instance on the other
    side of the relation.
    """
    lookup = {rel_key: rel_value}
    rel_model = get_model(rel_model_name).objects.get(**lookup)
    relation = None
    for m2m in rel_model._meta.many_to_many:
        if relation_name in (m2m.name, m2m.verbose_name):
            relation = getattr(rel_model, m2m.name)
            break
    if not relation:
        # Fall back to any attribute of that name — presumably a reverse
        # m2m manager not listed in _meta.many_to_many (TODO confirm).
        try:
            relation = getattr(rel_model, relation_name)
        except AttributeError:
            pass
    assert relation, "%s does not have a many-to-many relation named '%s'" % (
        rel_model._meta.verbose_name.capitalize(),
        relation_name,
    )
    m2m_model = relation.model
    for hash_ in self.hashes:
        relation.add(m2m_model.objects.get(**hash_))
Link many - to - many models together .
39,265
def _model_count_step(self, count, model):
    """Assert the database holds exactly *count* rows of *model*."""
    model_cls = get_model(model)
    expected = int(count)
    found = model_cls.objects.count()
    assert found == expected, "Expected %d %s, found %d." % (
        expected, model_cls._meta.verbose_name_plural, found)
Count the number of models in the database .
39,266
def mail_sent_count(self, count):
    """Assert that exactly *count* emails were sent (Django test outbox)."""
    expected = int(count)
    actual = len(mail.outbox)
    message = "Expected to send {0} email(s), got {1}.".format(expected, actual)
    assert expected == actual, message
Test that count mails have been sent .
39,267
def dump_emails(part):
    """Print attribute *part* of every sent email, to aid debugging."""
    print("Sent emails:")
    for message in mail.outbox:
        print(getattr(message, part))
Show the sent emails tested parts to aid in debugging .
39,268
def namespace_url(context, view_name, *args, **kwargs):
    """Template tag: reverse *view_name* inside the current apphook namespace.

    A ``namespace`` kwarg overrides detection; otherwise the namespace is
    derived from the request's current apphook. When a ``default`` kwarg is
    supplied, it is returned instead of raising ``NoReverseMatch``.
    """
    namespace = kwargs.pop('namespace', None)
    if not namespace:
        namespace, __ = get_app_instance(context['request'])
    if namespace:
        namespace += ':'
    reverse = partial(urls.reverse, '{0:s}{1:s}'.format(namespace, view_name))
    if 'default' not in kwargs:
        # No fallback requested: let NoReverseMatch propagate to the caller.
        if kwargs:
            return reverse(kwargs=kwargs)
        elif args:
            return reverse(args=args)
        else:
            return reverse()
    default = kwargs.pop('default', None)
    try:
        if kwargs:
            return reverse(kwargs=kwargs)
        elif args:
            return reverse(args=args)
        else:
            return reverse()
    except urls.NoReverseMatch:
        return default
Returns an absolute URL matching named view with its parameters and the provided application instance namespace .
39,269
def get_app_instance(request):
    """Return ``(namespace, config)`` for the apphook serving *request*.

    Falls back to ``('', None)`` when the current page has no apphook
    attached or the request path cannot be resolved.
    """
    app = None
    if getattr(request, 'current_page', None) and request.current_page.application_urls:
        app = apphook_pool.get_apphook(request.current_page.application_urls)
    if app and app.app_config:
        try:
            config = None
            # Resolve under the request's language so i18n URL patterns match.
            with override(get_language_from_request(request, check_path=True)):
                namespace = resolve(request.path_info).namespace
                config = app.get_config(namespace)
            return namespace, config
        except Resolver404:
            pass
    return '', None
Returns a tuple containing the current namespace and the AppHookConfig instance
39,270
def setup_config(form_class, config_model=None):
    """Register *form_class* as the 'config' app-data form for a config model.

    When *config_model* is omitted, the model declared on the form class
    itself is used.
    """
    model = form_class.model if config_model is None else config_model
    app_registry.register('config', AppDataContainer.from_form(form_class), model)
Register the provided form as config form for the provided config model
39,271
def _get_apphook_field_names(model):
    """Return the names of FK fields on *model* that target an AppHookConfig."""
    from .models import AppHookConfig
    return [
        field.name
        for field in model._meta.fields
        if isinstance(field, ForeignKey)
        and issubclass(field.remote_field.model, AppHookConfig)
    ]
Return all foreign key field names for a AppHookConfig based model
39,272
def get_apphook_field_names(model):
    """Return (and memoize on the model class) its apphook FK field names."""
    cache_attr = APP_CONFIG_FIELDS_KEY.format(
        app_label=model._meta.app_label,
        model_name=model._meta.object_name,
    ).lower()
    if not hasattr(model, cache_attr):
        setattr(model, cache_attr, _get_apphook_field_names(model))
    return getattr(model, cache_attr)
Cache app - hook field names on model
39,273
def get_apphook_configs(obj):
    """Return the apphook config instances attached to *obj* (possibly [])."""
    field_names = get_apphook_field_names(obj)
    if not field_names:
        return []
    return [getattr(obj, name) for name in field_names]
Get apphook configs for an object obj
39,274
def data_received(self, data):
    """Buffer bytes arriving from the modem and schedule message parsing."""
    _LOGGER.debug("Starting: data_received")
    _LOGGER.debug('Received %d bytes from PLM: %s',
                  len(data), binascii.hexlify(data))
    self._buffer.put_nowait(data)
    # Parsing runs asynchronously so the transport callback returns fast.
    asyncio.ensure_future(self._peel_messages_from_buffer(), loop=self._loop)
    _LOGGER.debug("Finishing: data_received")
Receive data from the protocol .
39,275
def connection_lost(self, exc):
    """Handle loss of the transport connection.

    exc is None on a clean EOF, otherwise the causing exception. Stops the
    writer task and notifies the registered connection-lost callback so the
    owner can reconnect.
    """
    if exc is None:
        _LOGGER.warning('End of file received from Insteon Modem')
    else:
        _LOGGER.warning('Lost connection to Insteon Modem: %s', exc)
    self.transport = None
    # NOTE(review): uses `self.loop` here while sibling methods use
    # `self._loop` — confirm both names exist on this class.
    asyncio.ensure_future(self.pause_writing(), loop=self.loop)
    if self._connection_lost_callback:
        self._connection_lost_callback()
Handle loss of the connection to the transport.
39,276
def add_all_link_done_callback(self, callback):
    """Register *callback* to be invoked when the ALDB has finished loading."""
    _LOGGER.debug('Added new callback %s ', callback)
    self._cb_load_all_link_db_done.append(callback)
Register a callback to be invoked when the ALDB is loaded .
39,277
def add_device_not_active_callback(self, callback):
    """Register *callback* to be invoked when a device stops responding."""
    _LOGGER.debug('Added new callback %s ', callback)
    self._cb_device_not_active.append(callback)
Register callback to be invoked when a device is not responding .
39,278
def poll_devices(self):
    """Request a state refresh from every known non-X10 device."""
    for addr in self.devices:
        dev = self.devices[addr]
        # X10 devices are skipped; only Insteon devices are polled here.
        if dev.address.is_x10:
            continue
        dev.async_refresh_state()
Request status updates from each device .
39,279
def send_msg(self, msg, wait_nak=True, wait_timeout=WAIT_TIMEOUT):
    """Place *msg* on the send queue.

    wait_nak: per the flag name, whether the writer should wait for an
        ACK/NAK response before proceeding — confirm against the writer.
    wait_timeout: seconds associated with that wait.
    """
    msg_info = MessageInfo(msg=msg, wait_nak=wait_nak, wait_timeout=wait_timeout)
    _LOGGER.debug("Queueing msg: %s", msg)
    self._send_queue.put_nowait(msg_info)
Place a message on the send queue for sending .
39,280
def start_all_linking(self, mode, group):
    """Put the modem into ALL-Linking mode for *group* using *mode*."""
    msg = StartAllLinking(mode, group)
    self.send_msg(msg)
Put the IM into All - Linking mode .
39,281
def add_x10_device(self, housecode, unitcode, feature='OnOff'):
    """Create an X10 device for housecode/unitcode and register it.

    Returns the new device, or None when no device class matches *feature*.
    """
    device = insteonplm.devices.create_x10(self, housecode, unitcode, feature)
    if device:
        self.devices[device.address.id] = device
    return device
Add an X10 device based on a feature description .
39,282
def device_not_active(self, addr):
    """Mark *addr* as handled in the ALDB list and notify listeners."""
    self.aldb_device_handled(addr)
    for notify in self._cb_device_not_active:
        notify(addr)
Handle inactive devices .
39,283
def aldb_device_handled(self, addr):
    """Drop *addr* from the pending ALDB device list once processed.

    Accepts either an Address instance or a raw id string.
    """
    key = addr.id if isinstance(addr, Address) else addr
    try:
        self._aldb_devices.pop(key)
        _LOGGER.debug('Removed ALDB device %s', key)
    except KeyError:
        _LOGGER.debug('Device %s not in ALDB device list', key)
    _LOGGER.debug('ALDB device count: %d', len(self._aldb_devices))
Remove device from ALDB device list .
39,284
async def pause_writing(self):
    """Stop the background writer task and yield control back to the loop."""
    self._restart_writer = False
    if self._writer_task:
        # Detach the auto-restart done-callback first so cancelling the
        # writer does not immediately restart it.
        self._writer_task.remove_done_callback(self.restart_writing)
        self._writer_task.cancel()
        # NOTE(review): awaiting a cancelled task raises CancelledError
        # unless the writer suppresses it internally — confirm.
        await self._writer_task
    await asyncio.sleep(0, loop=self._loop)
Pause writing .
39,285
def _get_plm_info(self):
    """Request the modem's identity information (GetImInfo)."""
    _LOGGER.info('Requesting Insteon Modem Info')
    msg = GetImInfo()
    self.send_msg(msg, wait_nak=True, wait_timeout=.5)
Request PLM Info .
39,286
def _load_all_link_database(self):
    """Begin loading the modem's ALL-Link Database.

    Marks the device list as loading and requests the first record;
    subsequent records are fetched as responses arrive.
    """
    _LOGGER.debug("Starting: _load_all_link_database")
    self.devices.state = 'loading'
    self._get_first_all_link_record()
    _LOGGER.debug("Ending: _load_all_link_database")
Load the ALL - Link Database into object .
39,287
def _get_first_all_link_record(self):
    """Request the first ALL-Link record from the modem.

    If the ALDB is already loaded, no request is sent; instead the NAK
    handler is invoked directly — presumably with retries pre-set to their
    limit so it treats the database as complete (TODO confirm).
    """
    _LOGGER.debug("Starting: _get_first_all_link_record")
    _LOGGER.info('Requesting ALL-Link Records')
    if self.aldb.status == ALDBStatus.LOADED:
        self._next_all_link_rec_nak_retries = 3
        self._handle_get_next_all_link_record_nak(None)
        return
    self.aldb.clear()
    self._next_all_link_rec_nak_retries = 0
    msg = GetFirstAllLinkRecord()
    self.send_msg(msg, wait_nak=True, wait_timeout=.5)
    _LOGGER.debug("Ending: _get_first_all_link_record")
Request first ALL - Link record .
39,288
def _get_next_all_link_record(self):
    """Request the next ALL-Link record from the modem."""
    _LOGGER.debug("Starting: _get_next_all_link_record")
    _LOGGER.debug("Requesting Next All-Link Record")
    msg = GetNextAllLinkRecord()
    self.send_msg(msg, wait_nak=True, wait_timeout=.5)
    _LOGGER.debug("Ending: _get_next_all_link_record")
Request next ALL - Link record .
39,289
def x10_all_units_off(self, housecode):
    """Broadcast the X10 'All Units Off' command for *housecode*.

    Raises TypeError when *housecode* is not a string.
    """
    if not isinstance(housecode, str):
        raise TypeError('Housecode must be a string')
    housecode = housecode.upper()
    msg = X10Send.command_msg(housecode, X10_COMMAND_ALL_UNITS_OFF)
    self.send_msg(msg)
    self._x10_command_to_device(housecode, X10_COMMAND_ALL_UNITS_OFF, msg)
Send the X10 All Units Off command .
39,290
def connection_made(self, transport):
    """Start the PLM session once the serial transport is established."""
    _LOGGER.info('Connection established to PLM')
    self.transport = transport
    self._restart_writer = True
    self.restart_writing()
    # Bound serial reads/writes so they cannot block indefinitely.
    self.transport.serial.timeout = 1
    self.transport.serial.write_timeout = 1
    self.transport.set_write_buffer_limits(128)
    if self._aldb.status != ALDBStatus.LOADED:
        asyncio.ensure_future(self._setup_devices(), loop=self._loop)
Start the PLM connection process .
39,291
def connection_made(self, transport):
    """Start the Hub session once the transport is established."""
    _LOGGER.info('Connection established to Hub')
    _LOGGER.debug('Transport: %s', transport)
    self.transport = transport
    self._restart_writer = True
    self.restart_writing()
    if self._aldb.status != ALDBStatus.LOADED:
        asyncio.ensure_future(self._setup_devices(), loop=self._loop)
Start the Hub connection process .
39,292
def create(plm, address, cat, subcat, firmware=None):
    """Instantiate the device class matching cat/subcat from the IPDB.

    Returns None when the product has no device class. The *firmware*
    argument is accepted but currently unused.
    """
    from insteonplm.devices.ipdb import IPDB
    product = IPDB()[[cat, subcat]]
    deviceclass = product.deviceclass
    if deviceclass is None:
        return None
    return deviceclass(plm, address, cat, subcat,
                       product.product_key, product.description,
                       product.model)
Create a device from device info data .
39,293
def create_x10(plm, housecode, unitcode, feature):
    """Instantiate the X10 device class matching *feature* from the IPDB.

    Returns None when no device class is registered for the feature.
    """
    from insteonplm.devices.ipdb import IPDB
    product = IPDB().x10(feature)
    deviceclass = product.deviceclass
    if not deviceclass:
        return None
    return deviceclass(plm, housecode, unitcode)
Create an X10 device from a feature definition .
39,294
def id_request(self):
    """Send an ID Request (cmd 0x10) message to the device.

    NOTE(review): the inspect-based caller logging below is debug
    scaffolding and adds per-call overhead; consider removing it.
    """
    import inspect
    curframe = inspect.currentframe()
    calframe = inspect.getouterframes(curframe, 2)
    _LOGGER.debug('caller name: %s', calframe[1][3])
    msg = StandardSend(self.address, COMMAND_ID_REQUEST_0X10_0X00)
    self._plm.send_msg(msg)
Request a device ID from a device .
39,295
def product_data_request(self):
    """Request product data (cmd 0x03/0x00) from the device."""
    msg = StandardSend(self._address, COMMAND_PRODUCT_DATA_REQUEST_0X03_0X00)
    self._send_msg(msg)
Request product data from a device .
39,296
def assign_to_all_link_group(self, group=0x01):
    """Assign the device to ALL-Link group *group* (default 0x01)."""
    msg = StandardSend(self._address,
                       COMMAND_ASSIGN_TO_ALL_LINK_GROUP_0X01_NONE,
                       cmd2=group)
    self._send_msg(msg)
Assign a device to an All - Link Group .
39,297
def delete_from_all_link_group(self, group):
    """Delete the device from ALL-Link group *group*."""
    msg = StandardSend(self._address,
                       COMMAND_DELETE_FROM_ALL_LINK_GROUP_0X02_NONE,
                       cmd2=group)
    self._send_msg(msg)
Delete a device from an All-Link Group.
39,298
def enter_linking_mode(self, group=0x01):
    """Tell the device to enter ALL-Linking mode for *group*.

    Sent as an extended message; the checksum is computed after all
    fields (including userdata) are populated.
    """
    msg = ExtendedSend(self._address,
                       COMMAND_ENTER_LINKING_MODE_0X09_NONE,
                       cmd2=group,
                       userdata=Userdata())
    msg.set_checksum()
    self._send_msg(msg)
Tell a device to enter All - Linking Mode .
39,299
def enter_unlinking_mode(self, group):
    """Tell the device to enter unlinking mode for ALL-Link group *group*."""
    msg = StandardSend(self._address,
                       COMMAND_ENTER_UNLINKING_MODE_0X0A_NONE,
                       cmd2=group)
    self._send_msg(msg)
Unlink a device from an All - Link group .