idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
27,200
def rbac_policy_create(request, **kwargs):
    """Create an RBAC policy.

    The keyword arguments are sent as the ``rbac_policy`` request body;
    the created policy is wrapped in an ``RBACPolicy`` object.
    """
    body = {'rbac_policy': kwargs}
    created = neutronclient(request).create_rbac_policy(body=body).get('rbac_policy')
    return RBACPolicy(created)
Create an RBAC policy.
27,201
def rbac_policy_list(request, **kwargs):
    """List RBAC policies, wrapped as ``RBACPolicy`` objects."""
    response = neutronclient(request).list_rbac_policies(**kwargs)
    return [RBACPolicy(policy) for policy in response.get('rbac_policies')]
List of RBAC Policies .
27,202
def rbac_policy_update(request, policy_id, **kwargs):
    """Update an RBAC policy identified by ``policy_id``."""
    body = {'rbac_policy': kwargs}
    updated = neutronclient(request).update_rbac_policy(
        policy_id, body=body).get('rbac_policy')
    return RBACPolicy(updated)
Update a RBAC Policy .
27,203
def rbac_policy_get(request, policy_id, **kwargs):
    """Get the RBAC policy for a given policy id."""
    raw = neutronclient(request).show_rbac_policy(
        policy_id, **kwargs).get('rbac_policy')
    return RBACPolicy(raw)
Get RBAC policy for a given policy id .
27,204
def list(self, **params):
    """Fetch all security groups.

    Defaults the ``tenant_id`` filter to the request user's tenant;
    an explicitly falsy ``tenant_id`` disables tenant scoping.
    """
    project = params.pop('tenant_id', self.request.user.tenant_id)
    if project:
        params['tenant_id'] = project
    return self._list(**params)
Fetches a list of all security groups.
27,205
def _sg_name_dict ( self , sg_id , rules ) : related_ids = set ( [ sg_id ] ) related_ids |= set ( filter ( None , [ r [ 'remote_group_id' ] for r in rules ] ) ) related_sgs = self . client . list_security_groups ( id = related_ids , fields = [ 'id' , 'name' ] ) related_sgs = related_sgs . get ( 'security_groups' ) retu...
Create a mapping dict from secgroup id to its name .
27,206
def get(self, sg_id):
    """Fetch a single security group, resolving names for related groups."""
    secgroup = self.client.show_security_group(sg_id).get('security_group')
    name_map = self._sg_name_dict(sg_id, secgroup['security_group_rules'])
    return SecurityGroup(secgroup, name_map)
Fetches the security group .
27,207
def rule_create ( self , parent_group_id , direction = None , ethertype = None , ip_protocol = None , from_port = None , to_port = None , cidr = None , group_id = None , description = None ) : if not cidr : cidr = None if isinstance ( from_port , int ) and from_port < 0 : from_port = None if isinstance ( to_port , int ...
Create a new security group rule .
27,208
def list_by_instance(self, instance_id):
    """Get the security groups attached to an instance's ports."""
    ports = port_list(self.request, device_id=instance_id)
    group_ids = []
    for port in ports:
        group_ids += port.security_groups
    if not group_ids:
        return []
    # de-duplicate before querying
    return self._list(id=set(group_ids))
Gets security groups of an instance .
27,209
def update_instance_security_group(self, instance_id, new_security_group_ids):
    """Replace the security groups on every port of the given instance."""
    for port in port_list(self.request, device_id=instance_id):
        port_update(self.request, port.id,
                    security_groups=new_security_group_ids)
Update security groups of a specified instance .
27,210
def list_pools(self):
    """Fetch all external networks, which serve as floating IP pools."""
    # 'router:external' is not a valid identifier, so pass it via **kwargs
    networks = self.client.list_networks(**{'router:external': True}).get('networks')
    return [FloatingIpPool(net) for net in networks]
Fetches a list of all floating IP pools .
27,211
def list ( self , all_tenants = False , ** search_opts ) : if not all_tenants : tenant_id = self . request . user . tenant_id search_opts [ 'tenant_id' ] = tenant_id port_search_opts = { 'tenant_id' : tenant_id } else : port_search_opts = { } fips = self . client . list_floatingips ( ** search_opts ) fips = fips . get ...
Fetches a list of all floating IPs .
27,212
def get(self, floating_ip_id):
    """Fetch a single floating IP, annotated with instance information."""
    record = self.client.show_floatingip(floating_ip_id).get('floatingip')
    self._set_instance_info(record)
    return FloatingIp(record)
Fetches the floating IP .
27,213
def allocate ( self , pool , tenant_id = None , ** params ) : if not tenant_id : tenant_id = self . request . user . project_id create_dict = { 'floating_network_id' : pool , 'tenant_id' : tenant_id } if 'subnet_id' in params : create_dict [ 'subnet_id' ] = params [ 'subnet_id' ] if 'floating_ip_address' in params : cr...
Allocates a floating IP to the tenant .
27,214
def associate(self, floating_ip_id, port_id):
    """Associate the floating IP with a port.

    ``port_id`` is a composite '<port-id>_<fixed-ip>' string; it is split
    on the first underscore.
    """
    target_port, fixed_ip = port_id.split('_', 1)
    body = {'floatingip': {'port_id': target_port,
                           'fixed_ip_address': fixed_ip}}
    self.client.update_floatingip(floating_ip_id, body)
Associates the floating IP to the port .
27,215
def list_targets ( self ) : tenant_id = self . request . user . tenant_id ports = port_list ( self . request , tenant_id = tenant_id ) servers , has_more = nova . server_list ( self . request , detailed = False ) server_dict = collections . OrderedDict ( [ ( s . id , s . name ) for s in servers ] ) reachable_subnets = ...
Returns a list of association targets of instance VIFs .
27,216
def list_targets_by_instance ( self , instance_id , target_list = None ) : if target_list is not None : return [ target for target in target_list if target [ 'instance_id' ] == instance_id ] else : ports = self . _target_ports_by_instance ( instance_id ) reachable_subnets = self . _get_reachable_subnets ( ports , fetch...
Returns a list of FloatingIpTarget objects of FIP association .
27,217
def require_perms ( view_func , required ) : from horizon . exceptions import NotAuthorized current_perms = getattr ( view_func , '_required_perms' , set ( [ ] ) ) view_func . _required_perms = current_perms | set ( required ) @ functools . wraps ( view_func , assigned = available_attrs ( view_func ) ) def dec ( reques...
Enforces permission - based access controls .
27,218
def require_component_access ( view_func , component ) : from horizon . exceptions import NotAuthorized @ functools . wraps ( view_func , assigned = available_attrs ( view_func ) ) def dec ( request , * args , ** kwargs ) : if not component . can_access ( { 'request' : request } ) : raise NotAuthorized ( _ ( "You are n...
Perform component can_access check to access the view .
27,219
def image_get(request, image_id):
    """Return an Image object populated with metadata for ``image_id``."""
    raw = glanceclient(request).images.get(image_id)
    return Image(raw)
Returns an Image object populated with metadata for a given image .
27,220
def image_list_detailed ( request , marker = None , sort_dir = 'desc' , sort_key = 'created_at' , filters = None , paginate = False , reversed_order = False , ** kwargs ) : limit = getattr ( settings , 'API_RESULT_LIMIT' , 1000 ) page_size = utils . get_page_size ( request ) if paginate : request_size = page_size + 1 e...
Thin layer above glanceclient for handling pagination issues .
27,221
def create_image_metadata ( data ) : meta = { 'protected' : data . get ( 'protected' , False ) , 'disk_format' : data . get ( 'disk_format' , 'raw' ) , 'container_format' : data . get ( 'container_format' , 'bare' ) , 'min_disk' : data . get ( 'min_disk' ) or 0 , 'min_ram' : data . get ( 'min_ram' ) or 0 , 'name' : dat...
Generate metadata dict for a new image from a given form data .
27,222
def image_create ( request , ** kwargs ) : data = kwargs . pop ( 'data' , None ) location = None if VERSIONS . active >= 2 : location = kwargs . pop ( 'location' , None ) image = glanceclient ( request ) . images . create ( ** kwargs ) if location is not None : glanceclient ( request ) . images . add_location ( image ....
Create image .
27,223
def image_update_properties(request, image_id, remove_props=None, **kwargs):
    """Add or update custom properties of an image (glance v2 API)."""
    client = glanceclient(request, '2')
    return client.images.update(image_id, remove_props, **kwargs)
Add or update a custom property of an image .
27,224
def image_delete_properties(request, image_id, keys):
    """Delete the custom properties named in ``keys`` from an image."""
    client = glanceclient(request, '2')
    return client.images.update(image_id, keys)
Delete custom properties for an image .
27,225
def filter_properties_target ( namespaces_iter , resource_types , properties_target ) : def filter_namespace ( namespace ) : for asn in namespace . get ( 'resource_type_associations' ) : if ( asn . get ( 'name' ) in resource_types and asn . get ( 'properties_target' ) == properties_target ) : return True return False r...
Filter metadata namespaces .
27,226
def metadefs_namespace_list ( request , filters = None , sort_dir = 'asc' , sort_key = 'namespace' , marker = None , paginate = False ) : if get_version ( ) < 2 : return [ ] , False , False if filters is None : filters = { } limit = getattr ( settings , 'API_RESULT_LIMIT' , 1000 ) page_size = utils . get_page_size ( re...
Retrieve a listing of Namespaces
27,227
def discover_files ( base_path , sub_path = '' , ext = '' , trim_base_path = False ) : file_list = [ ] for root , dirs , files in walk ( path . join ( base_path , sub_path ) ) : if trim_base_path : root = path . relpath ( root , base_path ) file_list . extend ( [ path . join ( root , file_name ) for file_name in files ...
Discovers all files with certain extension in given paths .
27,228
def sort_js_files ( js_files ) : modules = [ f for f in js_files if f . endswith ( MODULE_EXT ) ] mocks = [ f for f in js_files if f . endswith ( MOCK_EXT ) ] specs = [ f for f in js_files if f . endswith ( SPEC_EXT ) ] other_sources = [ f for f in js_files if ( not f . endswith ( MODULE_EXT ) and not f . endswith ( MO...
Sorts JavaScript files in js_files .
27,229
def discover_static_files ( base_path , sub_path = '' ) : js_files = discover_files ( base_path , sub_path = sub_path , ext = '.js' , trim_base_path = True ) sources , mocks , specs = sort_js_files ( js_files ) html_files = discover_files ( base_path , sub_path = sub_path , ext = '.html' , trim_base_path = True ) p = p...
Discovers static files in given paths .
27,230
def _log(file_list, list_name, in_path):
    """Log discovered files at debug level."""
    joined = '\n'.join(file_list)
    LOG.debug(
        "\nDiscovered %(size)d %(name)s file(s) in %(path)s:\n"
        "%(files)s\n",
        {'size': len(file_list), 'name': list_name,
         'path': in_path, 'files': joined},
    )
Logs result at debug level
27,231
def set_tables ( self , tables ) : self . navigation_table = tables [ self . navigation_table_class . _meta . name ] self . content_table = tables [ self . content_table_class . _meta . name ] navigation_item = self . kwargs . get ( self . navigation_kwarg_name ) content_path = self . kwargs . get ( self . content_kwar...
Sets the table instances on the browser .
27,232
def get_account_history(self, account_id, **kwargs):
    """List ledger activity for an account (paginated)."""
    return self._send_paginated_message(
        '/accounts/{}/ledger'.format(account_id), params=kwargs)
List account activity . Account activity either increases or decreases your account balance .
27,233
def get_account_holds(self, account_id, **kwargs):
    """Get holds on an account (paginated)."""
    return self._send_paginated_message(
        '/accounts/{}/holds'.format(account_id), params=kwargs)
Get holds on an account .
27,234
def buy(self, product_id, order_type, **kwargs):
    """Place a buy order; delegates to ``place_order`` with side='buy'."""
    return self.place_order(product_id, 'buy', order_type, **kwargs)
Place a buy order .
27,235
def sell(self, product_id, order_type, **kwargs):
    """Place a sell order; delegates to ``place_order`` with side='sell'."""
    return self.place_order(product_id, 'sell', order_type, **kwargs)
Place a sell order .
27,236
def place_market_order ( self , product_id , side , size = None , funds = None , client_oid = None , stp = None , overdraft_enabled = None , funding_amount = None ) : params = { 'product_id' : product_id , 'side' : side , 'order_type' : 'market' , 'size' : size , 'funds' : funds , 'client_oid' : client_oid , 'stp' : st...
Place market order .
27,237
def cancel_all(self, product_id=None):
    """Best-effort cancel of all open orders, optionally per product."""
    params = {'product_id': product_id} if product_id is not None else None
    return self._send_message('delete', '/orders', params=params)
With best effort cancel all open orders .
27,238
def get_orders(self, product_id=None, status=None, **kwargs):
    """List your current open orders (paginated)."""
    query = kwargs
    if product_id is not None:
        query['product_id'] = product_id
    if status is not None:
        query['status'] = status
    return self._send_paginated_message('/orders', params=query)
List your current open orders .
27,239
def get_fundings(self, status=None, **kwargs):
    """List funding records, optionally filtered by status (paginated)."""
    query = {}
    if status is not None:
        query['status'] = status
    # extra kwargs may deliberately override 'status'
    query.update(kwargs)
    return self._send_paginated_message('/funding', params=query)
Every order placed with a margin profile that draws funding will create a funding record .
27,240
def repay_funding(self, amount, currency):
    """Repay funding; the oldest funding records are repaid first."""
    payload = json.dumps({'amount': amount, 'currency': currency})
    return self._send_message('post', '/funding/repay', data=payload)
Repay funding . Repays the older funding records first .
27,241
def margin_transfer(self, margin_profile_id, transfer_type, currency, amount):
    """Transfer funds between the standard profile and a margin profile."""
    payload = json.dumps({
        'margin_profile_id': margin_profile_id,
        'type': transfer_type,
        'currency': currency,
        'amount': amount,
    })
    return self._send_message('post', '/profiles/margin-transfer', data=payload)
Transfer funds between your standard profile and a margin profile .
27,242
def close_position(self, repay_only):
    """Close position, optionally repaying funding only."""
    payload = json.dumps({'repay_only': repay_only})
    return self._send_message('post', '/position/close', data=payload)
Close position .
27,243
def withdraw(self, amount, currency, payment_method_id):
    """Withdraw funds to a payment method."""
    payload = json.dumps({
        'amount': amount,
        'currency': currency,
        'payment_method_id': payment_method_id,
    })
    return self._send_message('post', '/withdrawals/payment-method', data=payload)
Withdraw funds to a payment method .
27,244
def coinbase_withdraw(self, amount, currency, coinbase_account_id):
    """Withdraw funds to a coinbase account."""
    payload = json.dumps({
        'amount': amount,
        'currency': currency,
        'coinbase_account_id': coinbase_account_id,
    })
    return self._send_message('post', '/withdrawals/coinbase-account', data=payload)
Withdraw funds to a coinbase account .
27,245
def crypto_withdraw(self, amount, currency, crypto_address):
    """Withdraw funds to a crypto address."""
    payload = json.dumps({
        'amount': amount,
        'currency': currency,
        'crypto_address': crypto_address,
    })
    return self._send_message('post', '/withdrawals/crypto', data=payload)
Withdraw funds to a crypto address .
27,246
def create_report ( self , report_type , start_date , end_date , product_id = None , account_id = None , report_format = 'pdf' , email = None ) : params = { 'type' : report_type , 'start_date' : start_date , 'end_date' : end_date , 'format' : report_format } if product_id is not None : params [ 'product_id' ] = product...
Create report of historic information about your account .
27,247
def get_product_order_book(self, product_id, level=1):
    """Get the open-order book for a product at the given detail level."""
    return self._send_message(
        'get', '/products/{}/book'.format(product_id),
        params={'level': level})
Get a list of open orders for a product .
27,248
def get_product_trades(self, product_id, before='', after='', limit=None,
                       result=None):
    """List the latest trades for a product (paginated).

    Bug fix: ``before``, ``after`` and ``limit`` were previously accepted
    but silently ignored; they are now forwarded as query parameters.
    ``result`` is kept for backward compatibility and remains unused.
    """
    params = {}
    if before:
        params['before'] = before
    if after:
        params['after'] = after
    if limit is not None:
        params['limit'] = limit
    return self._send_paginated_message(
        '/products/{}/trades'.format(product_id), params=params)
List the latest trades for a product .
27,249
def get_product_historic_rates ( self , product_id , start = None , end = None , granularity = None ) : params = { } if start is not None : params [ 'start' ] = start if end is not None : params [ 'end' ] = end if granularity is not None : acceptedGrans = [ 60 , 300 , 900 , 3600 , 21600 , 86400 ] if granularity not in ...
Historic rates for a product .
27,250
def _send_message ( self , method , endpoint , params = None , data = None ) : url = self . url + endpoint r = self . session . request ( method , url , params = params , data = data , auth = self . auth , timeout = 30 ) return r . json ( )
Send API request .
27,251
def _send_paginated_message ( self , endpoint , params = None ) : if params is None : params = dict ( ) url = self . url + endpoint while True : r = self . session . get ( url , params = params , auth = self . auth , timeout = 30 ) results = r . json ( ) for result in results : yield result if not r . headers . get ( '...
Send API message that results in a paginated response .
27,252
def check_cv ( cv = 3 , y = None , classifier = False ) : if cv is None : cv = 3 if not is_dask_collection ( y ) or not isinstance ( cv , numbers . Integral ) : return model_selection . check_cv ( cv , y , classifier ) if classifier : target_type = delayed ( type_of_target , pure = True ) ( y ) . compute ( ) if target_...
Dask aware version of sklearn . model_selection . check_cv
27,253
def compute_n_splits ( cv , X , y = None , groups = None ) : if not any ( is_dask_collection ( i ) for i in ( X , y , groups ) ) : return cv . get_n_splits ( X , y , groups ) if isinstance ( cv , ( _BaseKFold , BaseShuffleSplit ) ) : return cv . n_splits elif isinstance ( cv , PredefinedSplit ) : return len ( cv . uniq...
Return the number of splits .
27,254
def visualize(self, filename="mydask", format=None, **kwargs):
    """Render the task graph for this parameter search using graphviz."""
    check_is_fitted(self, "dask_graph_")
    return dask.visualize(self.dask_graph_,
                          filename=filename, format=format, **kwargs)
Render the task graph for this parameter search using graphviz .
27,255
def fit ( self , X , y = None , groups = None , ** fit_params ) : estimator = self . estimator from sklearn . metrics . scorer import _check_multimetric_scoring scorer , multimetric = _check_multimetric_scoring ( estimator , scoring = self . scoring ) if not multimetric : scorer = scorer [ "score" ] self . multimetric_...
Run fit with all sets of parameters .
27,256
def _get_param_iterator(self):
    """Return a ParameterSampler for the configured distributions."""
    return model_selection.ParameterSampler(
        self.param_distributions, self.n_iter,
        random_state=self.random_state)
Return ParameterSampler instance for the given distributions
27,257
def _partial_fit ( model_and_meta , X , y , fit_params ) : with log_errors ( ) : start = time ( ) model , meta = model_and_meta if len ( X ) : model = deepcopy ( model ) model . partial_fit ( X , y , ** ( fit_params or { } ) ) meta = dict ( meta ) meta [ "partial_fit_calls" ] += 1 meta [ "partial_fit_time" ] = time ( )...
Call partial_fit on a classifier with training data X and y.
27,258
def _create_model(model, ident, **params):
    """Create a model by cloning ``model`` and applying ``params``.

    Returns the clone plus a metadata dict keyed by ``ident``.
    """
    with log_errors(pdb=True):
        fresh = clone(model).set_params(**params)
        meta = {"model_id": ident, "params": params, "partial_fit_calls": 0}
        return fresh, meta
Create a model by cloning and then setting params
27,259
def fit ( model , params , X_train , y_train , X_test , y_test , additional_calls , fit_params = None , scorer = None , random_state = None , ) : return default_client ( ) . sync ( _fit , model , params , X_train , y_train , X_test , y_test , additional_calls , fit_params = fit_params , scorer = scorer , random_state =...
Find a good model and search among a space of hyper - parameters
27,260
def _check_array(self, X, **kwargs):
    """Validate ``X``, wrapping plain ndarrays as single-chunk dask arrays."""
    if isinstance(X, np.ndarray):
        X = da.from_array(X, X.shape)
    return check_array(X, **kwargs)
Validate the data arguments X and y .
27,261
def fit(self, X, y, **fit_params):
    """Find the best parameters for a particular model (runs on the client)."""
    return default_client().sync(self._fit, X, y, **fit_params)
Find the best parameters for a particular model .
27,262
def _check_array(self, X):
    """Validate an array for post-fit tasks, rechunking wide dask arrays."""
    if not isinstance(X, da.Array):
        return X
    if X.ndim == 2 and X.numblocks[1] > 1:
        logger.debug("auto-rechunking 'X'")
        # only auto-chunk rows when their sizes are known
        has_unknown_rows = np.isnan(X.chunks[0]).any()
        if has_unknown_rows:
            X = X.rechunk({1: -1})
        else:
            X = X.rechunk({0: "auto", 1: -1})
    return X
Validate an array for post - fit tasks .
27,263
def transform ( self , X ) : self . _check_method ( "transform" ) X = self . _check_array ( X ) if isinstance ( X , da . Array ) : return X . map_blocks ( _transform , estimator = self . _postfit_estimator ) elif isinstance ( X , dd . _Frame ) : return X . map_partitions ( _transform , estimator = self . _postfit_estim...
Transform block or partition - wise for dask inputs .
27,264
def score ( self , X , y , compute = True ) : scoring = self . scoring X = self . _check_array ( X ) y = self . _check_array ( y ) if not scoring : if type ( self . _postfit_estimator ) . score == sklearn . base . RegressorMixin . score : scoring = "r2" elif ( type ( self . _postfit_estimator ) . score == sklearn . bas...
Returns the score on the given data .
27,265
def predict ( self , X ) : self . _check_method ( "predict" ) X = self . _check_array ( X ) if isinstance ( X , da . Array ) : result = X . map_blocks ( _predict , dtype = "int" , estimator = self . _postfit_estimator , drop_axis = 1 ) return result elif isinstance ( X , dd . _Frame ) : return X . map_partitions ( _pre...
Predict for X .
27,266
def predict_log_proba(self, X):
    """Log of probability estimates (element-wise log of predict_proba)."""
    self._check_method("predict_log_proba")
    proba = self.predict_proba(X)
    return da.log(proba)
Log of probability estimates.
27,267
def _check_method ( self , method ) : estimator = self . _postfit_estimator if not hasattr ( estimator , method ) : msg = "The wrapped estimator '{}' does not have a '{}' method." . format ( estimator , method ) raise AttributeError ( msg ) return getattr ( estimator , method )
Check if self . estimator has method .
27,268
def fit ( self , X , y = None ) : X = self . _check_array ( X ) solver_kwargs = self . _get_solver_kwargs ( ) self . _coef = algorithms . _solvers [ self . solver ] ( X , y , ** solver_kwargs ) if self . fit_intercept : self . coef_ = self . _coef [ : - 1 ] self . intercept_ = self . _coef [ - 1 ] else : self . coef_ =...
Fit the model on the training data
27,269
def predict_proba(self, X):
    """Probability estimates for samples in X via the logistic link."""
    features = self._check_array(X)
    return sigmoid(dot(features, self._coef))
Probability estimates for samples in X .
27,270
def predict(self, X):
    """Predict counts for samples in X via the exponential link."""
    features = self._check_array(X)
    return exp(dot(features, self._coef))
Predict count for samples in X .
27,271
def k_means ( X , n_clusters , init = "k-means||" , precompute_distances = "auto" , n_init = 1 , max_iter = 300 , verbose = False , tol = 1e-4 , random_state = None , copy_x = True , n_jobs = - 1 , algorithm = "full" , return_n_iter = False , oversampling_factor = 2 , init_max_iter = None , ) : labels , inertia , cente...
K - means algorithm for clustering
27,272
def k_init ( X , n_clusters , init = "k-means||" , random_state = None , max_iter = None , oversampling_factor = 2 , ) : if isinstance ( init , np . ndarray ) : K , P = init . shape if K != n_clusters : msg = ( "Number of centers in provided 'init' ({}) does " "not match 'n_clusters' ({})" ) raise ValueError ( msg . fo...
Choose the initial centers for K - Means .
27,273
def init_pp ( X , n_clusters , random_state ) : x_squared_norms = row_norms ( X , squared = True ) . compute ( ) logger . info ( "Initializing with k-means++" ) with _timer ( "initialization of %2d centers" % n_clusters , _logger = logger ) : centers = sk_k_means . _k_init ( X , n_clusters , random_state = random_state...
K - means initialization using k - means ++
27,274
def init_random(X, n_clusters, random_state):
    """K-means initialization using randomly chosen points from X."""
    logger.info("Initializing randomly")
    seeds = draw_seed(random_state, 0, len(X), size=n_clusters)
    idx = sorted(seeds)
    return X[idx].compute()
K - means initialization using randomly chosen points
27,275
def init_scalable ( X , n_clusters , random_state = None , max_iter = None , oversampling_factor = 2 ) : logger . info ( "Initializing with k-means||" ) idx = 0 centers = da . compute ( X [ idx , np . newaxis ] ) [ 0 ] c_idx = { idx } cost , = compute ( evaluate_cost ( X , centers ) ) if cost == 0 : n_iter = 0 else : n...
K - Means initialization using k - means||
27,276
def _sample_points ( X , centers , oversampling_factor , random_state ) : r distances = pairwise_distances ( X , centers ) . min ( 1 ) ** 2 denom = distances . sum ( ) p = oversampling_factor * distances / denom draws = random_state . uniform ( size = len ( p ) , chunks = p . chunks ) picked = p > draws new_idxs , = da...
Sample points independently with probability proportional to their squared distance from the nearest center.
27,277
def transform(self, X):
    """Center and scale the data, per the ``with_*`` configuration."""
    if self.with_centering:
        check_is_fitted(self, "center_")
    if self.with_scaling:
        check_is_fitted(self, "scale_")
    data = self._check_array(X, self.copy)
    if self.with_centering:
        data -= self.center_
    if self.with_scaling:
        data /= self.scale_
    return data
Center and scale the data .
27,278
def inverse_transform(self, X):
    """Scale back the data to the original representation.

    NOTE(review): operates in place on ``X`` (``*=`` / ``+=``) — callers
    passing an array they still need should copy first.
    """
    check_is_fitted(self, "center_", "scale_")
    if self.with_scaling:
        X *= self.scale_
    if self.with_centering:
        X += self.center_
    return X
Scale back the data to the original representation
27,279
def fit ( self , X , y = None ) : X = self . _check_array ( X ) if self . categories is not None : columns = pd . Index ( self . categories ) categories = self . categories elif isinstance ( X , pd . DataFrame ) : columns , categories = self . _fit ( X ) else : columns , categories = self . _fit_dask ( X ) self . colum...
Find the categorical columns .
27,280
def transform ( self , X , y = None ) : check_is_fitted ( self , "categories_" ) X = self . _check_array ( X ) . copy ( ) categories = self . categories_ for k , dtype in categories . items ( ) : if _HAS_CTD : if not isinstance ( dtype , pd . api . types . CategoricalDtype ) : dtype = pd . api . types . CategoricalDtyp...
Transform the columns in X according to self . categories_ .
27,281
def fit ( self , X , y = None ) : self . columns_ = X . columns columns = self . columns if columns is None : columns = X . select_dtypes ( include = [ "category" ] ) . columns else : for column in columns : assert is_categorical_dtype ( X [ column ] ) , "Must be categorical" self . categorical_columns_ = columns self ...
Determine the categorical columns to be dummy encoded .
27,282
def transform ( self , X , y = None ) : if not X . columns . equals ( self . columns_ ) : raise ValueError ( "Columns of 'X' do not match the training " "columns. Got {!r}, expected {!r}" . format ( X . columns , self . columns ) ) if isinstance ( X , pd . DataFrame ) : return pd . get_dummies ( X , drop_first = self ....
Dummy encode the categorical columns in X
27,283
def inverse_transform ( self , X ) : if isinstance ( X , np . ndarray ) : X = pd . DataFrame ( X , columns = self . transformed_columns_ ) elif isinstance ( X , da . Array ) : unknown = np . isnan ( X . chunks [ 0 ] ) . any ( ) if unknown : lengths = blockwise ( len , "i" , X [ : , 0 ] , "i" , dtype = "i8" ) . compute ...
Inverse dummy - encode the columns in X
27,284
def fit ( self , X , y = None ) : self . columns_ = X . columns columns = self . columns if columns is None : columns = X . select_dtypes ( include = [ "category" ] ) . columns else : for column in columns : assert is_categorical_dtype ( X [ column ] ) , "Must be categorical" self . categorical_columns_ = columns self ...
Determine the categorical columns to be encoded .
27,285
def transform ( self , X , y = None ) : if not X . columns . equals ( self . columns_ ) : raise ValueError ( "Columns of 'X' do not match the training " "columns. Got {!r}, expected {!r}" . format ( X . columns , self . columns ) ) if not isinstance ( X , ( pd . DataFrame , dd . DataFrame ) ) : raise TypeError ( "Unexp...
Ordinal encode the categorical columns in X
27,286
def inverse_transform ( self , X ) : if isinstance ( X , np . ndarray ) : X = pd . DataFrame ( X , columns = self . columns_ ) elif isinstance ( X , da . Array ) : unknown = np . isnan ( X . chunks [ 0 ] ) . any ( ) if unknown : lengths = blockwise ( len , "i" , X [ : , 0 ] , "i" , dtype = "i8" ) . compute ( ) X = X . ...
Inverse ordinal - encode the columns in X
27,287
def fit ( model , x , y , compute = True , shuffle_blocks = True , random_state = None , ** kwargs ) : if not hasattr ( x , "chunks" ) and hasattr ( x , "to_dask_array" ) : x = x . to_dask_array ( ) assert x . ndim == 2 if y is not None : if not hasattr ( y , "chunks" ) and hasattr ( y , "to_dask_array" ) : y = y . to_...
Fit scikit learn model against dask arrays
27,288
def predict ( model , x ) : if not hasattr ( x , "chunks" ) and hasattr ( x , "to_dask_array" ) : x = x . to_dask_array ( ) assert x . ndim == 2 if len ( x . chunks [ 1 ] ) > 1 : x = x . rechunk ( chunks = ( x . chunks [ 0 ] , sum ( x . chunks [ 1 ] ) ) ) func = partial ( _predict , model ) xx = np . zeros ( ( 1 , x . ...
Predict with a scikit learn model
27,289
def _slice_mostly_sorted ( array , keep , rest , ind = None ) : if ind is None : ind = np . arange ( len ( array ) ) idx = np . argsort ( np . concatenate ( [ keep , ind [ rest ] ] ) ) slices = [ ] if keep [ 0 ] > 0 : slices . append ( slice ( None , keep [ 0 ] ) ) slices . append ( [ keep [ 0 ] ] ) windows = zip ( kee...
Slice dask array array that is almost entirely sorted already .
27,290
def make_counts ( n_samples = 1000 , n_features = 100 , n_informative = 2 , scale = 1.0 , chunks = 100 , random_state = None , ) : rng = dask_ml . utils . check_random_state ( random_state ) X = rng . normal ( 0 , 1 , size = ( n_samples , n_features ) , chunks = ( chunks , n_features ) ) informative_idx = rng . choice ...
Generate a dummy dataset for modeling count data .
27,291
def make_blobs ( n_samples = 100 , n_features = 2 , centers = None , cluster_std = 1.0 , center_box = ( - 10.0 , 10.0 ) , shuffle = True , random_state = None , chunks = None , ) : chunks = da . core . normalize_chunks ( chunks , ( n_samples , n_features ) ) _check_axis_partitioning ( chunks , n_features ) if centers i...
Generate isotropic Gaussian blobs for clustering .
27,292
def make_regression ( n_samples = 100 , n_features = 100 , n_informative = 10 , n_targets = 1 , bias = 0.0 , effective_rank = None , tail_strength = 0.5 , noise = 0.0 , shuffle = True , coef = False , random_state = None , chunks = None , ) : chunks = da . core . normalize_chunks ( chunks , ( n_samples , n_features ) )...
Generate a random regression problem .
27,293
def fit_transform ( self , X , y = None ) : X = self . _check_array ( X ) if self . algorithm not in { "tsqr" , "randomized" } : raise ValueError ( ) if self . algorithm == "tsqr" : u , s , v = da . linalg . svd ( X ) u = u [ : , : self . n_components ] s = s [ : self . n_components ] v = v [ : self . n_components ] el...
Fit model to X and perform dimensionality reduction on X .
27,294
def _hstack ( self , Xs ) : types = set ( type ( X ) for X in Xs ) if self . sparse_output_ : return sparse . hstack ( Xs ) . tocsr ( ) elif dd . Series in types or dd . DataFrame in types : with warnings . catch_warnings ( ) : warnings . filterwarnings ( "ignore" , "Concatenating" , UserWarning ) return dd . concat ( ...
Stacks X horizontally .
27,295
def transform(self, X):
    """Apply dimensionality reduction on X (center, project, optionally whiten)."""
    check_is_fitted(self, ["mean_", "components_"], all_or_any=all)
    centered = X if self.mean_ is None else X - self.mean_
    projected = da.dot(centered, self.components_.T)
    if self.whiten:
        projected /= np.sqrt(self.explained_variance_)
    return projected
Apply dimensionality reduction on X .
27,296
def fit_transform(self, X, y=None):
    """Fit the model with X and apply the dimensionality reduction on X."""
    U, S, _ = self._fit(X)
    k = self.n_components_
    reduced = U[:, :k]
    if self.whiten:
        # unit-variance components
        reduced *= np.sqrt(X.shape[0] - 1)
    else:
        # scale by the singular values
        reduced *= S[:k]
    return reduced
Fit the model with X and apply the dimensionality reduction on X .
27,297
def inverse_transform(self, X):
    """Transform data back to its original space."""
    check_is_fitted(self, "mean_")
    if self.whiten:
        scaled = np.sqrt(self.explained_variance_[:, np.newaxis]) * self.components_
        return da.dot(X, scaled) + self.mean_
    return da.dot(X, self.components_) + self.mean_
Transform data back to its original space .
27,298
def score_samples ( self , X ) : check_is_fitted ( self , "mean_" ) Xr = X - self . mean_ n_features = X . shape [ 1 ] precision = self . get_precision ( ) log_like = - 0.5 * ( Xr * ( da . dot ( Xr , precision ) ) ) . sum ( axis = 1 ) log_like -= 0.5 * ( n_features * da . log ( 2.0 * np . pi ) - fast_logdet ( precision...
Return the log - likelihood of each sample .
27,299
def assert_estimator_equal ( left , right , exclude = None , ** kwargs ) : left_attrs = [ x for x in dir ( left ) if x . endswith ( "_" ) and not x . startswith ( "_" ) ] right_attrs = [ x for x in dir ( right ) if x . endswith ( "_" ) and not x . startswith ( "_" ) ] if exclude is None : exclude = set ( ) elif isinsta...
Check that two Estimators are equal