idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
36,400
def get_file_checksum ( self , path , ** kwargs ) : metadata_response = self . _get ( path , 'GETFILECHECKSUM' , expected_status = httplib . TEMPORARY_REDIRECT , ** kwargs ) assert not metadata_response . content data_response = self . _requests_session . get ( metadata_response . headers [ 'location' ] , ** self . _re...
Get the checksum of a file .
36,401
def set_permission ( self , path , ** kwargs ) : response = self . _put ( path , 'SETPERMISSION' , ** kwargs ) assert not response . content
Set permission of a path .
36,402
def set_times ( self , path , ** kwargs ) : response = self . _put ( path , 'SETTIMES' , ** kwargs ) assert not response . content
Set access time of a file .
36,403
def set_xattr ( self , path , xattr_name , xattr_value , flag , ** kwargs ) : kwargs [ 'xattr.name' ] = xattr_name kwargs [ 'xattr.value' ] = xattr_value response = self . _put ( path , 'SETXATTR' , flag = flag , ** kwargs ) assert not response . content
Set an xattr of a file or directory .
36,404
def remove_xattr ( self , path , xattr_name , ** kwargs ) : kwargs [ 'xattr.name' ] = xattr_name response = self . _put ( path , 'REMOVEXATTR' , ** kwargs ) assert not response . content
Remove an xattr of a file or directory .
36,405
def get_xattrs ( self , path , xattr_name = None , encoding = 'text' , ** kwargs ) : kwargs [ 'xattr.name' ] = xattr_name json = _json ( self . _get ( path , 'GETXATTRS' , encoding = encoding , ** kwargs ) ) [ 'XAttrs' ] result = { } for attr in json : k = attr [ 'name' ] v = attr [ 'value' ] if v is None : result [ k ...
Get one or more xattr values for a file or directory .
36,406
def list_xattrs ( self , path , ** kwargs ) : return simplejson . loads ( _json ( self . _get ( path , 'LISTXATTRS' , ** kwargs ) ) [ 'XAttrNames' ] )
Get all of the xattr names for a file or directory .
36,407
def delete_snapshot ( self , path , snapshotname , ** kwargs ) : response = self . _delete ( path , 'DELETESNAPSHOT' , snapshotname = snapshotname , ** kwargs ) assert not response . content
Delete a snapshot of a directory
36,408
def rename_snapshot ( self , path , oldsnapshotname , snapshotname , ** kwargs ) : response = self . _put ( path , 'RENAMESNAPSHOT' , oldsnapshotname = oldsnapshotname , snapshotname = snapshotname , ** kwargs ) assert not response . content
Rename a snapshot
36,409
def listdir ( self , path , ** kwargs ) : statuses = self . list_status ( path , ** kwargs ) if len ( statuses ) == 1 and statuses [ 0 ] . pathSuffix == '' and statuses [ 0 ] . type == 'FILE' : raise NotADirectoryError ( 'Not a directory: {!r}' . format ( path ) ) return [ f . pathSuffix for f in statuses ]
Return a list containing names of files in the given path
36,410
def exists ( self , path , ** kwargs ) : try : self . get_file_status ( path , ** kwargs ) return True except HdfsFileNotFoundException : return False
Return true if the given path exists
36,411
def walk ( self , top , topdown = True , onerror = None , ** kwargs ) : try : listing = self . list_status ( top , ** kwargs ) except HdfsException as e : if onerror is not None : onerror ( e ) return dirnames , filenames = [ ] , [ ] for f in listing : if f . type == 'DIRECTORY' : dirnames . append ( f . pathSuffix ) e...
See os . walk for documentation
36,412
def copy_from_local ( self , localsrc , dest , ** kwargs ) : with io . open ( localsrc , 'rb' ) as f : self . create ( dest , f , ** kwargs )
Copy a single file from the local file system to dest
36,413
def copy_to_local ( self , src , localdest , ** kwargs ) : with self . open ( src , ** kwargs ) as fsrc : with io . open ( localdest , 'wb' ) as fdst : shutil . copyfileobj ( fsrc , fdst )
Copy a single file from src to the local file system
36,414
def get_active_namenode ( self , max_staleness = None ) : if ( max_staleness is None or self . _last_time_recorded_active is None or self . _last_time_recorded_active < time . time ( ) - max_staleness ) : self . get_file_status ( '/' ) return self . hosts [ 0 ]
Return the address of the currently active NameNode .
36,415
def next ( self ) : if self . index < self . length : index = self . index self . index += 1 return self . rules [ index ] else : raise StopIteration ( )
Returns the next rule .
36,416
def _build_tree ( self , actor , content ) : depth = actor . depth row = "" for i in xrange ( depth - 1 ) : row += "| " if depth > 0 : row += "|-" name = actor . name if name != actor . __class__ . __name__ : name = actor . __class__ . __name__ + " '" + name + "'" row += name quickinfo = actor . quickinfo if quickinfo ...
Builds the tree for the given actor .
36,417
def save ( cls , flow , fname ) : result = None try : f = open ( fname , 'w' ) f . write ( flow . to_json ( ) ) f . close ( ) except Exception , e : result = str ( e ) return result
Saves the flow to a JSON file .
36,418
def setup ( self ) : result = super ( BranchDirector , self ) . setup ( ) if result is None : try : self . check_actors ( ) except Exception , e : result = str ( e ) return result
Performs some checks .
36,419
def next ( self ) : if self . index < self . length : index = self . index self . index += 1 return self . data [ index ] else : raise StopIteration ( )
Returns the next element from the array .
36,420
def plot_cluster_assignments ( evl , data , atts = None , inst_no = False , size = 10 , title = None , outfile = None , wait = True ) : if not plot . matplotlib_available : logger . error ( "Matplotlib is not installed, plotting unavailable!" ) return fig = plt . figure ( ) if data . class_index == - 1 : c = None else ...
Plots the cluster assignments against the specified attributes .
36,421
def write_all ( filename , jobjects ) : array = javabridge . get_env ( ) . make_object_array ( len ( jobjects ) , javabridge . get_env ( ) . find_class ( "java/lang/Object" ) ) for i in xrange ( len ( jobjects ) ) : obj = jobjects [ i ] if isinstance ( obj , JavaObject ) : obj = obj . jobject javabridge . get_env ( ) ....
Serializes the list of objects to disk . JavaObject instances get automatically unwrapped .
36,422
def plot_classifier_errors ( predictions , absolute = True , max_relative_size = 50 , absolute_size = 50 , title = None , outfile = None , wait = True ) : if not plot . matplotlib_available : logger . error ( "Matplotlib is not installed, plotting unavailable!" ) return actual = [ ] predicted = [ ] error = None cls = N...
Plots the classifiers for the given list of predictions .
36,423
def values ( self , index ) : values = [ ] for i in xrange ( self . num_instances ) : inst = self . get_instance ( i ) values . append ( inst . get_value ( index ) ) return numpy . array ( values )
Returns the internal values of this attribute from all the instance objects .
36,424
def next ( self ) : if self . row < self . data . num_instances : index = self . row self . row += 1 return self . data . get_instance ( index ) else : raise StopIteration ( )
Returns the next row from the Instances object .
36,425
def next ( self ) : if self . col < self . data . num_attributes : index = self . col self . col += 1 return self . data . attribute ( index ) else : raise StopIteration ( )
Returns the next attribute from the Instances object .
36,426
def check ( self , diff ) : path = diff . b_path assert any ( path . endswith ( ext ) for ext in importlib . machinery . SOURCE_SUFFIXES )
Check that the new file introduced is a python source file
36,427
def check ( self , diff ) : path = diff . b_path contrib_path = self . project . contrib_module_path assert pathlib . Path ( contrib_path ) in pathlib . Path ( path ) . parents
Check that the new file is within the contrib subdirectory
36,428
def check ( self , diff ) : relative_path = relative_to_contrib ( diff , self . project ) subpackage_name = relative_path . parts [ 0 ] assert re_test ( SUBPACKAGE_NAME_REGEX , subpackage_name )
Check that the name of the subpackage within contrib is valid
36,429
def check ( self , diff ) : relative_path = relative_to_contrib ( diff , self . project ) assert len ( relative_path . parts ) == 2
Check that the new file introduced is at the proper depth
36,430
def check ( self , diff ) : r filename = pathlib . Path ( diff . b_path ) . parts [ - 1 ] is_valid_feature_module_name = re_test ( FEATURE_MODULE_NAME_REGEX , filename ) is_valid_init_module_name = filename == '__init__.py' assert is_valid_feature_module_name or is_valid_init_module_name
Check that the new file introduced has a valid name
36,431
def enable ( logger = logger , level = logging . INFO , format = DETAIL_LOG_FORMAT , echo = True ) : global _handler if _handler is None : _handler = logging . StreamHandler ( ) formatter = logging . Formatter ( format ) _handler . setFormatter ( formatter ) level = logging . _checkLevel ( level ) levelName = logging ....
Enable simple console logging for this module
36,432
def default_base_dir ( ) : base_dir = os . environ . get ( 'OS_REFRESH_CONFIG_BASE_DIR' ) if base_dir is None : if os . path . isdir ( OLD_BASE_DIR ) and not os . path . isdir ( DEFAULT_BASE_DIR ) : logging . warning ( 'Base directory %s is deprecated. The recommended ' 'base directory is %s' , OLD_BASE_DIR , DEFAULT_B...
Determine the default base directory path
36,433
def blacken_code ( code ) : if black is None : raise NotImplementedError major , minor , _ = platform . python_version_tuple ( ) pyversion = 'py{major}{minor}' . format ( major = major , minor = minor ) target_versions = [ black . TargetVersion [ pyversion . upper ( ) ] ] line_length = black . DEFAULT_LINE_LENGTH strin...
Format code content using Black
36,434
def _writer ( func ) : name = func . __name__ return property ( fget = lambda self : getattr ( self , '_%s' % name ) , fset = func )
Decorator for a custom writer but a default reader
36,435
def _get_subject_alt ( self , name ) : if self . _subject_alt_name is None : return [ ] output = [ ] for general_name in self . _subject_alt_name : if general_name . name == name : output . append ( general_name . native ) return output
Returns the native value for each value in the subject alt name extension request that is an asn1crypto . x509 . GeneralName of the type specified by the name param
36,436
def _set_subject_alt ( self , name , values ) : if self . _subject_alt_name is not None : filtered_general_names = [ ] for general_name in self . _subject_alt_name : if general_name . name != name : filtered_general_names . append ( general_name ) self . _subject_alt_name = x509 . GeneralNames ( filtered_general_names ...
Replaces all existing asn1crypto . x509 . GeneralName objects of the choice represented by the name parameter with the values
36,437
def compute_metrics_cv ( self , X , y , ** kwargs ) : results = self . cv_score_mean ( X , y ) return results
Compute cross - validated metrics .
36,438
def cv_score_mean ( self , X , y ) : X , y = self . _format_inputs ( X , y ) if self . problem_type . binary_classification : kf = StratifiedKFold ( shuffle = True , random_state = RANDOM_STATE + 3 ) elif self . problem_type . multi_classification : self . target_type_transformer . inverse_transform ( y ) transformer =...
Compute mean score across cross validation folds .
36,439
def get_contrib_features ( project_root ) : project = Project ( project_root ) contrib = project . _resolve ( '.features.contrib' ) return _get_contrib_features ( contrib )
Get contributed features for a project at project_root
36,440
def _get_contrib_features ( module ) : if isinstance ( module , types . ModuleType ) : if hasattr ( module , '__path__' ) : yield from _get_contrib_features_from_package ( module ) else : yield _get_contrib_feature_from_module ( module ) else : raise ValueError ( 'Input is not a module' )
Get contributed features from within given module
36,441
def quickstart ( ) : import ballet . templating import ballet . util . log ballet . util . log . enable ( level = 'INFO' , format = ballet . util . log . SIMPLE_LOG_FORMAT , echo = False ) ballet . templating . render_project_template ( )
Generate a brand - new ballet project
36,442
def update_project_template ( push ) : import ballet . update import ballet . util . log ballet . util . log . enable ( level = 'INFO' , format = ballet . util . log . SIMPLE_LOG_FORMAT , echo = False ) ballet . update . update_project_template ( push = push )
Update an existing ballet project from the upstream template
36,443
def start_new_feature ( ) : import ballet . templating import ballet . util . log ballet . util . log . enable ( level = 'INFO' , format = ballet . util . log . SIMPLE_LOG_FORMAT , echo = False ) ballet . templating . start_new_feature ( )
Start working on a new feature from a template
36,444
def write_tabular ( obj , filepath ) : _ , fn , ext = splitext2 ( filepath ) if ext == '.h5' : _write_tabular_h5 ( obj , filepath ) elif ext == '.pkl' : _write_tabular_pickle ( obj , filepath ) else : raise NotImplementedError
Write tabular object in HDF5 or pickle format
36,445
def read_tabular ( filepath ) : _ , fn , ext = splitext2 ( filepath ) if ext == '.h5' : return _read_tabular_h5 ( filepath ) elif ext == '.pkl' : return _read_tabular_pickle ( filepath ) else : raise NotImplementedError
Read tabular object in HDF5 or pickle format
36,446
def load_table_from_config ( input_dir , config ) : path = pathlib . Path ( input_dir ) . joinpath ( config [ 'path' ] ) kwargs = config [ 'pd_read_kwargs' ] return pd . read_csv ( path , ** kwargs )
Load table from table config dict
36,447
def validate_feature_api ( project , force = False ) : if not force and not project . on_pr ( ) : raise SkippedValidationTest ( 'Not on PR' ) validator = FeatureApiValidator ( project ) result = validator . validate ( ) if not result : raise InvalidFeatureApi
Validate feature API
36,448
def evaluate_feature_performance ( project , force = False ) : if not force and not project . on_pr ( ) : raise SkippedValidationTest ( 'Not on PR' ) out = project . build ( ) X_df , y , features = out [ 'X_df' ] , out [ 'y' ] , out [ 'features' ] proposed_feature = get_proposed_feature ( project ) accepted_features = ...
Evaluate feature performance
36,449
def prune_existing_features ( project , force = False ) : if not force and not project . on_master_after_merge ( ) : raise SkippedValidationTest ( 'Not on master' ) out = project . build ( ) X_df , y , features = out [ 'X_df' ] , out [ 'y' ] , out [ 'features' ] proposed_feature = get_proposed_feature ( project ) accep...
Prune existing features
36,450
def spliceext ( filepath , s ) : root , ext = os . path . splitext ( safepath ( filepath ) ) return root + s + ext
Add s into filepath before the extension
36,451
def replaceext ( filepath , new_ext ) : if new_ext and new_ext [ 0 ] != '.' : new_ext = '.' + new_ext root , ext = os . path . splitext ( safepath ( filepath ) ) return root + new_ext
Replace any existing file extension with a new one
36,452
def splitext2 ( filepath ) : root , filename = os . path . split ( safepath ( filepath ) ) filename , ext = os . path . splitext ( safepath ( filename ) ) return root , filename , ext
Split filepath into root filename ext
36,453
def isemptyfile ( filepath ) : exists = os . path . exists ( safepath ( filepath ) ) if exists : filesize = os . path . getsize ( safepath ( filepath ) ) return filesize == 0 else : return False
Determine if the file both exists and is empty
36,454
def synctree ( src , dst , onexist = None ) : src = pathlib . Path ( src ) . resolve ( ) dst = pathlib . Path ( dst ) . resolve ( ) if not src . is_dir ( ) : raise ValueError if dst . exists ( ) and not dst . is_dir ( ) : raise ValueError if onexist is None : def onexist ( ) : pass _synctree ( src , dst , onexist )
Recursively sync files at directory src to dst
36,455
def estimate_cont_entropy ( X , epsilon = None ) : X = asarray2d ( X ) n_samples , n_features = X . shape if n_samples <= 1 : return 0 nn = NearestNeighbors ( metric = 'chebyshev' , n_neighbors = NUM_NEIGHBORS , algorithm = 'kd_tree' ) nn . fit ( X ) if epsilon is None : n_neighbors = NUM_NEIGHBORS radius = 0 while not...
Estimate the Shannon entropy of a continuous dataset .
36,456
def estimate_entropy ( X , epsilon = None ) : r X = asarray2d ( X ) n_samples , n_features = X . shape if n_features < 1 : return 0 disc_mask = _get_discrete_columns ( X ) cont_mask = ~ disc_mask if np . all ( disc_mask ) : return calculate_disc_entropy ( X ) elif np . all ( cont_mask ) : return estimate_cont_entropy (...
Estimate a dataset's Shannon entropy .
36,457
def estimate_conditional_information ( x , y , z ) : xz = np . concatenate ( ( x , z ) , axis = 1 ) yz = np . concatenate ( ( y , z ) , axis = 1 ) xyz = np . concatenate ( ( xz , y ) , axis = 1 ) epsilon = _calculate_epsilon ( xyz ) h_xz = estimate_entropy ( xz , epsilon ) h_yz = estimate_entropy ( yz , epsilon ) h_xyz...
Estimate the conditional mutual information of three datasets .
36,458
def estimate_mutual_information ( x , y ) : xy = np . concatenate ( ( x , y ) , axis = 1 ) epsilon = _calculate_epsilon ( xy ) h_x = estimate_entropy ( x , epsilon ) h_y = estimate_entropy ( y , epsilon ) h_xy = estimate_entropy ( xy , epsilon ) return max ( 0 , h_x + h_y - h_xy )
Estimate the mutual information of two datasets .
36,459
def get_diff_endpoints_from_commit_range ( repo , commit_range ) : if not commit_range : raise ValueError ( 'commit_range cannot be empty' ) result = re_find ( COMMIT_RANGE_REGEX , commit_range ) if not result : raise ValueError ( 'Expected diff str of the form \'a..b\' or \'a...b\' (got {})' . format ( commit_range ) ...
Get endpoints of a diff given a commit range
36,460
def set_config_variables ( repo , variables ) : with repo . config_writer ( ) as writer : for k , value in variables . items ( ) : section , option = k . split ( '.' ) writer . set_value ( section , option , value ) writer . release ( )
Set config variables
36,461
def validate ( self ) : changes = self . change_collector . collect_changes ( ) features = [ ] imported_okay = True for importer , modname , modpath in changes . new_feature_info : try : mod = importer ( ) features . extend ( _get_contrib_features ( mod ) ) except ( ImportError , SyntaxError ) : logger . info ( 'Failed...
Collect and validate all new features
36,462
def load_config_at_path ( path ) : if path . exists ( ) and path . is_file ( ) : with path . open ( 'r' ) as f : return yaml . load ( f , Loader = yaml . SafeLoader ) else : raise ConfigurationError ( "Couldn't find ballet.yml config file." )
Load config at exact path
36,463
def config_get ( config , * path , default = None ) : o = object ( ) result = get_in ( config , path , default = o ) if result is not o : return result else : return default
Get a configuration option following a path through the config
36,464
def make_config_get ( conf_path ) : project_root = _get_project_root_from_conf_path ( conf_path ) config = load_config_in_dir ( project_root ) return partial ( config_get , config )
Return a function to get configuration options for a specific project
36,465
def relative_to_contrib ( diff , project ) : path = pathlib . Path ( diff . b_path ) contrib_path = project . contrib_module_path return path . relative_to ( contrib_path )
Compute relative path of changed file to contrib dir
36,466
def pr_num ( self ) : result = get_pr_num ( repo = self . repo ) if result is None : result = get_travis_pr_num ( ) return result
Return the PR number or None if not on a PR
36,467
def branch ( self ) : result = get_branch ( repo = self . repo ) if result is None : result = get_travis_branch ( ) return result
Return the branch name or None if it cannot be determined
36,468
def asarray2d ( a ) : arr = np . asarray ( a ) if arr . ndim == 1 : arr = arr . reshape ( - 1 , 1 ) return arr
Cast to 2d array
36,469
def indent ( text , n = 4 ) : _indent = ' ' * n return '\n' . join ( _indent + line for line in text . split ( '\n' ) )
Indent each line of text by n spaces
36,470
def has_nans ( obj ) : nans = np . isnan ( obj ) while np . ndim ( nans ) : nans = np . any ( nans ) return bool ( nans )
Check if obj has any NaNs
36,471
def needs_path ( f ) : @ wraps ( f ) def wrapped ( pathlike , * args , ** kwargs ) : path = pathlib . Path ( pathlike ) return f ( path , * args , ** kwargs ) return wrapped
Wraps a function that accepts path - like to give it a pathlib . Path
36,472
def import_module_at_path ( modname , modpath ) : modpath = pathlib . Path ( modpath ) . resolve ( ) if modpath . name == '__init__.py' : raise ValueError ( 'Don\'t provide the __init__.py!' ) def is_package ( modpath ) : return modpath . suffix != '.py' def has_init ( dir ) : return dir . joinpath ( '__init__.py' ) . ...
Import module from path that may not be on system path
36,473
def relpath_to_modname ( relpath ) : p = pathlib . Path ( relpath ) if p . name == '__init__.py' : p = p . parent elif p . suffix == '.py' : p = p . with_suffix ( '' ) else : msg = 'Cannot convert a non-python file to a modname' msg_detail = 'The relpath given is: {}' . format ( relpath ) logger . error ( msg + '\n' + ...
Convert relative path to module name
36,474
def modname_to_relpath ( modname , project_root = None , add_init = True ) : parts = modname . split ( '.' ) relpath = pathlib . Path ( * parts ) if project_root is not None : relpath_resolved = pathlib . Path ( project_root ) . joinpath ( relpath ) else : relpath_resolved = relpath if relpath_resolved . is_dir ( ) : i...
Convert module name to relative path .
36,475
def check ( self , feature ) : mapper = feature . as_dataframe_mapper ( ) mapper . fit ( self . X , y = self . y )
Check that fit can be called on reference data
36,476
def check ( self , feature ) : mapper = feature . as_dataframe_mapper ( ) mapper . fit_transform ( self . X , y = self . y )
Check that fit_transform can be called on reference data
36,477
def check ( self , feature ) : mapper = feature . as_dataframe_mapper ( ) X = mapper . fit_transform ( self . X , y = self . y ) assert self . X . shape [ 0 ] == X . shape [ 0 ]
Check that the dimensions of the transformed data are correct
36,478
def check ( self , feature ) : try : buf = io . BytesIO ( ) pickle . dump ( feature , buf , protocol = pickle . HIGHEST_PROTOCOL ) buf . seek ( 0 ) new_feature = pickle . load ( buf ) assert new_feature is not None assert isinstance ( new_feature , Feature ) finally : buf . close ( )
Check that the feature can be pickled
36,479
def check ( self , feature ) : mapper = feature . as_dataframe_mapper ( ) X = mapper . fit_transform ( self . X , y = self . y ) assert not np . any ( np . isnan ( X ) )
Check that the output of the transformer has no missing values
36,480
def make_multi_lagger ( lags , groupby_kwargs = None ) : laggers = [ SingleLagger ( l , groupby_kwargs = groupby_kwargs ) for l in lags ] feature_union = FeatureUnion ( [ ( repr ( lagger ) , lagger ) for lagger in laggers ] ) return feature_union
Return a union of transformers that apply different lags
36,481
def start_new_feature ( ** cc_kwargs ) : project = Project . from_path ( pathlib . Path . cwd ( ) . resolve ( ) ) contrib_dir = project . get ( 'contrib' , 'module_path' ) with tempfile . TemporaryDirectory ( ) as tempdir : output_dir = tempdir cc_kwargs [ 'output_dir' ] = output_dir rendered_dir = render_feature_templ...
Start a new feature within a ballet project
36,482
def get_proposed_feature ( project ) : change_collector = ChangeCollector ( project ) collected_changes = change_collector . collect_changes ( ) try : new_feature_info = one_or_raise ( collected_changes . new_feature_info ) importer , _ , _ = new_feature_info except ValueError : raise BalletError ( 'Too many features c...
Get the proposed feature
36,483
def get_accepted_features ( features , proposed_feature ) : def eq ( feature ) : return feature . source == proposed_feature . source result = lfilter ( complement ( eq ) , features ) if len ( features ) - len ( result ) == 1 : return result elif len ( result ) == len ( features ) : raise BalletError ( 'Did not find ma...
Deselect candidate features from list of all features
36,484
def collect_changes ( self ) : file_diffs = self . _collect_file_diffs ( ) candidate_feature_diffs , valid_init_diffs , inadmissible_diffs = self . _categorize_file_diffs ( file_diffs ) new_feature_info = self . _collect_feature_info ( candidate_feature_diffs ) return CollectedChanges ( file_diffs , candidate_feature_d...
Collect file and feature changes
36,485
def _categorize_file_diffs ( self , file_diffs ) : candidate_feature_diffs = [ ] valid_init_diffs = [ ] inadmissible_files = [ ] for diff in file_diffs : valid , failures = check_from_class ( ProjectStructureCheck , diff , self . project ) if valid : if pathlib . Path ( diff . b_path ) . parts [ - 1 ] != '__init__.py' ...
Partition file changes into admissible and inadmissible changes
36,486
def _collect_feature_info ( self , candidate_feature_diffs ) : project_root = self . project . path for diff in candidate_feature_diffs : path = diff . b_path modname = relpath_to_modname ( path ) modpath = project_root . joinpath ( path ) importer = partial ( import_module_at_path , modname , modpath ) yield importer ...
Collect feature info
36,487
def get_travis_branch ( ) : try : travis_pull_request = get_travis_env_or_fail ( 'TRAVIS_PULL_REQUEST' ) if truthy ( travis_pull_request ) : travis_pull_request_branch = get_travis_env_or_fail ( 'TRAVIS_PULL_REQUEST_BRANCH' ) return travis_pull_request_branch else : travis_branch = get_travis_env_or_fail ( 'TRAVIS_BRAN...
Get current branch per Travis environment variables
36,488
def make_mapper ( features ) : if not features : features = Feature ( input = [ ] , transformer = NullTransformer ( ) ) if not iterable ( features ) : features = ( features , ) return DataFrameMapper ( [ t . as_input_transformer_tuple ( ) for t in features ] , input_df = True )
Make a DataFrameMapper from a feature or list of features
36,489
def _name_estimators ( estimators ) : def get_name ( estimator ) : if isinstance ( estimator , DelegatingRobustTransformer ) : return get_name ( estimator . _transformer ) return type ( estimator ) . __name__ . lower ( ) names = list ( map ( get_name , estimators ) ) counter = dict ( Counter ( names ) ) counter = selec...
Generate names for estimators .
36,490
def _push ( project ) : repo = project . repo remote_name = project . get ( 'project' , 'remote' ) remote = repo . remote ( remote_name ) result = _call_remote_push ( remote ) failures = lfilter ( complement ( did_git_push_succeed ) , result ) if failures : for push_info in failures : logger . error ( 'Failed to push r...
Push default branch and project template branch to remote
36,491
def build ( X_df = None , y_df = None ) : if X_df is None : X_df , _ = load_data ( ) if y_df is None : _ , y_df = load_data ( ) features = get_contrib_features ( ) mapper_X = ballet . feature . make_mapper ( features ) X = mapper_X . fit_transform ( X_df ) encoder_y = get_target_encoder ( ) y = encoder_y . fit_transfor...
Build features and target
36,492
def _write_header ( self , epoch_data : EpochData ) -> None : self . _variables = self . _variables or list ( next ( iter ( epoch_data . values ( ) ) ) . keys ( ) ) self . _streams = epoch_data . keys ( ) header = [ '"epoch_id"' ] for stream_name in self . _streams : header += [ stream_name + '_' + var for var in self ...
Write CSV header row with column names .
36,493
def _write_row ( self , epoch_id : int , epoch_data : EpochData ) -> None : values = [ epoch_id ] for stream_name in self . _streams : for variable_name in self . _variables : column_name = stream_name + '_' + variable_name try : value = epoch_data [ stream_name ] [ variable_name ] except KeyError as ex : err_message =...
Write a single epoch result row to the CSV file .
36,494
def after_epoch ( self , epoch_id : int , epoch_data : EpochData ) -> None : logging . debug ( 'Saving epoch %d data to "%s"' , epoch_id , self . _file_path ) if not self . _header_written : self . _write_header ( epoch_data = epoch_data ) self . _write_row ( epoch_id = epoch_id , epoch_data = epoch_data )
Write a new row to the CSV file with the given epoch data .
36,495
def get_random_name ( sep : str = '-' ) : r = random . SystemRandom ( ) return '{}{}{}' . format ( r . choice ( _left ) , sep , r . choice ( _right ) )
Generate random docker - like name with the given separator .
36,496
def _check_train_time ( self ) -> None : if self . _minutes is not None and ( datetime . now ( ) - self . _training_start ) . total_seconds ( ) / 60 > self . _minutes : raise TrainingTerminated ( 'Training terminated after more than {} minutes' . format ( self . _minutes ) )
Stop the training if the training time exceeded self . _minutes .
36,497
def sanitize_url ( url : str ) -> str : for part in reversed ( url . split ( '/' ) ) : filename = re . sub ( r'[^a-zA-Z0-9_.\-]' , '' , part ) if len ( filename ) > 0 : break else : raise ValueError ( 'Could not create reasonable name for file from url %s' , url ) return filename
Sanitize the given url so that it can be used as a valid filename .
36,498
def _raise_check_aggregation ( aggregation : str ) : if aggregation not in ComputeStats . EXTRA_AGGREGATIONS and not hasattr ( np , aggregation ) : raise ValueError ( 'Aggregation `{}` is not a NumPy function or a member ' 'of EXTRA_AGGREGATIONS.' . format ( aggregation ) )
Check whether the given aggregation is present in NumPy or it is one of EXTRA_AGGREGATIONS .
36,499
def _compute_aggregation ( aggregation : str , data : Iterable [ Any ] ) : ComputeStats . _raise_check_aggregation ( aggregation ) if aggregation == 'nanfraction' : return np . sum ( np . isnan ( data ) ) / len ( data ) if aggregation == 'nancount' : return int ( np . sum ( np . isnan ( data ) ) ) return getattr ( np ,...
Compute the specified aggregation on the given data .