idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
23,200
def login(token, apikey, username, password):
    """Log a user into FloydHub via manual credentials, a browser flow, or an API key."""
    # A successful manual login needs no further work.
    if manual_login_success(token, username, password):
        return

    # Without an API key, try to obtain one through the browser; bail out otherwise.
    if not apikey:
        if not has_browser():
            floyd_logger.error("No browser found, please login manually by creating login key at %s/settings/apikey.", floyd.floyd_web_host)
            sys.exit(1)
        apikey = wait_for_apikey()

    if not apikey:
        floyd_logger.error("Login failed, please see --help for other login options.")
        return

    # Resolve the key to a user and persist the credentials locally.
    authenticated_user = AuthClient().get_user(apikey, is_apikey=True)
    AuthConfigManager.set_apikey(username=authenticated_user.username, apikey=apikey)
    floyd_logger.info("Login Successful as %s", authenticated_user.username)
Login to FloydHub .
23,201
def check_cli_version():
    """Compare the installed CLI version against the server's requirements.

    Exits the process when the installed version is below the server's
    minimum; otherwise only advertises any newer release.
    """
    must_upgrade = False
    remote = VersionClient().get_cli_version()
    local = get_cli_version()

    if LooseVersion(local) < LooseVersion(remote.min_version):
        print("\nYour version of CLI (%s) is no longer compatible with server." % local)
        must_upgrade = True
    elif LooseVersion(local) < LooseVersion(remote.latest_version):
        print("\nNew version of CLI (%s) is now available." % remote.latest_version)
    else:
        return  # already up to date

    prompt = '\nDo you want to upgrade to version %s now?' % remote.latest_version
    if must_upgrade and click.confirm(prompt):
        auto_upgrade()
        sys.exit(0)

    # Manual upgrade instructions (also shown when the user declines).
    lines = ["\nTo manually upgrade run:", "    pip install -U floyd-cli"]
    if is_conda_env():
        lines.append("Or if you prefer to use conda:")
        lines.append("    conda install -y -c conda-forge -c floydhub floyd-cli")
    print("\n".join(lines))
    print("")
    if must_upgrade:
        sys.exit(0)
Check if the current cli version satisfies the server requirements
23,202
def request(self, method, url, params=None, data=None, files=None, json=None, timeout=5, headers=None, skip_auth=False):
    """Execute an HTTP request against the Floyd API using the requests library.

    Sends the CLI version header on every call and the stored Authorization
    header when one is set. Exits the process on connection errors/timeouts;
    raises (via check_response_status) for non-2xx responses.

    NOTE(review): `skip_auth` is accepted but never consulted in this body —
    the auth header is attached whenever self.auth_header is set; confirm intent.
    """
    request_url = self.base_url + url
    floyd_logger.debug("Starting request to url: %s with params: %s, data: %s", request_url, params, data)
    request_headers = {'x-floydhub-cli-version': get_cli_version()}
    if self.auth_header:
        request_headers["Authorization"] = self.auth_header
    if headers:
        request_headers.update(headers)
    try:
        response = requests.request(method,
                                    request_url,
                                    params=params,
                                    data=data,
                                    json=json,
                                    headers=request_headers,
                                    files=files,
                                    timeout=timeout)
    except requests.exceptions.ConnectionError as exception:
        floyd_logger.debug("Exception: %s", exception, exc_info=True)
        sys.exit("Cannot connect to the Floyd server. Check your internet connection.")
    except requests.exceptions.Timeout as exception:
        floyd_logger.debug("Exception: %s", exception, exc_info=True)
        sys.exit("Connection to FloydHub server timed out. Please retry or check your internet connection.")
    floyd_logger.debug("Response Content: %s, Headers: %s" % (response.content, response.headers))
    self.check_response_status(response)
    return response
Execute the request using requests library
23,203
def download(self, url, filename, relative=False, headers=None, timeout=5):
    """Stream the file at `url` (prefixed with base_url when `relative`) into
    `filename`, with a progress bar when a content length is known.
    Returns the filename; exits the process on connection errors."""
    request_url = self.base_url + url if relative else url
    floyd_logger.debug("Downloading file from url: {}".format(request_url))

    request_headers = {}
    if self.auth_header:
        request_headers["Authorization"] = self.auth_header
    if headers:
        request_headers.update(headers)

    try:
        response = requests.get(request_url,
                                headers=request_headers,
                                timeout=timeout,
                                stream=True)
        self.check_response_status(response)
        with open(filename, 'wb') as f:
            # Prefer the Floyd-specific length header, fall back to the standard one.
            content_length = (response.headers.get('x-floydhub-content-length')
                              or response.headers.get('content-length'))
            if content_length:
                chunks = progress.bar(response.iter_content(chunk_size=1024),
                                      expected_size=(int(content_length) / 1024) + 1)
            else:
                chunks = response.iter_content(chunk_size=1024)
            for chunk in chunks:
                if chunk:
                    f.write(chunk)
        return filename
    except requests.exceptions.ConnectionError as exception:
        floyd_logger.debug("Exception: {}".format(exception))
        sys.exit("Cannot connect to the Floyd server. Check your internet connection.")
Download the file from the given url at the current path
23,204
def download_tar(self, url, untar=True, delete_after_untar=False, destination_dir='.'):
    """Download a tar file from `url` as 'output.tar', optionally untar it into
    `destination_dir` and delete the archive afterwards.

    Returns the downloaded filename, or False when the server rejects the
    download (FloydException).

    Fix: the tarfile handle is now managed with a context manager, so it is
    closed even if extractall() raises (the previous version leaked it).
    """
    try:
        floyd_logger.info("Downloading the tar file to the current directory ...")
        filename = self.download(url=url, filename='output.tar')
        if filename and untar:
            floyd_logger.info("Untarring the contents of the file ...")
            with tarfile.open(filename) as tar:
                tar.extractall(path=destination_dir)
            if delete_after_untar:
                floyd_logger.info("Cleaning up the tar file ...")
                os.remove(filename)
        return filename
    except FloydException as e:
        floyd_logger.info("Download URL ERROR! {}".format(e.message))
        return False
Download and optionally untar the tar file from the given url
23,205
def check_response_status(self, response):
    """Raise the matching FloydException subclass for any non-2xx response."""
    status = response.status_code
    if 200 <= status < 300:
        return  # success, nothing to do

    # Best-effort extraction of the server's error message for the debug log.
    try:
        message = response.json()["errors"]
    except Exception:
        message = None
    floyd_logger.debug("Error received : status_code: {}, message: {}".format(status, message or response.content))

    if status == 400:
        raise BadRequestException(response)
    if status == 401:
        raise AuthenticationException()
    if status == 403:
        raise AuthorizationException(response)
    if status == 404:
        raise NotFoundException()
    if status == 429:
        raise OverLimitException(response.json().get("message"))
    if status == 502:
        raise BadGatewayException()
    if status == 504:
        raise GatewayTimeoutException()
    if status == 423:
        raise LockedException()
    if 500 <= status < 600:
        if 'Server under maintenance' in response.content.decode():
            raise ServerException('Server under maintenance, please try again later.')
        raise ServerException()
    raise FloydException(message="An error occurred. Server response: {}".format(status))
Check if response is successful . Else raise Exception .
23,206
def cli(verbose):
    """Floyd CLI entry point: point the client at the server endpoints,
    configure logging, and verify the CLI version against the server.

    NOTE(review): this hard-codes the *dev* endpoints — presumably a
    development build; confirm before any release.
    """
    floyd.floyd_host = floyd.floyd_web_host = "https://dev.floydhub.com"
    floyd.tus_server_endpoint = "https://upload-v2-dev.floydhub.com/api/v1/upload/"
    configure_logger(verbose)
    check_cli_version()
Floyd CLI interacts with FloydHub server and executes your commands . More help is available under each command listed below .
23,207
def get_unignored_file_paths(ignore_list=None, whitelist=None):
    """Walk "." recursively and return every file path that survives the
    given ignore/whitelist glob patterns."""
    if ignore_list is None:
        ignore_list = []
    if whitelist is None:
        whitelist = []

    kept = []
    for root, dirs, files in os.walk("."):
        floyd_logger.debug("Root:%s, Dirs:%s", root, dirs)

        if ignore_path(unix_style_path(root), ignore_list, whitelist):
            # Prune the walk: emptying `dirs` in place stops os.walk from descending.
            dirs[:] = []
            floyd_logger.debug("Ignoring directory : %s", root)
            continue

        for name in files:
            full_path = os.path.join(root, name)
            if ignore_path(unix_style_path(full_path), ignore_list, whitelist):
                floyd_logger.debug("Ignoring file : %s", name)
                continue
            kept.append(full_path)

    return kept
Given an ignore_list and a whitelist of glob patterns returns the list of unignored file paths in the current directory and its subdirectories
23,208
def ignore_path(path, ignore_list=None, whitelist=None):
    """Decide whether `path` should be ignored given ignore and whitelist globs.

    With no ignore_list at all, everything is ignored (caller contract);
    a whitelist hit rescues an otherwise-ignored path.
    """
    if ignore_list is None:
        return True
    ignored = matches_glob_list(path, ignore_list)
    if whitelist is None:
        return ignored
    return ignored and not matches_glob_list(path, whitelist)
Returns a boolean indicating if a path should be ignored given an ignore_list and a whitelist of glob patterns .
23,209
def matches_glob_list ( path , glob_list ) : for glob in glob_list : try : if PurePath ( path ) . match ( glob ) : return True except TypeError : pass return False
Given a list of glob patterns returns a boolean indicating if a path matches any glob in the list
23,210
def get_files_in_current_directory(file_type):
    """Collect (file_type, (unix_path, file_handle, mime)) upload tuples for
    every unignored file under the current directory, plus their total size.

    Respects the project's ignore configuration via FloydIgnoreManager.
    NOTE(review): the handles are opened here and not closed — presumably so
    the upload request can stream them; confirm the caller owns cleanup.
    """
    local_files = []
    total_file_size = 0
    ignore_list, whitelist = FloydIgnoreManager.get_lists()
    floyd_logger.debug("Ignoring: %s", ignore_list)
    floyd_logger.debug("Whitelisting: %s", whitelist)
    file_paths = get_unignored_file_paths(ignore_list, whitelist)
    for file_path in file_paths:
        # 'text/plain' is used as a blanket MIME type for all uploads.
        local_files.append((file_type, (unix_style_path(file_path), open(file_path, 'rb'), 'text/plain')))
        total_file_size += os.path.getsize(file_path)
    return (local_files, total_file_size)
Gets the list of files in the current directory and subdirectories . Respects . floydignore file if present
23,211
def __get_nfiles_to_compress(self):
    """Count every file and directory under self.source_dir (iteratively,
    via a stack), accumulating into self.__files_to_compress.

    Exits the process on a permission error.
    """
    floyd_logger.info("Get number of files to compress... (this could take a few seconds)")
    pending = [self.source_dir]
    try:
        while pending:
            current = pending.pop()
            for entry in scandir(current):
                if entry.is_dir():
                    # Directories count too, and are queued for traversal.
                    pending.append(entry.path)
                    self.__files_to_compress += 1
                elif entry.is_file():
                    self.__files_to_compress += 1
    except OSError as e:
        if e.errno == errno.EACCES:
            self.source_dir = os.getcwd() if self.source_dir == '.' else self.source_dir
            sys.exit(("Permission denied. Make sure to have read permission "
                      "for all the files and directories in the path: %s")
                     % (self.source_dir))
    floyd_logger.info("Compressing %d files", self.__files_to_compress)
Return the number of files to compress
23,212
def create_tarfile(self):
    """Create a gzipped tar of self.source_dir at self.filename, with a
    progress bar driven by the tar filter callback.

    On permission errors, out-of-space errors, or Ctrl-C the partially
    written archive's directory is purged and the process exits.
    """
    floyd_logger.info("Compressing data...")
    self.__compression_bar = ProgressBar(expected_size=self.__files_to_compress, filled_char='=')

    def dfilter_file_counter(tarinfo):
        # Called by tarfile for every member: advance the progress bar.
        self.__compression_bar.show(self.__files_compressed)
        self.__files_compressed += 1
        return tarinfo

    def warn_purge_exit(info_msg, filename, progress_bar, exit_msg):
        # Finish the bar, remove the archive's directory, and exit.
        progress_bar.done()
        floyd_logger.info(info_msg)
        rmtree(os.path.dirname(filename))
        sys.exit(exit_msg)

    try:
        # Restore the default SIGINT handler so Ctrl-C raises KeyboardInterrupt.
        signal.signal(signal.SIGINT, signal.default_int_handler)
        with tarfile.open(self.filename, "w:gz") as tar:
            tar.add(self.source_dir, arcname=os.path.basename(self.source_dir), filter=dfilter_file_counter)
        self.__compression_bar.done()
    except (OSError, IOError) as e:
        if e.errno == errno.EACCES:
            self.source_dir = os.getcwd() if self.source_dir == '.' else self.source_dir
            warn_purge_exit(info_msg="Permission denied. Removing compressed data...",
                            filename=self.filename,
                            progress_bar=self.__compression_bar,
                            exit_msg=("Permission denied. Make sure to have read permission "
                                      "for all the files and directories in the path: %s") % (self.source_dir))
        elif e.errno == errno.ENOSPC:
            dir_path = os.path.dirname(self.filename)
            warn_purge_exit(info_msg="No space left. Removing compressed data...",
                            filename=self.filename,
                            progress_bar=self.__compression_bar,
                            exit_msg=("No space left when compressing your data in: %s.\n"
                                      "Make sure to have enough space before uploading your data.") % (os.path.abspath(dir_path)))
    except KeyboardInterrupt:
        warn_purge_exit(info_msg="Ctrl-C signal detected: Removing compressed data...",
                        filename=self.filename,
                        progress_bar=self.__compression_bar,
                        exit_msg="Stopped the data upload gracefully.")
Create a tar file with the contents of the current directory
23,213
def create(self, data):
    """POST a resumable create request for `data`; return the parsed JSON
    response, or None when the server rejects the request."""
    try:
        floyd_logger.info("Making create request to server...")
        payload = data.to_dict()
        payload["resumable"] = True
        return self.request("POST", self.url, json=payload).json()
    except BadRequestException as e:
        if 'Dataset not found, ID' in e.message:
            floyd_logger.error('Data create: ERROR! Please run "floyd data init DATASET_NAME" before upload.')
        else:
            floyd_logger.error('Data create: ERROR! %s', e.message)
        return None
    except FloydException as e:
        floyd_logger.error("Data create: ERROR! %s", e.message)
        return None
Create a temporary directory for the tar file that will be removed at the end of the operation .
23,214
def get_command_line(instance_type, env, message, data, mode, open_notebook, command_str):
    """Reconstruct the full `floyd run` command line for the given options."""
    parts = ["floyd", "run"]
    if instance_type:
        parts.append('--' + INSTANCE_NAME_MAP[instance_type])
    if env and env != DEFAULT_ENV:
        parts.extend(["--env", env])
    if message:
        parts.extend(["--message", shell_quote(message)])
    if data:
        for data_item in data:
            pieces = data_item.split(':')
            if len(pieces) > 1:
                # Normalize only the dataset name, keeping the mount point.
                data_item = normalize_data_name(pieces[0], use_data_config=False) + ':' + pieces[1]
            parts.extend(["--data", data_item])
    if mode and mode != "job":
        parts.extend(["--mode", mode])
    if mode == 'jupyter':
        if not open_notebook:
            parts.append("--no-open")
    elif command_str:
        parts.append(shell_quote(command_str))
    return ' '.join(parts)
Return a string representing the full floyd command entered in the command line
23,215
def restart(ctx, job_name, data, open_notebook, env, message, gpu, cpu, gpup, cpup, command):
    """Restart a finished job as a new job, optionally overriding instance
    type, environment, data mounts, description, and command.

    NOTE(review): `gpup` and `cpup` are accepted but never read in this
    body — confirm whether "+" instance types were meant to be handled here.
    """
    if len(env) > 1:
        floyd_logger.error("You passed more than one environment: {}. Please specify a single environment.".format(env))
        sys.exit(1)
    env = env[0]
    parameters = {}
    expt_client = ExperimentClient()
    try:
        job = expt_client.get(normalize_job_name(job_name))
    except FloydException:
        # Fall back to the raw name when normalization does not resolve.
        job = expt_client.get(job_name)
    if gpu:
        instance_type = G1_INSTANCE_TYPE
    elif cpu:
        instance_type = C1_INSTANCE_TYPE
    else:
        # Default to the original job's instance type.
        instance_type = job.instance_type
    if instance_type is not None:
        parameters['instance_type'] = instance_type
    else:
        instance_type = job.instance_type
    if env is not None:
        arch = INSTANCE_ARCH_MAP[instance_type]
        if not validate_env(env, arch):
            sys.exit(1)
        parameters['env'] = env
    success, data_ids, show_data_info = process_data_ids(data)
    if not success:
        sys.exit(1)
    if data_ids:
        parameters['data_ids'] = data_ids
    if message:
        parameters['description'] = message
    if command:
        parameters['command'] = ' '.join(command)
    floyd_logger.info('Restarting job %s...', job_name)
    new_job_info = expt_client.restart(job.id, parameters=parameters)
    if not new_job_info:
        floyd_logger.error("Failed to restart job")
        sys.exit(1)
    show_new_job_info(expt_client, new_job_info['name'], new_job_info, job.mode, open_notebook, show_data_info)
Restart a finished job as a new job .
23,216
def filter_user(user, using='records', interaction=None, part_of_week='allweek', part_of_day='allday'):
    """Filter a user's records (or recharges) by interaction type, part of
    the week, and part of the day; returns a list.

    Raises KeyError for invalid part_of_week / part_of_day values.
    """
    records = user.recharges if using == 'recharges' else user.records

    # Interaction filter (lazy generator chaining, materialized at the end).
    if interaction == 'callandtext':
        records = (r for r in records if r.interaction in ['call', 'text'])
    elif interaction is not None:
        records = (r for r in records if r.interaction == interaction)

    # Part-of-week filter.
    if part_of_week == 'weekday':
        records = (r for r in records if r.datetime.isoweekday() not in user.weekend)
    elif part_of_week == 'weekend':
        records = (r for r in records if r.datetime.isoweekday() in user.weekend)
    elif part_of_week != 'allweek':
        raise KeyError("{} is not a valid value for part_of_week. it should be 'weekday', "
                       "'weekend' or 'allweek'.".format(part_of_week))

    # The night window may wrap around midnight.
    wraps_midnight = not (user.night_start < user.night_end)

    def is_night(r):
        t = r.datetime.time()
        if wraps_midnight:
            return not (user.night_end < t < user.night_start)
        return user.night_start < t < user.night_end

    # Part-of-day filter.
    if part_of_day == 'day':
        records = (r for r in records if not is_night(r))
    elif part_of_day == 'night':
        records = (r for r in records if is_night(r))
    elif part_of_day != 'allday':
        raise KeyError("{} is not a valid value for part_of_day. It should be 'day', 'night' or 'allday'.".format(part_of_day))

    return list(records)
Filter records of a User object by interaction, part of week, and part of day .
23,217
def positions_binning(records):
    """Yield, for each consecutive 30-minute chunk of records, the most
    prevalent position (ties resolve to the first maximal element).

    NOTE(review): the chunk key omits the month — harmless for consecutive
    records (the day component changes first), but worth confirming.
    """
    def half_hour_key(dt):
        return (dt.year, dt.day, dt.hour, dt.minute // 30)

    for _, group in itertools.groupby(records, key=lambda r: half_hour_key(r.datetime)):
        positions = [record.position for record in group]
        yield max(positions, key=positions.count)
Bin records by chunks of 30 minutes returning the most prevalent position .
23,218
def _group_range(records, method):
    """Yield datetimes spanning the records' extrema, stepped by one `method`
    unit (a DATE_GROUPERS key such as 'day', 'week', 'month', or 'year')."""
    first = records[0].datetime
    last = records[-1].datetime
    group_key = DATE_GROUPERS[method]

    if method == "year":
        def step(current):
            return current.replace(year=current.year + 1)
    elif method == "month":
        def step(current):
            # divmod rolls the month over; month 0 means December of the same year.
            years, month = divmod(current.month + 1, 12)
            if month == 0:
                month = 12
                years -= 1
            return current.replace(year=current.year + years, month=month)
    else:
        def step(current):
            return current + timedelta(**{method + 's': 1})

    current = first
    while group_key(current) <= group_key(last):
        yield current
        current = step(current)
Yield the range of all dates between the extrema of a list of records separated by a given time delta .
23,219
def group_records(records, groupby='week'):
    """Group consecutive records by year/month/week/day; returns a generator
    yielding one list per group."""
    key_of = DATE_GROUPERS[groupby]

    def chunks():
        for _, group in itertools.groupby(records, key=lambda r: key_of(r.datetime)):
            yield list(group)

    return chunks()
Group records by year month week or day .
23,220
def infer_type(data):
    """Classify an indicator result as 'scalar', 'summarystats',
    'distribution_scalar', or 'distribution_summarystats'.

    Raises TypeError for anything else.
    """
    if isinstance(data, (type(None), numbers.Number)):
        return 'scalar'
    if isinstance(data, SummaryStats):
        return 'summarystats'
    if not hasattr(data, "__len__"):
        raise TypeError("{} is not a valid input. It should be a number, a SummaryStats "
                        "object, or a list".format(data))

    # A sized container: classify by its first non-None element.
    cleaned = [item for item in data if item is not None]
    if len(cleaned) == 0 or isinstance(cleaned[0], numbers.Number):
        return 'distribution_scalar'
    if isinstance(cleaned[0], SummaryStats):
        return 'distribution_summarystats'
    raise TypeError("{} is not a valid input. It should be a number, a SummaryStats "
                    "object, or None".format(cleaned[0]))
Infer the type of objects returned by indicators .
23,221
def grouping(f=None, interaction=['call', 'text'], summary='default', user_kwd=False):
    """Decorator for indicator functions: wraps `f` so it is applied over
    grouped/divided records via _generic_wrapper.

    NOTE(review): the mutable default `interaction=['call', 'text']` is a
    shared list; it is never mutated in this body, but a tuple default
    would be safer.
    """
    if f is None:
        # Called with arguments: return a decorator pre-bound to them.
        return partial(grouping, user_kwd=user_kwd, interaction=interaction, summary=summary)

    def wrapper(user, groupby='week', interaction=interaction, summary=summary, split_week=False, split_day=False, filter_empty=True, datatype=None, **kwargs):
        if interaction is None:
            interaction = ['call', 'text']
        parameters = divide_parameters(split_week, split_day, interaction)
        operations = {
            'grouping': {'using': 'records', 'binning': False, 'groupby': groupby, 'filter_empty': filter_empty, 'divide_by': parameters},
            'apply': {'user_kwd': user_kwd, 'summary': summary, 'kwargs': kwargs}
        }
        # Validate interaction values before doing any work.
        for i in parameters['interaction']:
            if i not in ['callandtext', 'call', 'text', 'location']:
                raise ValueError("%s is not a valid interaction value. Only "
                                 "'call', 'text', and 'location' are accepted." % i)
        return _generic_wrapper(f, user, operations, datatype)

    # Expose f's keyword arguments on the wrapped signature.
    return advanced_wrap(f, wrapper)
grouping is a decorator for indicator functions used to simplify the source code .
23,222
def kurtosis(data):
    """Return the kurtosis of `data` (fourth moment over squared second
    moment); None for empty input, 0 when the variance term vanishes."""
    if len(data) == 0:
        return None
    fourth = moment(data, 4)
    variance_squared = moment(data, 2) ** 2.
    if variance_squared == 0:
        return 0
    return fourth / variance_squared
Return the kurtosis for data .
23,223
def skewness(data):
    """Return the skewness of `data` (third moment over variance^1.5);
    None for empty input, 0.0 when the variance term vanishes."""
    if len(data) == 0:
        return None
    third = moment(data, 3)
    variance_term = moment(data, 2) ** 1.5
    if variance_term == 0:
        return 0.
    return third / variance_term
Returns the skewness of data .
23,224
def median(data):
    """Return the median of `data` using the mean-of-middle-two method;
    None for an empty sequence."""
    if len(data) == 0:
        return None
    ordered = sorted(data)
    n = len(ordered)
    upper_mid = ordered[n // 2]
    lower_mid = ordered[(n - 1) // 2]
    return float((upper_mid + lower_mid) / 2.)
Return the median of numeric data using the mean of middle two method . If data is empty None is returned .
23,225
def entropy(data):
    """Compute the Shannon entropy (natural log) of a list of counts.

    Returns None for an empty list. Fixes: zero counts previously raised a
    math.log(0) domain error and an all-zero list raised ZeroDivisionError;
    zero counts now contribute nothing (lim x->0 of x*log(x) == 0) and an
    all-zero list yields 0.0.
    """
    if len(data) == 0:
        return None
    total = sum(data)
    if total == 0:
        # No observations at all: entropy is zero by convention.
        return 0.0
    # Skip zero counts: math.log(0) is a domain error and the limit is 0.
    return -sum((float(count) / total) * math.log(float(count) / total)
                for count in data if count)
Compute the Shannon entropy a measure of uncertainty .
23,226
def advanced_wrap(f, wrapper):
    """Wrap the decorated function `f` with `wrapper` while exposing f's own
    keyword arguments in the resulting signature.

    Builds (via eval) a lambda whose signature merges wrapper's arguments
    with f's keyword arguments, so introspection tools see the combined API.

    NOTE(review): inspect.getargspec was removed in Python 3.11 — this needs
    inspect.getfullargspec / inspect.signature on modern interpreters.
    """
    f_sig = list(inspect.getargspec(f))
    wrap_sig = list(inspect.getargspec(wrapper))
    # f's keyword arguments are the trailing args that carry defaults.
    if f_sig[3] is None or f_sig[3] == []:
        f_sig[3], f_kwargs = [], []
    else:
        f_kwargs = f_sig[0][-len(f_sig[3]):]
    # Graft each of f's keyword arguments (and its default) onto wrapper's signature.
    for key, default in zip(f_kwargs, f_sig[3]):
        wrap_sig[0].append(key)
        wrap_sig[3] = wrap_sig[3] + (default,)
    wrap_sig[2] = None  # drop **kwargs from the exposed signature
    src = "lambda %s: " % (inspect.formatargspec(*wrap_sig)[1:-1])
    # Forward the merged keyword arguments by name into `wrapper`.
    new_args = inspect.formatargspec(wrap_sig[0], wrap_sig[1], wrap_sig[2], f_kwargs, formatvalue=lambda x: '=' + x)
    src += 'wrapper%s\n' % new_args
    decorated = eval(src, locals())
    decorated.func = f
    return update_wrapper(decorated, f)
Wrap a decorated function while keeping the same keyword arguments
23,227
def percent_records_missing_location(user, method=None):
    """Return the fraction of the user's records with no resolvable location
    (0.0 when there are no records at all)."""
    if len(user.records) == 0:
        return 0.
    missing = sum(1 for record in user.records
                  if record.position._get_location(user) is None)
    return float(missing) / len(user.records)
Return the percentage of records missing a location parameter .
23,228
def percent_overlapping_calls(records, min_gab=300):
    """Return the fraction of calls whose projected end time (duration minus
    `min_gab` seconds) reaches the start of the following call; 0.0 when
    there are no calls."""
    calls = [r for r in records if r.interaction == "call"]
    if not calls:
        return 0.
    overlapping = 0
    for current, following in zip(calls, calls[1:]):
        projected_end = current.datetime + timedelta(seconds=current.call_duration - min_gab)
        if projected_end >= following.datetime:
            overlapping += 1
    return float(overlapping) / len(calls)
Return the percentage of calls that overlap with the next call .
23,229
def antennas_missing_locations(user, Method=None):
    """Return how many distinct antennas in the user's records are absent
    from user.antennas (i.e. have no known location).

    The unused `Method` parameter is kept for interface compatibility.
    """
    seen_antennas = {record.position.antenna for record in user.records
                     if record.position.antenna is not None}
    return sum(1 for antenna in seen_antennas if user.antennas.get(antenna) is None)
Return the number of antennas missing locations in the records of a given user .
23,230
def bandicoot_code_signature():
    """Return a SHA-1 hex digest over every .py file under MAIN_DIRECTORY,
    walking files in sorted order for a stable signature."""
    digest = hashlib.sha1()
    for root, dirs, files in os.walk(MAIN_DIRECTORY):
        for filename in sorted(files):
            if not filename.endswith('.py'):
                continue
            file_path = os.path.join(root, filename)
            size = os.path.getsize(file_path)
            with open(file_path, 'rb') as source:
                # Stream in 256 KiB chunks until the whole file is consumed.
                while source.tell() != size:
                    digest.update(source.read(0x40000))
    return digest.hexdigest()
Returns a unique hash of the Python source code in the current bandicoot module using the cryptographic hash function SHA - 1 .
23,231
def supported(cls, stream=sys.stdout):
    """Class method: report whether `stream` supports curses-based coloring.

    Returns False for non-TTY streams or when curses is unavailable.
    """
    if not stream.isatty():
        return False
    try:
        import curses
    except ImportError:
        return False
    else:
        try:
            try:
                return curses.tigetnum("colors") > 2
            except curses.error:
                # The first query can fail before setupterm() has run; retry after it.
                curses.setupterm()
                return curses.tigetnum("colors") > 2
        except:
            # NOTE(review): this bare except re-raises, which makes the final
            # `return False` below unreachable; the intent was probably to
            # swallow the error and return False — confirm upstream.
            raise
        return False
A class method that returns True if the current platform supports coloring terminal output using this method . Returns False otherwise .
23,232
def write(self, text, color):
    """Write `text` to the stream wrapped in the ANSI escape code for `color`."""
    ansi_code = self._colors[color]
    colored = '\x1b[{}m{}\x1b[0m'.format(ansi_code, text)
    self.stream.write(colored)
Write the given text to the stream in the given color .
23,233
def percent_at_home(positions, user):
    """Return the share of positions matching the user's home antenna;
    None when no home is known, 0 for an empty position list."""
    if not user.has_home:
        return None
    if len(positions) == 0:
        return 0
    at_home = sum(1 for p in positions if p == user.home)
    return float(at_home) / len(positions)
The percentage of interactions the user had while he was at home .
23,234
def entropy_of_antennas(positions, normalize=False):
    """Entropy of the visited-antenna distribution, optionally normalized by
    the log of the number of distinct antennas."""
    counts = Counter(positions)
    raw = entropy(list(counts.values()))
    distinct = len(counts)
    if normalize and distinct > 1:
        return raw / math.log(distinct)
    return raw
The entropy of visited antennas .
23,235
def churn_rate(user, summary='default', **kwargs):
    """Weekly tower-frequency churn: the distribution of cosine *distances*
    (1 - cosine similarity) between consecutive weeks' antenna-frequency
    vectors."""
    if len(user.records) == 0:
        return statistics([], summary=summary)

    query = {
        'groupby': 'week',
        'divide_by': OrderedDict([('part_of_week', ['allweek']), ('part_of_day', ['allday'])]),
        'using': 'records',
        'filter_empty': True,
        'binning': True
    }
    rv = grouping_query(user, query)
    # NOTE(review): rv[0][1] is assumed to be the per-week position lists —
    # depends on grouping_query's result shape; confirm there.
    weekly_positions = rv[0][1]
    # Every position ever visited, so each week's vector shares the same axes.
    all_positions = list(set(p for l in weekly_positions for p in l))
    frequencies = {}
    cos_dist = []
    for week, week_positions in enumerate(weekly_positions):
        count = Counter(week_positions)
        total = sum(count.values())
        frequencies[week] = [count.get(p, 0) / total for p in all_positions]
    all_indexes = range(len(all_positions))
    # Cosine distance between each pair of consecutive weekly vectors.
    for f_1, f_2 in pairwise(list(frequencies.values())):
        num = sum(f_1[a] * f_2[a] for a in all_indexes)
        denom_1 = sum(f ** 2 for f in f_1)
        denom_2 = sum(f ** 2 for f in f_2)
        cos_dist.append(1 - num / (denom_1 ** .5 * denom_2 ** .5))
    return statistics(cos_dist, summary=summary)
Computes the frequency spent at every towers each week and returns the distribution of the cosine similarity between two consecutives week .
23,236
def describe(self):
    """Print a short checklist-style summary of the user's loaded data to
    standard output (records, contacts, attributes, antennas, recharges,
    home, texts, calls, network)."""
    def format_int(name, n):
        # Singular form when n is 0 or 1 (drops the trailing 's').
        if n == 0 or n == 1:
            return "%i %s" % (n, name[:-1])
        else:
            return "%i %s" % (n, name)

    # Colored checkbox prefixes for each summary line.
    empty_box = Colors.OKGREEN + '[ ]' + Colors.ENDC + ' '
    filled_box = Colors.OKGREEN + '[x]' + Colors.ENDC + ' '

    if self.start_time is None:
        print(empty_box + "No records stored")
    else:
        print((filled_box + format_int("records", len(self.records)) + " from %s to %s" % (self.start_time, self.end_time)))

    # Total contact count across calls and texts, ungrouped.
    nb_contacts = bc.individual.number_of_contacts(self, interaction='callandtext', groupby=None)
    nb_contacts = nb_contacts['allweek']['allday']['callandtext']
    if nb_contacts:
        print(filled_box + format_int("contacts", nb_contacts))
    else:
        print(empty_box + "No contacts")

    if self.has_attributes:
        print(filled_box + format_int("attributes", len(self.attributes)))
    else:
        print(empty_box + "No attribute stored")

    if len(self.antennas) == 0:
        print(empty_box + "No antenna stored")
    else:
        print(filled_box + format_int("antennas", len(self.antennas)))

    if self.has_recharges:
        print(filled_box + format_int("recharges", len(self.recharges)))
    else:
        print(empty_box + "No recharges")

    if self.has_home:
        print(filled_box + "Has home")
    else:
        print(empty_box + "No home")

    if self.has_text:
        print(filled_box + "Has texts")
    else:
        print(empty_box + "No texts")

    if self.has_call:
        print(filled_box + "Has calls")
    else:
        print(empty_box + "No calls")

    if self.has_network:
        print(filled_box + "Has network")
    else:
        print(empty_box + "No network")
Generates a short description of the object and writes it to the standard output .
23,237
def recompute_home(self):
    """Recompute and store the user's home antenna: the most common binned
    position among night-time records. Returns None when no candidate exists."""
    if self.night_start < self.night_end:
        # Night interval contained within a single day.
        is_night = lambda r: self.night_end > r.datetime.time() > self.night_start
    else:
        # Night interval wraps past midnight.
        is_night = lambda r: not (self.night_end < r.datetime.time() < self.night_start)

    night_positions = list(positions_binning(filter(is_night, self._records)))
    if night_positions:
        self.home = Counter(night_positions).most_common()[0][0]
    else:
        self.home = None
    self.reset_cache()
    return self.home
Return the antenna where the user spends most of his time at night . None is returned if there are no candidates for a home antenna
23,238
def set_home(self, new_home):
    """Set the user's home from a Position, a (lat, lon) tuple, or an antenna id."""
    kind = type(new_home)
    # Exact type checks preserved on purpose (subclasses fall through to antenna).
    if kind is Position:
        home = new_home
    elif kind is tuple:
        home = Position(location=new_home)
    else:
        home = Position(antenna=new_home)
    self.home = home
    self.reset_cache()
Sets the user's home . The argument can be a Position object or a tuple containing location data .
23,239
def interevent_time_recharges(recharges):
    """Distribution of elapsed seconds between consecutive recharges."""
    gaps = [(later - earlier).total_seconds()
            for earlier, later in pairwise(r.datetime for r in recharges)]
    return summary_stats(gaps)
Return the distribution of time between consecutive recharges of the user .
23,240
def percent_pareto_recharges(recharges, percentage=0.8):
    """Return the fraction of recharges (largest amounts first) needed to
    reach `percentage` of the total recharged amount.

    Fix: an empty recharge list now returns None (matching the other
    indicators' empty-input convention) instead of raising NameError /
    ZeroDivisionError.
    """
    if not recharges:
        return None
    amounts = sorted((r.amount for r in recharges), reverse=True)
    total = sum(amounts)
    running = 0
    for count, amount in enumerate(amounts):
        running += amount
        if running >= percentage * total:
            break
    return (count + 1) / len(recharges)
Percentage of recharges that account for 80% of total recharged amount .
23,241
def average_balance_recharges(user, **kwargs):
    """Estimate the average daily balance from the user's recharges, assuming
    linear usage between recharges and an empty balance just before each one.
    """
    balance = 0
    for r1, r2 in pairwise(user.recharges):
        # Triangle area of a linearly-decreasing balance over the interval.
        # NOTE(review): min(1, days) caps the interval at one day — the
        # docstring's "average daily balance" suggests max(1, ...) may have
        # been intended; confirm against upstream before changing.
        balance += r1.amount * min(1, (r2.datetime - r1.datetime).days) / 2
    first_recharge = user.recharges[0]
    last_recharge = user.recharges[-1]
    duration = (last_recharge.datetime - first_recharge.datetime).days
    # NOTE(review): min(1, duration) divides by at most 1, so for spans longer
    # than a day this is not a true per-day average; max(1, duration) would be.
    return balance / min(1, duration)
Return the average daily balance estimated from all recharges . We assume a linear usage between two recharges and an empty balance before a recharge .
23,242
def _round_half_hour ( record ) : k = record . datetime + timedelta ( minutes = - ( record . datetime . minute % 30 ) ) return datetime ( k . year , k . month , k . day , k . hour , k . minute , 0 )
Round a time DOWN to half nearest half - hour .
23,243
def matrix_index(user):
    """Return matrix axis labels: the ego's name first, then the sorted
    names of all correspondents."""
    correspondents = sorted(name for name in user.network.keys() if name != user.name)
    return [user.name] + correspondents
Returns the keys associated with each axis of the matrices .
23,244
def matrix_directed_unweighted(user):
    """Directed unweighted adjacency: positive interaction counts become 1,
    while None (unknown) and 0 entries are preserved."""
    matrix = _interaction_matrix(user, interaction=None)
    size = len(matrix)
    for row in range(size):
        for col in range(size):
            cell = matrix[row][col]
            if cell is not None and cell > 0:
                matrix[row][col] = 1
    return matrix
Returns a directed unweighted matrix where an edge exists if there is at least one call or text .
23,245
def matrix_undirected_weighted(user, interaction=None):
    """Undirected weighted adjacency: each edge weight is the sum of both
    directions, kept only when the relationship is reciprocated; None marks
    entries with an unknown direction."""
    directed = _interaction_matrix(user, interaction=interaction)
    size = len(directed)
    undirected = [[0] * size for _ in range(size)]
    for a in range(size):
        for b in range(size):
            forward, backward = directed[a][b], directed[b][a]
            if a != b and forward and backward:
                undirected[a][b] = forward + backward
            elif forward is None or backward is None:
                undirected[a][b] = None
    return undirected
Returns an undirected weighted matrix for call text and call duration where an edge exists if the relationship is reciprocated .
23,246
def matrix_undirected_unweighted(user):
    """Undirected unweighted adjacency: 1 where the relationship is
    reciprocated, None entries left untouched."""
    matrix = matrix_undirected_weighted(user, interaction=None)
    for a, b in combinations(range(len(matrix)), 2):
        forward, backward = matrix[a][b], matrix[b][a]
        if forward is None or backward is None:
            continue
        if forward > 0 and backward > 0:
            matrix[a][b] = matrix[b][a] = 1
    return matrix
Returns an undirected unweighted matrix where an edge exists if the relationship is reciprocated .
23,247
def clustering_coefficient_unweighted(user):
    """Clustering coefficient of the ego in the unweighted, undirected
    ego network.

    Counts closed triplets centred on the ego (neighbor pairs that are also
    connected to each other and to the ego) and normalizes by the ego
    degree d * (d - 1) / 2 pairs; returns 0 when the ego has degree <= 1.
    """
    matrix = matrix_undirected_unweighted(user)
    closed_triplets = 0.
    for a, b in combinations(range(len(matrix)), 2):
        a_b, a_ego, b_ego = matrix[a][b], matrix[a][0], matrix[b][0]
        if a_b is None or a_ego is None or b_ego is None:
            continue
        if a_b > 0 and a_ego > 0 and b_ego > 0:
            closed_triplets += 1.
    # BUG FIX: the ego row may contain None entries (unknown relationships,
    # produced by matrix_undirected_weighted); summing them raised TypeError.
    d_ego = sum(v for v in matrix[0] if v is not None)
    return 2 * closed_triplets / (d_ego * (d_ego - 1)) if d_ego > 1 else 0
The clustering coefficient of the user in the unweighted undirected ego network .
23,248
def clustering_coefficient_weighted(user, interaction=None):
    """Weighted clustering coefficient of the ego's undirected network,
    using the geometric mean of triplet edge weights normalized by the
    maximum weight. Returns None when no weight is known."""
    matrix = matrix_undirected_weighted(user, interaction=interaction)
    known_weights = [w for row in matrix for w in row if w is not None]
    if not known_weights:
        return None
    max_weight = max(known_weights)
    triplet_weight = 0
    for a, b in combinations(range(len(matrix)), 2):
        a_b, a_ego, b_ego = matrix[a][b], matrix[a][0], matrix[b][0]
        if a_b is None or a_ego is None or b_ego is None:
            continue
        if a_b and a_ego and b_ego:
            # Normalized geometric mean of the three edge weights.
            triplet_weight += (a_b * a_ego * b_ego) ** (1 / 3) / max_weight
    d_ego = sum(1 for w in matrix[0] if w > 0)
    return 2 * triplet_weight / (d_ego * (d_ego - 1)) if d_ego > 1 else 0
The clustering coefficient of the user s weighted undirected network .
23,249
def assortativity_indicators(user):
    """Mean squared difference between the ego's indicator values and those
    of each connected neighbor, per indicator."""
    matrix = matrix_undirected_unweighted(user)
    squared_diff = defaultdict(int)
    pair_count = defaultdict(int)
    # `all` here is bandicoot's indicator aggregator, not the builtin.
    ego_values = all(user, flatten=True)
    ego_values = {key: value for key, value in ego_values.items()
                  if key != "name"
                  and key[:11] != "reporting__"
                  and key[:10] != "attributes"}
    for idx, name in enumerate(matrix_index(user)):
        neighbor = user.network.get(name, None)
        if neighbor is None or name == user.name or matrix[0][idx] == 0:
            continue
        neighbor_values = all(neighbor, flatten=True)
        for key in ego_values:
            if ego_values[key] is not None and neighbor_values[key] is not None:
                pair_count[key] += 1
                squared_diff[key] += (ego_values[key] - neighbor_values[key]) ** 2
    return {key: squared_diff[key] / pair_count[key] for key in squared_diff}
Computes the assortativity of indicators .
23,250
def assortativity_attributes(user):
    """For each nominal attribute, the fraction of connected neighbors
    (with known attributes) sharing the ego's value; None when no neighbor
    has attributes."""
    matrix = matrix_undirected_unweighted(user)
    neighbors = [k for k in user.network.keys() if k != user.name]
    neighbor_attrs = {}
    for idx, name in enumerate(matrix_index(user)):
        correspondent = user.network.get(name, None)
        if correspondent is None or name == user.name or matrix[0][idx] == 0:
            continue
        if correspondent.has_attributes:
            neighbor_attrs[correspondent.name] = correspondent.attributes
    assortativity = {}
    for attr in user.attributes:
        matching = sum(1 for n in neighbors
                       if n in neighbor_attrs
                       and user.attributes[attr] == neighbor_attrs[n][attr])
        known = sum(1 for n in neighbors if n in neighbor_attrs)
        assortativity[attr] = matching / known if known != 0 else None
    return assortativity
Computes the assortativity of the nominal attributes .
23,251
def network_sampling(n, filename, directory=None, snowball=False, user=None):
    """Select n users and export a CSV of their indicators.

    With snowball=True, performs snowball sampling outward from `user`;
    otherwise samples n user files at random from `directory`.

    Raises ValueError if snowball sampling is requested without a starting
    user, or if the directory holds fewer than n records.
    """
    if snowball:
        if user is None:
            raise ValueError("Must specify a starting user from whom to initiate the snowball")
        users, agenda = [user], [user]
        while len(agenda) > 0:
            parent = agenda.pop()
            # Visit the parent's neighbors in random order.
            shuffled_network = sorted(parent.network.items(), key=lambda k: random.random())
            for neighbor in shuffled_network:
                if neighbor[1] not in users and neighbor[1] is not None and len(users) < n:
                    users.append(neighbor[1])
                    if neighbor[1].network:
                        # BUG FIX: Python lists have no `push` method;
                        # the original `agenda.push(...)` raised AttributeError.
                        agenda.append(neighbor[1])
    else:
        files = [x for x in os.listdir(directory)
                 if os.path.isfile(os.path.join(directory, x))]
        shuffled_files = sorted(files, key=lambda k: random.random())
        user_names = shuffled_files[:n]
        users = [bc.read_csv(u[:-4], directory) for u in user_names]
        if len(users) < n:
            raise ValueError("Specified more users than records that exist, only {} records available".format(len(users)))
    bc.to_csv([bc.utils.all(u) for u in users], filename)
Selects a few users and exports a CSV of indicators for them .
23,252
def export(user, directory=None, warnings=True):
    """Write the dashboard assets and the user's data into a directory
    (a fresh temporary one unless `directory` is given); return its path."""
    current_path = os.path.dirname(os.path.realpath(__file__))
    dashboard_path = os.path.join(current_path, 'dashboard_src')
    dirpath = directory if directory else tempfile.mkdtemp()
    # Copy the static dashboard files, only refreshing outdated ones.
    copy_tree(dashboard_path + '/public', dirpath, update=1)
    data = user_data(user)
    bc.io.to_json(data, dirpath + '/data/bc_export.json', warnings=False)
    if warnings:
        print("Successfully exported the visualization to %s" % dirpath)
    return dirpath
Build a temporary directory with the visualization . Returns the local path where files have been written .
23,253
def run(user, port=4242):
    """Export the user's visualization to a temporary directory and serve it
    over HTTP until interrupted; restores the working directory on exit."""
    original_cwd = os.getcwd()
    serve_dir = export(user)
    os.chdir(serve_dir)
    handler = SimpleHTTPServer.SimpleHTTPRequestHandler
    try:
        httpd = SocketServer.TCPServer(("", port), handler)
        print("Serving bandicoot visualization at http://0.0.0.0:%i" % port)
        httpd.serve_forever()
    except KeyboardInterrupt:
        print("^C received, shutting down the web server")
        httpd.server_close()
    finally:
        os.chdir(original_cwd)
Build a temporary directory with a visualization and serve it over HTTP .
23,254
def to_csv(objects, filename, digits=5, warnings=True):
    """Export the flattened indicators of one or several users to a CSV file.

    Floats are rounded to `digits` decimal places; column order follows the
    first appearance of each key across the objects.
    """
    if not isinstance(objects, list):
        objects = [objects]
    data = [flatten(obj) for obj in objects]
    all_keys = [key for datum in data for key in datum.keys()]
    # Deduplicate the keys while preserving first-seen order.
    field_names = sorted(set(all_keys), key=lambda x: all_keys.index(x))

    def make_repr(item):
        if item is None:
            return None
        if isinstance(item, float):
            return repr(round(item, digits))
        return str(item)

    with open(filename, 'w') as f:
        writer = csv.writer(f)
        writer.writerow(field_names)
        for row in data:
            row = dict((k, make_repr(v)) for k, v in row.items())
            writer.writerow([make_repr(row.get(k, None)) for k in field_names])
    if warnings:
        print("Successfully exported {} object(s) to {}".format(len(objects), filename))
Export the flatten indicators of one or several users to CSV .
23,255
def to_json(objects, filename, warnings=True):
    """Export the indicators of one or several users to a JSON file keyed by
    each object's 'name' field."""
    if not isinstance(objects, list):
        objects = [objects]
    keyed = OrderedDict((obj['name'], obj) for obj in objects)
    with open(filename, 'w') as f:
        f.write(dumps(keyed, indent=4, separators=(',', ': ')))
    if warnings:
        print("Successfully exported {} object(s) to {}".format(len(objects), filename))
Export the indicators of one or several users to JSON .
23,256
def _parse_record(data, duration_format='seconds'):
    """Parse a raw CSV row dictionary and return a Record object.

    ``duration_format`` is either 'seconds' (integer seconds) or a
    ``time.strptime`` format string used to parse the call duration.
    Field parsing failures are soaked up by ``_tryto`` (defined elsewhere
    in this module).
    """
    def _map_duration(s):
        # Empty string means the duration is unknown.
        if s == '':
            return None
        elif duration_format.lower() == 'seconds':
            return int(s)
        else:
            # Convert an H:M:S-style string to total seconds.
            t = time.strptime(s, duration_format)
            return 3600 * t.tm_hour + 60 * t.tm_min + t.tm_sec

    def _map_position(data):
        antenna = Position()
        if 'antenna_id' in data and data['antenna_id']:
            antenna.antenna = data['antenna_id']
        # Reject the deprecated column name outright.
        if 'place_id' in data:
            raise NameError("Use field name 'antenna_id' in input files. "
                            "'place_id' is deprecated.")
        if 'latitude' in data and 'longitude' in data:
            latitude = data['latitude']
            longitude = data['longitude']
            # Ignore empty coordinate strings.
            if latitude and longitude:
                antenna.location = float(latitude), float(longitude)
        return antenna

    return Record(interaction=data['interaction'] if data['interaction'] else None,
                  direction=data['direction'],
                  correspondent_id=data['correspondent_id'],
                  datetime=_tryto(lambda x: datetime.strptime(x, "%Y-%m-%d %H:%M:%S"),
                                  data['datetime']),
                  call_duration=_tryto(_map_duration, data['call_duration']),
                  position=_tryto(_map_position, data))
Parse a raw data dictionary and return a Record object .
23,257
def filter_record(records):
    """Filter records, removing items with missing or inconsistent fields.

    Returns a tuple ``(valid_records, ignored, bad_records)`` where
    ``ignored`` is an OrderedDict counting, per field, how many records
    failed that field's check ('all' counts records dropped overall), and
    ``bad_records`` lists every offending record (a record failing several
    checks appears once per failed check).
    """
    def scheme(r):
        # A call must carry a numeric duration; other interactions need none.
        if r.interaction is None:
            call_duration_ok = True
        elif r.interaction == 'call':
            call_duration_ok = isinstance(r.call_duration, (int, float))
        else:
            call_duration_ok = True
        callandtext = r.interaction in ['call', 'text']
        not_callandtext = not callandtext
        # Per-field validity flags for this record.
        return {
            'interaction': r.interaction in ['call', 'text', 'gps', None],
            'direction': (not_callandtext and r.direction is None) or r.direction in ['in', 'out'],
            'correspondent_id': not_callandtext or (r.correspondent_id not in [None, '']),
            'datetime': isinstance(r.datetime, datetime),
            'call_duration': call_duration_ok,
            # Non-interaction (e.g. gps) records must carry a position.
            'location': callandtext or r.position.type() is not None
        }

    ignored = OrderedDict([
        ('all', 0),
        ('interaction', 0),
        ('direction', 0),
        ('correspondent_id', 0),
        ('datetime', 0),
        ('call_duration', 0),
        ('location', 0),
    ])
    bad_records = []

    def _filter(records):
        for r in records:
            valid = True
            for key, valid_key in scheme(r).items():
                if not valid_key:
                    ignored[key] += 1
                    bad_records.append(r)
                    valid = False
            if valid:
                yield r
            else:
                ignored['all'] += 1
    return list(_filter(records)), ignored, bad_records
Filter records and remove items with missing or inconsistent fields
23,258
def read_csv(user_id, records_path, antennas_path=None, attributes_path=None,
             recharges_path=None, network=False, duration_format='seconds',
             describe=True, warnings=True, errors=False, drop_duplicates=False):
    """Load a user's records (and optionally antennas, attributes, recharges,
    and ego network) from per-user CSV files.

    Returns the loaded user, or ``(user, bad_records)`` when ``errors`` is
    True.
    """
    antennas = None
    if antennas_path is not None:
        try:
            with open(antennas_path, 'r') as csv_file:
                reader = csv.DictReader(csv_file)
                antennas = dict((d['antenna_id'],
                                 (float(d['latitude']), float(d['longitude'])))
                                for d in reader)
        except IOError:
            # Missing antennas file is tolerated; positions stay unresolved.
            pass
    user_records = os.path.join(records_path, user_id + '.csv')
    with open(user_records, 'r') as csv_file:
        reader = csv.DictReader(csv_file)
        records = [_parse_record(r, duration_format) for r in reader]
    attributes = None
    if attributes_path is not None:
        user_attributes = os.path.join(attributes_path, user_id + '.csv')
        attributes = _load_attributes(user_attributes)
    recharges = None
    if recharges_path is not None:
        user_recharges = os.path.join(recharges_path, user_id + '.csv')
        recharges = _load_recharges(user_recharges)
    user, bad_records = load(user_id, records, antennas, attributes, recharges,
                             antennas_path, attributes_path, recharges_path,
                             describe=False, warnings=warnings,
                             drop_duplicates=drop_duplicates)
    if network is True:
        # Recursively load each neighbor with this same reader.
        user.network = _read_network(user, records_path, attributes_path,
                                     read_csv, antennas_path, warnings,
                                     drop_duplicates=drop_duplicates)
        user.recompute_missing_neighbors()
    if describe:
        user.describe()
    if errors:
        return user, bad_records
    return user
Load user records from a CSV file .
23,259
def interevent_time(records):
    """Summary statistics of the time, in seconds, elapsed between
    consecutive records."""
    gaps = [(later - earlier).total_seconds()
            for earlier, later in pairwise(r.datetime for r in records)]
    return summary_stats(gaps)
The interevent time between two records of the user .
23,260
def number_of_contacts(records, direction=None, more=0):
    """Number of distinct contacts, optionally filtered by direction,
    counting only contacts with strictly more than `more` interactions."""
    if direction is None:
        relevant = (r.correspondent_id for r in records)
    else:
        relevant = (r.correspondent_id for r in records if r.direction == direction)
    counts = Counter(relevant)
    return sum(1 for c in counts.values() if c > more)
The number of contacts the user interacted with .
23,261
def entropy_of_contacts(records, normalize=False):
    """Entropy of the distribution of interactions across contacts,
    optionally normalized by log(number of contacts)."""
    counts = Counter(r.correspondent_id for r in records)
    raw = entropy(counts.values())
    if normalize and len(counts) > 1:
        return raw / math.log(len(counts))
    return raw
The entropy of the user s contacts .
23,262
def interactions_per_contact(records, direction=None):
    """Summary statistics of the number of interactions with each contact,
    optionally filtered by direction."""
    if direction is None:
        relevant = (r.correspondent_id for r in records)
    else:
        relevant = (r.correspondent_id for r in records if r.direction == direction)
    return summary_stats(Counter(relevant).values())
The number of interactions a user had with each of its contacts .
23,263
def percent_initiated_interactions(records, user):
    """Fraction of records with direction 'out'; 0 when there are none."""
    if len(records) == 0:
        return 0
    outgoing = [r for r in records if r.direction == 'out']
    return len(outgoing) / len(records)
The percentage of calls initiated by the user .
23,264
def percent_nocturnal(records, user):
    """Fraction of interactions occurring within the user's night window;
    0 when there are no records."""
    if len(records) == 0:
        return 0
    if user.night_start < user.night_end:
        def is_night(d):
            return user.night_start < d.time() < user.night_end
    else:
        # The night window wraps around midnight.
        def is_night(d):
            return not (user.night_end < d.time() < user.night_start)
    return sum(1 for r in records if is_night(r.datetime)) / len(records)
The percentage of interactions the user had at night .
23,265
def call_duration(records, direction=None):
    """Summary statistics of call durations, optionally filtered by
    direction."""
    if direction is None:
        durations = [r.call_duration for r in records]
    else:
        durations = [r.call_duration for r in records if r.direction == direction]
    return summary_stats(durations)
The duration of the user s calls .
23,266
def _conversations ( group , delta = datetime . timedelta ( hours = 1 ) ) : last_time = None results = [ ] for g in group : if last_time is None or g . datetime - last_time < delta : if g . interaction == 'text' : results . append ( g ) else : if len ( results ) != 0 : yield results results = [ ] else : if len ( results ) != 0 : yield results if g . interaction == 'call' : results = [ ] else : results = [ g ] last_time = g . datetime if len ( results ) != 0 : yield results
Group texts into conversations . The function returns an iterator over records grouped by conversations .
23,267
def percent_initiated_conversations(records):
    """Fraction of conversations whose first message was outgoing;
    0 when there are no conversations."""
    by_contact = defaultdict(list)
    for r in records:
        by_contact[r.correspondent_id].append(r)

    def _initiated_flags(grouped):
        # One (initiated?, 1) pair per conversation with this contact.
        return [(1 if conv[0].direction == 'out' else 0, 1)
                for conv in _conversations(grouped)]

    pairs = [pair for grouped in by_contact.values()
             for pair in _initiated_flags(grouped)]
    if len(pairs) == 0:
        initiated, total = 0, 0
    else:
        initiated, total = map(sum, zip(*pairs))
    return initiated / total if total != 0 else 0
The percentage of conversations that have been initiated by the user .
23,268
def active_days(records):
    """Number of distinct calendar days with at least one record."""
    return len({r.datetime.date() for r in records})
The number of days during which the user was active . A user is considered active on a given day if they send a text , receive a text , initiate a call , receive a call , or have a mobility point .
23,269
def percent_pareto_interactions(records, percentage=0.8):
    """Greedily take the heaviest contacts until `percentage` of all
    interactions is covered, then divide that contact count by the number
    of records; None when there are no records.

    NOTE(review): the documented intent is "percentage of the user's
    contacts", which would divide by the number of contacts rather than
    len(records) — confirm the intended denominator upstream.
    """
    if len(records) == 0:
        return None
    contact_counts = Counter(r.correspondent_id for r in records)
    remaining = int(math.ceil(sum(contact_counts.values()) * percentage))
    # Contacts sorted by ascending interaction count; pop() takes the heaviest.
    by_frequency = sorted(contact_counts.keys(), key=lambda c: contact_counts[c])
    while remaining > 0 and len(by_frequency) > 0:
        remaining -= contact_counts[by_frequency.pop()]
    return (len(contact_counts) - len(by_frequency)) / len(records)
The percentage of user s contacts that account for 80% of its interactions .
23,270
def number_of_interactions(records, direction=None):
    """Total number of records, optionally restricted to one direction."""
    if direction is None:
        return len(records)
    return sum(1 for r in records if r.direction == direction)
The number of interactions .
23,271
def to_csv(weekmatrices, filename, digits=5):
    """Export a list of week-matrix rows to `filename` in CSV format,
    rounding floats to `digits` decimal places."""
    def make_repr(item):
        if item is None:
            return None
        if isinstance(item, float):
            return repr(round(item, digits))
        return str(item)

    with open(filename, 'w') as f:
        writer = csv.writer(f, lineterminator='\n')
        writer.writerow(['year_week', 'channel', 'weekday', 'section', 'value'])
        for row in weekmatrices:
            writer.writerow([make_repr(item) for item in row])
Exports a list of week - matrices to a specified filename in the CSV format .
23,272
def read_csv(filename):
    """Read a list of week-matrix rows back from a CSV file, casting
    columns 1-3 to int and column 4 to float."""
    with open(filename, 'r') as f:
        reader = csv.reader(f)
        next(reader)  # skip the header row
        rows = list(reader)
    for row in rows:
        # NOTE(review): this assumes the channel column (index 1) is numeric;
        # confirm against the writer's output format.
        row[1:4] = map(int, row[1:4])
        row[4] = float(row[4])
    return rows
Read a list of week - matrices from a CSV file .
23,273
def _extract_list_from_generator ( generator ) : extracted = [ ] for i in generator : extracted . append ( list ( i ) ) return extracted
Iterates over a generator to extract all the objects and add them to a list . Useful when the objects have to be used multiple times .
23,274
def _seconds_to_section_split(record, sections):
    """Seconds remaining from the record's timestamp until the start of the
    next section boundary (sections are expressed in minutes)."""
    minute_of_week = _find_weektime(record.datetime)
    next_boundary = sections[bisect_right(sections, minute_of_week)] * 60
    return next_boundary - _find_weektime(record.datetime, time_type='sec')
Finds the seconds to the next section from the datetime of a record .
23,275
def get_neighbors(distance_matrix, source, eps):
    """Indices of every point strictly closer than `eps` to `source`
    (the source itself is included, its self-distance being 0)."""
    row = distance_matrix[source]
    return [idx for idx, dist in enumerate(row) if dist < eps]
Given a matrix of distance between couples of points return the list of every point closer than eps from a certain point .
23,276
def fix_location(records, max_elapsed_seconds=300):
    """Fill in the position of records from the nearest record in an
    adjacent group, when that record is at most `max_elapsed_seconds` away
    in time. Mutates the records in place.

    NOTE(review): records are grouped by `direction` and groups with
    direction 'in' are skipped — presumably positionless records are patched
    from surrounding groups that carry positions; confirm against the
    record schema.
    """
    grouped = [(direction, list(g))
               for direction, g in itertools.groupby(records, lambda r: r.direction)]

    def tdist(t1, t2):
        return abs((t1 - t2).total_seconds())

    for i, (direction, group) in enumerate(grouped):
        if direction == 'in':
            continue
        # BUG FIX: guard the boundaries. The original indexed groups[i - 1]
        # at i == 0 (silently wrapping to the LAST group via negative
        # indexing) and groups[i + 1] on the last group (IndexError).
        prev_rec = grouped[i - 1][1][-1] if i > 0 else None
        next_rec = grouped[i + 1][1][0] if i + 1 < len(grouped) else None
        for r in group:
            if prev_rec is not None and tdist(r.datetime, prev_rec.datetime) <= max_elapsed_seconds:
                r.position = prev_rec.position
            elif next_rec is not None and tdist(r.datetime, next_rec.datetime) <= max_elapsed_seconds:
                r.position = next_rec.position
Update position of all records based on the position of the closest GPS record .
23,277
def fetch(cert, issuer, hash_algo='sha1', nonce=True, user_agent=None, timeout=10):
    """Fetches an OCSP response for a certificate from its OCSP URLs.

    ``cert`` and ``issuer`` are asn1crypto.x509.Certificate objects;
    ``hash_algo`` is 'sha1' or 'sha256'; when ``nonce`` is True a random
    nonce extension is added and verified against the response. Tries each
    OCSP URL in turn, re-raising the last URLError if all fail.
    """
    # -- argument validation --
    if not isinstance(cert, x509.Certificate):
        raise TypeError('cert must be an instance of asn1crypto.x509.Certificate, not %s' % type_name(cert))
    if not isinstance(issuer, x509.Certificate):
        raise TypeError('issuer must be an instance of asn1crypto.x509.Certificate, not %s' % type_name(issuer))
    if hash_algo not in set(['sha1', 'sha256']):
        raise ValueError('hash_algo must be one of "sha1", "sha256", not %s' % repr(hash_algo))
    if not isinstance(nonce, bool):
        raise TypeError('nonce must be a bool, not %s' % type_name(nonce))
    if user_agent is None:
        user_agent = 'certvalidator %s' % __version__
    elif not isinstance(user_agent, str_cls):
        raise TypeError('user_agent must be a unicode string, not %s' % type_name(user_agent))
    # Build the CertId identifying the certificate to the responder.
    cert_id = ocsp.CertId({
        'hash_algorithm': algos.DigestAlgorithm({'algorithm': hash_algo}),
        'issuer_name_hash': getattr(cert.issuer, hash_algo),
        'issuer_key_hash': getattr(issuer.public_key, hash_algo),
        'serial_number': cert.serial_number,
    })
    request = ocsp.Request({
        'req_cert': cert_id,
    })
    tbs_request = ocsp.TBSRequest({
        'request_list': ocsp.Requests([request]),
    })
    if nonce:
        # 16 random bytes, wrapped twice per the nonce extension encoding.
        nonce_extension = ocsp.TBSRequestExtension({
            'extn_id': 'nonce',
            'critical': False,
            'extn_value': core.OctetString(core.OctetString(os.urandom(16)).dump())
        })
        tbs_request['request_extensions'] = ocsp.TBSRequestExtensions([nonce_extension])
    ocsp_request = ocsp.OCSPRequest({
        'tbs_request': tbs_request,
    })
    last_e = None
    for ocsp_url in cert.ocsp_urls:
        try:
            request = Request(ocsp_url)
            request.add_header('Accept', 'application/ocsp-response')
            request.add_header('Content-Type', 'application/ocsp-request')
            request.add_header('User-Agent', user_agent)
            # POST the DER-encoded request to the responder.
            response = urlopen(request, ocsp_request.dump(), timeout)
            ocsp_response = ocsp.OCSPResponse.load(response.read())
            # Reject replayed responses: nonces, when both present, must match.
            request_nonce = ocsp_request.nonce_value
            response_nonce = ocsp_response.nonce_value
            if request_nonce and response_nonce and request_nonce.native != response_nonce.native:
                raise errors.OCSPValidationError('Unable to verify OCSP response since the request and response nonces do not match')
            return ocsp_response
        except (URLError) as e:
            last_e = e
    raise last_e
Fetches an OCSP response for a certificate
23,278
def _walk_issuers(self, path, paths, failed_paths):
    """Depth-first search over known certificates, extending `path` toward
    a trusted CA; complete paths go to `paths`, dead ends to `failed_paths`."""
    if path.first.signature in self._ca_lookup:
        paths.append(path)
        return
    branches_found = 0
    for candidate in self._possible_issuers(path.first):
        try:
            self._walk_issuers(path.copy().prepend(candidate), paths, failed_paths)
            branches_found += 1
        except (DuplicateCertificateError):
            # A cycle: this issuer is already on the path.
            pass
    if branches_found == 0:
        failed_paths.append(path)
Recursively looks through the list of known certificates for the issuer of the certificate specified stopping once the certificate in question is one contained within the CA certs list
23,279
def _possible_issuers(self, cert):
    """Yield every known certificate whose subject matches `cert`'s issuer,
    filtered by authority key identifier or issuer-serial when available."""
    issuer_hashable = cert.issuer.hashable
    if issuer_hashable not in self._subject_map:
        return
    for candidate in self._subject_map[issuer_hashable]:
        # Prefer the key-identifier check; fall back to issuer-serial.
        if cert.authority_key_identifier and candidate.key_identifier:
            if cert.authority_key_identifier != candidate.key_identifier:
                continue
        elif cert.authority_issuer_serial:
            if cert.authority_issuer_serial != candidate.issuer_serial:
                continue
        yield candidate
Returns a generator that will list all possible issuers for the cert
23,280
def find_issuer(self, cert):
    """Return the entry in this path that issued `cert`.

    Matches on subject name, additionally requiring a key-identifier match
    when both sides carry one. Raises LookupError when no entry matches.
    """
    for entry in self:
        if entry.subject != cert.issuer:
            continue
        if entry.key_identifier and cert.authority_key_identifier:
            if entry.key_identifier == cert.authority_key_identifier:
                return entry
        else:
            return entry
    raise LookupError('Unable to find the issuer of the certificate specified')
Return the issuer of the cert specified as defined by this path
23,281
def truncate_to(self, cert):
    """Drop every certificate in the path after `cert`; returns self.

    Raises LookupError when `cert` is not on the path. Removal goes through
    self.pop() so any bookkeeping it performs is preserved.
    """
    target_index = None
    for index, entry in enumerate(self):
        if entry.issuer_serial == cert.issuer_serial:
            target_index = index
            break
    if target_index is None:
        raise LookupError('Unable to find the certificate specified')
    while len(self) > target_index + 1:
        self.pop()
    return self
Remove all certificates in the path after the cert specified
23,282
def truncate_to_issuer(self, cert):
    """Drop every certificate in the path after the issuer of `cert`;
    returns self. Raises LookupError when the issuer is not on the path."""
    issuer_index = None
    for index, entry in enumerate(self):
        if entry.subject != cert.issuer:
            continue
        if entry.key_identifier and cert.authority_key_identifier:
            if entry.key_identifier == cert.authority_key_identifier:
                issuer_index = index
                break
        else:
            issuer_index = index
            break
    if issuer_index is None:
        raise LookupError('Unable to find the issuer of the certificate specified')
    # Go through self.pop() to keep any hash bookkeeping consistent.
    while len(self) > issuer_index + 1:
        self.pop()
    return self
Remove all certificates in the path after the issuer of the cert specified as defined by this path
23,283
def copy(self):
    """Return a copy of this path: the certificate list is shallow-copied
    and the hash set duplicated."""
    duplicate = self.__class__()
    duplicate._certs = self._certs[:]
    duplicate._cert_hashes = self._cert_hashes.copy()
    return duplicate
Creates a copy of this path
23,284
def pop(self):
    """Remove the last certificate from the path, keeping the hash set in
    sync; returns self."""
    removed = self._certs.pop()
    self._cert_hashes.remove(removed.issuer_serial)
    return self
Removes the last certificate from the path
23,285
def fetch(cert, use_deltas=True, user_agent=None, timeout=10):
    """Fetch every CRL (and, optionally, delta CRL) listed in the
    certificate's distribution points."""
    if not isinstance(cert, x509.Certificate):
        raise TypeError('cert must be an instance of asn1crypto.x509.Certificate, not %s' % type_name(cert))
    if user_agent is None:
        user_agent = 'certvalidator %s' % __version__
    elif not isinstance(user_agent, str_cls):
        raise TypeError('user_agent must be a unicode string, not %s' % type_name(user_agent))
    sources = cert.crl_distribution_points
    if use_deltas:
        sources.extend(cert.delta_crl_distribution_points)
    return [_grab_crl(user_agent, point.url, timeout) for point in sources]
Fetches the CRLs for a certificate
23,286
def _grab_crl(user_agent, url, timeout):
    """Download a CRL from `url` and parse it (PEM-armored or raw DER)."""
    request = Request(url)
    request.add_header('Accept', 'application/pkix-crl')
    request.add_header('User-Agent', user_agent)
    data = urlopen(request, None, timeout).read()
    if pem.detect(data):
        _, _, data = pem.unarmor(data)
    return crl.CertificateList.load(data)
Fetches a CRL and parses it
23,287
def fetch_certs(certificate_list, user_agent=None, timeout=10):
    """Fetches the issuer certificates referenced by a CRL's authority
    information access URLs and returns them as a list of
    asn1crypto.x509.Certificate objects.

    Raises ValueError on an unrecognized Content-Type.
    """
    output = []
    if user_agent is None:
        user_agent = 'certvalidator %s' % __version__
    elif not isinstance(user_agent, str_cls):
        raise TypeError('user_agent must be a unicode string, not %s' % type_name(user_agent))
    for url in certificate_list.issuer_cert_urls:
        request = Request(url)
        request.add_header('Accept', 'application/pkix-cert,application/pkcs7-mime')
        request.add_header('User-Agent', user_agent)
        response = urlopen(request, None, timeout)
        content_type = response.headers['Content-Type'].strip()
        response_data = response.read()
        if content_type == 'application/pkix-cert':
            # A single DER-encoded certificate.
            output.append(x509.Certificate.load(response_data))
        elif content_type == 'application/pkcs7-mime':
            # A PKCS#7 bundle; pull out each embedded certificate.
            signed_data = cms.SignedData.load(response_data)
            if isinstance(signed_data['certificates'], cms.CertificateSet):
                for cert_choice in signed_data['certificates']:
                    if cert_choice.name == 'certificate':
                        output.append(cert_choice.chosen)
        else:
            raise ValueError('Unknown content type of %s when fetching issuer certificate for CRL' % repr(content_type))
    return output
Fetches certificates from the authority information access extension of an asn1crypto . crl . CertificateList object and places them into the cert registry .
23,288
def validate_usage(self, key_usage, extended_key_usage=None, extended_optional=False):
    """Validate the certificate path, then check the certificate against the
    given key usage and extended key usage purposes; returns the path.

    Delegates to the module-level validate_usage function.
    """
    self._validate_path()
    validate_usage(self._context, self._certificate, key_usage,
                   extended_key_usage, extended_optional)
    return self._path
Validates the certificate path and that the certificate is valid for the key usage and extended key usage purposes specified .
23,289
def validate_tls(self, hostname):
    """Validate the certificate path and the certificate's suitability for a
    TLS connection to `hostname`; returns the path."""
    self._validate_path()
    validate_tls_hostname(self._context, self._certificate, hostname)
    return self._path
Validates the certificate path that the certificate is valid for the hostname provided and that the certificate is valid for the purpose of a TLS connection .
23,290
def crls(self):
    """All cached asn1crypto.crl.CertificateList objects: the preset list
    when fetching is disabled, otherwise everything fetched so far."""
    if not self._allow_fetching:
        return self._crls
    fetched = []
    for crl_list in self._fetched_crls.values():
        fetched.extend(crl_list)
    return fetched
A list of all cached asn1crypto . crl . CertificateList objects
23,291
def ocsps(self):
    """All cached asn1crypto.ocsp.OCSPResponse objects: the preset list when
    fetching is disabled, otherwise everything fetched so far."""
    if not self._allow_fetching:
        return self._ocsps
    fetched = []
    for response_list in self._fetched_ocsps.values():
        fetched.extend(response_list)
    return fetched
A list of all cached asn1crypto . ocsp . OCSPResponse objects
23,292
def _extract_ocsp_certs(self, ocsp_response):
    """Register any certificates bundled with a successful basic OCSP
    response and remember them as revocation certificates."""
    if ocsp_response['response_status'].native != 'successful':
        return
    response_bytes = ocsp_response['response_bytes']
    if response_bytes['response_type'].native != 'basic_ocsp_response':
        return
    response = response_bytes['response'].parsed
    if response['certs']:
        for other_cert in response['certs']:
            # Only remember certs not already in the registry.
            if self.certificate_registry.add_other_cert(other_cert):
                self._revocation_certs[other_cert.issuer_serial] = other_cert
Extracts any certificates included with an OCSP response and adds them to the certificate registry
23,293
def check_validation(self, cert):
    """Return the ValidationPath used to validate `cert`, or None if it has
    not been validated.

    A CA-store certificate seen for the first time is registered as its own
    single-certificate validation path.
    """
    if self.certificate_registry.is_ca(cert) and cert.signature not in self._validate_map:
        self._validate_map[cert.signature] = ValidationPath(cert)
    return self._validate_map.get(cert.signature)
Checks to see if a certificate has been validated and if so returns the ValidationPath used to validate it .
23,294
def clear_validation(self, cert):
    """Forget any recorded validation for `cert`; a no-op if none exists."""
    self._validate_map.pop(cert.signature, None)
Clears the record that a certificate has been validated
23,295
def _find_cert_in_list(cert, issuer, certificate_list, crl_issuer):
    """Looks for `cert` in a CRL's list of revoked certificates.

    Returns ``(revocation_date, crl_reason)`` when found, ``(None, None)``
    otherwise. Raises NotImplementedError on an unsupported critical CRL
    entry extension.
    """
    revoked_certificates = certificate_list['tbs_cert_list']['revoked_certificates']
    cert_serial = cert.serial_number
    issuer_name = issuer.subject
    # Entry extensions we know how to interpret; anything else that is
    # critical forces a failure.
    known_extensions = set(['crl_reason', 'hold_instruction_code',
                            'invalidity_date', 'certificate_issuer'])
    # In an indirect CRL, the issuer of an entry carries over to subsequent
    # entries until overridden by a certificate_issuer extension.
    last_issuer_name = crl_issuer.subject
    for revoked_cert in revoked_certificates:
        if revoked_cert.critical_extensions - known_extensions:
            raise NotImplementedError()
        if revoked_cert.issuer_name and revoked_cert.issuer_name != last_issuer_name:
            last_issuer_name = revoked_cert.issuer_name
        if last_issuer_name != issuer_name:
            continue
        if revoked_cert['user_certificate'].native != cert_serial:
            continue
        # A missing reason code defaults to 'unspecified' per RFC 5280.
        if not revoked_cert.crl_reason_value:
            crl_reason = crl.CRLReason('unspecified')
        else:
            crl_reason = revoked_cert.crl_reason_value
        return (revoked_cert['revocation_date'], crl_reason)
    return (None, None)
Looks for a cert in the list of revoked certificates
23,296
def add_child(self, valid_policy, qualifier_set, expected_policy_set):
    """Create a new PolicyTreeNode with the given policy data and attach it
    as a child of this node."""
    node = PolicyTreeNode(valid_policy, qualifier_set, expected_policy_set)
    node.parent = self
    self.children.append(node)
Creates a new PolicyTreeNode as a child of this node
23,297
def at_depth(self, depth):
    """Yield every descendant node exactly `depth` levels below this node's
    children (depth 0 yields the direct children)."""
    # Iterate a snapshot so callers may mutate children while consuming.
    for child in list(self.children):
        if depth == 0:
            yield child
        else:
            yield from child.at_depth(depth - 1)
Returns a generator yielding all nodes in the tree at a specific depth
23,298
def walk_up(self, depth):
    """Yield all descendant nodes at the given depth or above, deepest
    first, finishing with this node's direct children."""
    for child in list(self.children):
        if depth != 0:
            yield from child.walk_up(depth - 1)
        yield child
Returns a generator yielding all nodes in the tree at a specific depth or above . Yields nodes starting with leaves and traversing up to the root .
23,299
def clear(self):
    """Drain the pool, closing every idle connection (coroutine)."""
    while not self._pool.empty():
        connection = yield from self._pool.get()
        self._do_close(connection)
Clear pool connections .