idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
223,200
def _check_if_must_download ( request_list , redownload ) : for request in request_list : request . will_download = ( request . save_response or request . return_data ) and ( not request . is_downloaded ( ) or redownload )
Updates request . will_download attribute of each request in request_list .
61
16
223,201
def execute_download_request ( request ) : if request . save_response and request . data_folder is None : raise ValueError ( 'Data folder is not specified. ' 'Please give a data folder name in the initialization of your request.' ) if not request . will_download : return None try_num = SHConfig ( ) . max_download_attempts response = None while try_num > 0 : try : if request . is_aws_s3 ( ) : response = _do_aws_request ( request ) response_content = response [ 'Body' ] . read ( ) else : response = _do_request ( request ) response . raise_for_status ( ) response_content = response . content LOGGER . debug ( 'Successful download from %s' , request . url ) break except requests . RequestException as exception : try_num -= 1 if try_num > 0 and ( _is_temporal_problem ( exception ) or ( isinstance ( exception , requests . HTTPError ) and exception . response . status_code >= requests . status_codes . codes . INTERNAL_SERVER_ERROR ) or _request_limit_reached ( exception ) ) : LOGGER . debug ( 'Download attempt failed: %s\n%d attempts left, will retry in %ds' , exception , try_num , SHConfig ( ) . download_sleep_time ) sleep_time = SHConfig ( ) . download_sleep_time if _request_limit_reached ( exception ) : sleep_time = max ( sleep_time , 60 ) time . sleep ( sleep_time ) else : if request . url . startswith ( SHConfig ( ) . aws_metadata_url ) and isinstance ( exception , requests . HTTPError ) and exception . response . status_code == requests . status_codes . codes . NOT_FOUND : raise AwsDownloadFailedException ( 'File in location %s is missing' % request . url ) raise DownloadFailedException ( _create_download_failed_message ( exception , request . url ) ) _save_if_needed ( request , response_content ) if request . return_data : return decode_data ( response_content , request . data_type , entire_response = response ) return None
Executes download request .
488
5
223,202
def _is_temporal_problem ( exception ) : try : return isinstance ( exception , ( requests . ConnectionError , requests . Timeout ) ) except AttributeError : # Earlier requests versions might not have requests.Timeout return isinstance ( exception , requests . ConnectionError )
Checks if the obtained exception is temporal and if download attempt should be repeated
58
15
223,203
def _create_download_failed_message ( exception , url ) : message = 'Failed to download from:\n{}\nwith {}:\n{}' . format ( url , exception . __class__ . __name__ , exception ) if _is_temporal_problem ( exception ) : if isinstance ( exception , requests . ConnectionError ) : message += '\nPlease check your internet connection and try again.' else : message += '\nThere might be a problem in connection or the server failed to process ' 'your request. Please try again.' elif isinstance ( exception , requests . HTTPError ) : try : server_message = '' for elem in decode_data ( exception . response . content , MimeType . XML ) : if 'ServiceException' in elem . tag or 'Message' in elem . tag : server_message += elem . text . strip ( '\n\t ' ) except ElementTree . ParseError : server_message = exception . response . text message += '\nServer response: "{}"' . format ( server_message ) return message
Creates message describing why download has failed
235
8
223,204
def decode_data ( response_content , data_type , entire_response = None ) : LOGGER . debug ( 'data_type=%s' , data_type ) if data_type is MimeType . JSON : if isinstance ( entire_response , requests . Response ) : return entire_response . json ( ) return json . loads ( response_content . decode ( 'utf-8' ) ) if MimeType . is_image_format ( data_type ) : return decode_image ( response_content , data_type ) if data_type is MimeType . XML or data_type is MimeType . GML or data_type is MimeType . SAFE : return ElementTree . fromstring ( response_content ) try : return { MimeType . RAW : response_content , MimeType . TXT : response_content , MimeType . REQUESTS_RESPONSE : entire_response , MimeType . ZIP : BytesIO ( response_content ) } [ data_type ] except KeyError : raise ValueError ( 'Unknown response data type {}' . format ( data_type ) )
Interprets downloaded data and returns it .
247
9
223,205
def decode_image ( data , image_type ) : bytes_data = BytesIO ( data ) if image_type . is_tiff_format ( ) : image = tiff . imread ( bytes_data ) else : image = np . array ( Image . open ( bytes_data ) ) if image_type is MimeType . JP2 : try : bit_depth = get_jp2_bit_depth ( bytes_data ) image = fix_jp2_image ( image , bit_depth ) except ValueError : pass if image is None : raise ImageDecodingError ( 'Unable to decode image' ) return image
Decodes the image provided in various formats i . e . png 16 - bit float tiff 32 - bit float tiff jp2 and returns it as an numpy array
137
37
223,206
def get_json ( url , post_values = None , headers = None ) : json_headers = { } if headers is None else headers . copy ( ) if post_values is None : request_type = RequestType . GET else : request_type = RequestType . POST json_headers = { * * json_headers , * * { 'Content-Type' : MimeType . get_string ( MimeType . JSON ) } } request = DownloadRequest ( url = url , headers = json_headers , request_type = request_type , post_values = post_values , save_response = False , return_data = True , data_type = MimeType . JSON ) return execute_download_request ( request )
Download request as JSON data type
157
6
223,207
def get_xml ( url ) : request = DownloadRequest ( url = url , request_type = RequestType . GET , save_response = False , return_data = True , data_type = MimeType . XML ) return execute_download_request ( request )
Download request as XML data type
57
6
223,208
def is_downloaded ( self ) : if self . file_path is None : return False return os . path . exists ( self . file_path )
Checks if data for this request has already been downloaded and is saved to disk .
33
17
223,209
def get_area_info ( bbox , date_interval , maxcc = None ) : result_list = search_iter ( bbox = bbox , start_date = date_interval [ 0 ] , end_date = date_interval [ 1 ] ) if maxcc : return reduce_by_maxcc ( result_list , maxcc ) return result_list
Get information about all images from specified area and time range
82
11
223,210
def get_area_dates ( bbox , date_interval , maxcc = None ) : area_info = get_area_info ( bbox , date_interval , maxcc = maxcc ) return sorted ( { datetime . datetime . strptime ( tile_info [ 'properties' ] [ 'startDate' ] . strip ( 'Z' ) , '%Y-%m-%dT%H:%M:%S' ) for tile_info in area_info } )
Get list of times of existing images from specified area and time range
111
13
223,211
def search_iter ( tile_id = None , bbox = None , start_date = None , end_date = None , absolute_orbit = None ) : if bbox and bbox . crs is not CRS . WGS84 : bbox = bbox . transform ( CRS . WGS84 ) url_params = _prepare_url_params ( tile_id , bbox , end_date , start_date , absolute_orbit ) url_params [ 'maxRecords' ] = SHConfig ( ) . max_opensearch_records_per_query start_index = 1 while True : url_params [ 'index' ] = start_index url = '{}search.json?{}' . format ( SHConfig ( ) . opensearch_url , urlencode ( url_params ) ) LOGGER . debug ( "URL=%s" , url ) response = get_json ( url ) for tile_info in response [ "features" ] : yield tile_info if len ( response [ "features" ] ) < SHConfig ( ) . max_opensearch_records_per_query : break start_index += SHConfig ( ) . max_opensearch_records_per_query
A generator function that implements OpenSearch search queries and returns results
271
12
223,212
def _prepare_url_params ( tile_id , bbox , end_date , start_date , absolute_orbit ) : url_params = { 'identifier' : tile_id , 'startDate' : start_date , 'completionDate' : end_date , 'orbitNumber' : absolute_orbit , 'box' : bbox } return { key : str ( value ) for key , value in url_params . items ( ) if value }
Constructs dict with URL params
101
6
223,213
def _edit_name ( name , code , add_code = None , delete_end = False ) : info = name . split ( '_' ) info [ 2 ] = code if add_code is not None : info [ 3 ] = add_code if delete_end : info . pop ( ) return '_' . join ( info )
Helping function for creating file names in . SAFE format
74
12
223,214
def get_requests ( self ) : safe = self . get_safe_struct ( ) self . download_list = [ ] self . structure_recursion ( safe , self . parent_folder ) self . sort_download_list ( ) return self . download_list , self . folder_list
Creates product structure and returns list of files for download
64
11
223,215
def get_tile_id ( self ) : tree = get_xml ( self . get_url ( AwsConstants . METADATA ) ) tile_id_tag = 'TILE_ID_2A' if self . data_source is DataSource . SENTINEL2_L2A and self . baseline <= '02.06' else 'TILE_ID' tile_id = tree [ 0 ] . find ( tile_id_tag ) . text if self . safe_type is not EsaSafeType . OLD_TYPE : info = tile_id . split ( '_' ) tile_id = '_' . join ( [ info [ 3 ] , info [ - 2 ] , info [ - 3 ] , self . get_sensing_time ( ) ] ) return tile_id
Creates ESA tile ID
176
5
223,216
def _parse_shape_list ( shape_list , crs ) : if not isinstance ( shape_list , list ) : raise ValueError ( 'Splitter must be initialized with a list of shapes' ) return [ AreaSplitter . _parse_shape ( shape , crs ) for shape in shape_list ]
Checks if the given list of shapes is in correct format and parses geometry objects
68
17
223,217
def get_bbox_list ( self , crs = None , buffer = None , reduce_bbox_sizes = None ) : bbox_list = self . bbox_list if buffer : bbox_list = [ bbox . buffer ( buffer ) for bbox in bbox_list ] if reduce_bbox_sizes is None : reduce_bbox_sizes = self . reduce_bbox_sizes if reduce_bbox_sizes : bbox_list = self . _reduce_sizes ( bbox_list ) if crs : return [ bbox . transform ( crs ) for bbox in bbox_list ] return bbox_list
Returns a list of bounding boxes that are the result of the split
150
14
223,218
def get_area_bbox ( self , crs = None ) : bbox_list = [ BBox ( shape . bounds , crs = self . crs ) for shape in self . shape_list ] area_minx = min ( [ bbox . lower_left [ 0 ] for bbox in bbox_list ] ) area_miny = min ( [ bbox . lower_left [ 1 ] for bbox in bbox_list ] ) area_maxx = max ( [ bbox . upper_right [ 0 ] for bbox in bbox_list ] ) area_maxy = max ( [ bbox . upper_right [ 1 ] for bbox in bbox_list ] ) bbox = BBox ( [ area_minx , area_miny , area_maxx , area_maxy ] , crs = self . crs ) if crs is None : return bbox return bbox . transform ( crs )
Returns a bounding box of the entire area
208
9
223,219
def _bbox_to_area_polygon ( self , bbox ) : projected_bbox = bbox . transform ( self . crs ) return projected_bbox . geometry
Transforms bounding box into a polygon object in the area CRS .
40
16
223,220
def _reduce_sizes ( self , bbox_list ) : return [ BBox ( self . _intersection_area ( bbox ) . bounds , self . crs ) . transform ( bbox . crs ) for bbox in bbox_list ]
Reduces sizes of bounding boxes
58
7
223,221
def _parse_split_shape ( split_shape ) : if isinstance ( split_shape , int ) : return split_shape , split_shape if isinstance ( split_shape , ( tuple , list ) ) : if len ( split_shape ) == 2 and isinstance ( split_shape [ 0 ] , int ) and isinstance ( split_shape [ 1 ] , int ) : return split_shape [ 0 ] , split_shape [ 1 ] raise ValueError ( "Content of split_shape {} must be 2 integers." . format ( split_shape ) ) raise ValueError ( "Split shape must be an int or a tuple of 2 integers." )
Parses the parameter split_shape
140
8
223,222
def _recursive_split ( self , bbox , zoom_level , column , row ) : if zoom_level == self . zoom_level : self . bbox_list . append ( bbox ) self . info_list . append ( { 'zoom_level' : zoom_level , 'index_x' : column , 'index_y' : row } ) return bbox_partition = bbox . get_partition ( 2 , 2 ) for i , j in itertools . product ( range ( 2 ) , range ( 2 ) ) : if self . _intersects_area ( bbox_partition [ i ] [ j ] ) : self . _recursive_split ( bbox_partition [ i ] [ j ] , zoom_level + 1 , 2 * column + i , 2 * row + 1 - j )
Method that recursively creates bounding boxes of OSM grid that intersect the area .
185
18
223,223
def _parse_bbox_grid ( bbox_grid ) : if isinstance ( bbox_grid , BBoxCollection ) : return bbox_grid if isinstance ( bbox_grid , list ) : return BBoxCollection ( bbox_grid ) raise ValueError ( "Parameter 'bbox_grid' should be an instance of {}" . format ( BBoxCollection . __name__ ) )
Helper method for parsing bounding box grid . It will try to parse it into BBoxCollection
87
19
223,224
def _parse_metafiles ( self , metafile_input ) : all_metafiles = AwsConstants . S2_L1C_METAFILES if self . data_source is DataSource . SENTINEL2_L1C else AwsConstants . S2_L2A_METAFILES if metafile_input is None : if self . __class__ . __name__ == 'SafeProduct' : return all_metafiles if self . __class__ . __name__ == 'SafeTile' : return [ metafile for metafile in all_metafiles if metafile in AwsConstants . TILE_FILES ] return [ ] if isinstance ( metafile_input , str ) : metafile_list = metafile_input . split ( ',' ) elif isinstance ( metafile_input , list ) : metafile_list = metafile_input . copy ( ) else : raise ValueError ( 'metafiles parameter must be a list or a string' ) metafile_list = [ metafile . strip ( ) . split ( '.' ) [ 0 ] for metafile in metafile_list ] metafile_list = [ metafile for metafile in metafile_list if metafile != '' ] if not set ( metafile_list ) <= set ( all_metafiles ) : raise ValueError ( 'metadata files {} must be a subset of {}' . format ( metafile_list , all_metafiles ) ) return metafile_list
Parses class input and verifies metadata file names .
353
12
223,225
def get_base_url ( self , force_http = False ) : base_url = SHConfig ( ) . aws_metadata_url . rstrip ( '/' ) if force_http else 's3:/' aws_bucket = SHConfig ( ) . aws_s3_l1c_bucket if self . data_source is DataSource . SENTINEL2_L1C else SHConfig ( ) . aws_s3_l2a_bucket return '{}/{}/' . format ( base_url , aws_bucket )
Creates base URL path
130
5
223,226
def get_safe_type ( self ) : product_type = self . product_id . split ( '_' ) [ 1 ] if product_type . startswith ( 'MSI' ) : return EsaSafeType . COMPACT_TYPE if product_type in [ 'OPER' , 'USER' ] : return EsaSafeType . OLD_TYPE raise ValueError ( 'Unrecognized product type of product id {}' . format ( self . product_id ) )
Determines the type of ESA product .
104
9
223,227
def _read_baseline_from_info ( self ) : if hasattr ( self , 'tile_info' ) : return self . tile_info [ 'datastrip' ] [ 'id' ] [ - 5 : ] if hasattr ( self , 'product_info' ) : return self . product_info [ 'datastrips' ] [ 0 ] [ 'id' ] [ - 5 : ] raise ValueError ( 'No info file has been obtained yet.' )
Tries to find and return baseline number from either tileInfo or productInfo file .
104
17
223,228
def url_to_tile ( url ) : info = url . strip ( '/' ) . split ( '/' ) name = '' . join ( info [ - 7 : - 4 ] ) date = '-' . join ( info [ - 4 : - 1 ] ) return name , date , int ( info [ - 1 ] )
Extracts tile name date and AWS index from tile url on AWS .
69
15
223,229
def structure_recursion ( self , struct , folder ) : has_subfolder = False for name , substruct in struct . items ( ) : subfolder = os . path . join ( folder , name ) if not isinstance ( substruct , dict ) : product_name , data_name = self . _url_to_props ( substruct ) if '.' in data_name : data_type = MimeType ( data_name . split ( '.' ) [ - 1 ] ) data_name = data_name . rsplit ( '.' , 1 ) [ 0 ] else : data_type = MimeType . RAW if data_name in self . bands + self . metafiles : self . download_list . append ( DownloadRequest ( url = substruct , filename = subfolder , data_type = data_type , data_name = data_name , product_name = product_name ) ) else : has_subfolder = True self . structure_recursion ( substruct , subfolder ) if not has_subfolder : self . folder_list . append ( folder )
From nested dictionaries representing . SAFE structure it recursively extracts all the files that need to be downloaded and stores them into class attribute download_list .
235
32
223,230
def add_file_extension ( filename , data_format = None , remove_path = False ) : if data_format is None : data_format = AwsConstants . AWS_FILES [ filename ] if remove_path : filename = filename . split ( '/' ) [ - 1 ] if filename . startswith ( 'datastrip' ) : filename = filename . replace ( '*' , '0' ) if data_format is MimeType . RAW : return filename return '{}.{}' . format ( filename . replace ( '*' , '' ) , data_format . value )
Joins filename and corresponding file extension if it has one .
132
12
223,231
def is_early_compact_l2a ( self ) : return self . data_source is DataSource . SENTINEL2_L2A and self . safe_type is EsaSafeType . COMPACT_TYPE and self . baseline <= '02.06'
Check if product is early version of compact L2A product
60
12
223,232
def get_requests ( self ) : self . download_list = [ DownloadRequest ( url = self . get_url ( metafile ) , filename = self . get_filepath ( metafile ) , data_type = AwsConstants . AWS_FILES [ metafile ] , data_name = metafile ) for metafile in self . metafiles if metafile in AwsConstants . PRODUCT_FILES ] tile_parent_folder = os . path . join ( self . parent_folder , self . product_id ) for tile_info in self . product_info [ 'tiles' ] : tile_name , date , aws_index = self . url_to_tile ( self . get_tile_url ( tile_info ) ) if self . tile_list is None or AwsTile . parse_tile_name ( tile_name ) in self . tile_list : tile_downloads , tile_folders = AwsTile ( tile_name , date , aws_index , parent_folder = tile_parent_folder , bands = self . bands , metafiles = self . metafiles , data_source = self . data_source ) . get_requests ( ) self . download_list . extend ( tile_downloads ) self . folder_list . extend ( tile_folders ) self . sort_download_list ( ) return self . download_list , self . folder_list
Creates product structure and returns list of files for download .
316
12
223,233
def get_data_source ( self ) : product_type = self . product_id . split ( '_' ) [ 1 ] if product_type . endswith ( 'L1C' ) or product_type == 'OPER' : return DataSource . SENTINEL2_L1C if product_type . endswith ( 'L2A' ) or product_type == 'USER' : return DataSource . SENTINEL2_L2A raise ValueError ( 'Unknown data source of product {}' . format ( self . product_id ) )
The method determines data source from product ID .
125
9
223,234
def get_date ( self ) : if self . safe_type == EsaSafeType . OLD_TYPE : name = self . product_id . split ( '_' ) [ - 2 ] date = [ name [ 1 : 5 ] , name [ 5 : 7 ] , name [ 7 : 9 ] ] else : name = self . product_id . split ( '_' ) [ 2 ] date = [ name [ : 4 ] , name [ 4 : 6 ] , name [ 6 : 8 ] ] return '-' . join ( date_part . lstrip ( '0' ) for date_part in date )
Collects sensing date of the product .
133
8
223,235
def get_product_url ( self , force_http = False ) : base_url = self . base_http_url if force_http else self . base_url return '{}products/{}/{}' . format ( base_url , self . date . replace ( '-' , '/' ) , self . product_id )
Creates base url of product location on AWS .
75
10
223,236
def parse_tile_name ( name ) : tile_name = name . lstrip ( 'T0' ) if len ( tile_name ) == 4 : tile_name = '0' + tile_name if len ( tile_name ) != 5 : raise ValueError ( 'Invalid tile name {}' . format ( name ) ) return tile_name
Parses and verifies tile name .
75
9
223,237
def get_requests ( self ) : self . download_list = [ ] for data_name in [ band for band in self . bands if self . _band_exists ( band ) ] + self . metafiles : if data_name in AwsConstants . TILE_FILES : url = self . get_url ( data_name ) filename = self . get_filepath ( data_name ) self . download_list . append ( DownloadRequest ( url = url , filename = filename , data_type = AwsConstants . AWS_FILES [ data_name ] , data_name = data_name ) ) self . sort_download_list ( ) return self . download_list , self . folder_list
Creates tile structure and returns list of files for download .
158
12
223,238
def get_aws_index ( self ) : if self . aws_index is not None : return self . aws_index tile_info_list = get_tile_info ( self . tile_name , self . datetime , all_tiles = True ) if not tile_info_list : raise ValueError ( 'Cannot find aws_index for specified tile and time' ) if self . data_source is DataSource . SENTINEL2_L2A : for tile_info in sorted ( tile_info_list , key = self . _parse_aws_index ) : try : self . aws_index = self . _parse_aws_index ( tile_info ) self . get_tile_info ( ) return self . aws_index except AwsDownloadFailedException : pass return self . _parse_aws_index ( tile_info_list [ 0 ] )
Returns tile index on AWS . If tile_index was not set during class initialization it will be determined according to existing tiles on AWS .
196
27
223,239
def tile_is_valid ( self ) : return self . tile_info is not None and ( self . datetime == self . date or self . datetime == self . parse_datetime ( self . tile_info [ 'timestamp' ] ) )
Checks if tile has tile info and valid timestamp
55
10
223,240
def get_tile_url ( self , force_http = False ) : base_url = self . base_http_url if force_http else self . base_url url = '{}tiles/{}/{}/{}/' . format ( base_url , self . tile_name [ 0 : 2 ] . lstrip ( '0' ) , self . tile_name [ 2 ] , self . tile_name [ 3 : 5 ] ) date_params = self . date . split ( '-' ) for param in date_params : url += param . lstrip ( '0' ) + '/' return url + str ( self . aws_index )
Creates base url of tile location on AWS .
147
10
223,241
def split32 ( data ) : all_pieces = [ ] for position in range ( 0 , len ( data ) , 32 ) : piece = data [ position : position + 32 ] all_pieces . append ( piece ) return all_pieces
Split data into pieces of 32 bytes .
50
8
223,242
def _canonical_type ( name ) : # pylint: disable=too-many-return-statements if name == 'int' : return 'int256' if name == 'uint' : return 'uint256' if name == 'fixed' : return 'fixed128x128' if name == 'ufixed' : return 'ufixed128x128' if name . startswith ( 'int[' ) : return 'int256' + name [ 3 : ] if name . startswith ( 'uint[' ) : return 'uint256' + name [ 4 : ] if name . startswith ( 'fixed[' ) : return 'fixed128x128' + name [ 5 : ] if name . startswith ( 'ufixed[' ) : return 'ufixed128x128' + name [ 6 : ] return name
Replace aliases to the corresponding type to compute the ids .
179
13
223,243
def method_id ( name , encode_types ) : function_types = [ _canonical_type ( type_ ) for type_ in encode_types ] function_signature = '{function_name}({canonical_types})' . format ( function_name = name , canonical_types = ',' . join ( function_types ) , ) function_keccak = utils . sha3 ( function_signature ) first_bytes = function_keccak [ : 4 ] return big_endian_to_int ( first_bytes )
Return the unique method id .
121
6
223,244
def event_id ( name , encode_types ) : event_types = [ _canonical_type ( type_ ) for type_ in encode_types ] event_signature = '{event_name}({canonical_types})' . format ( event_name = name , canonical_types = ',' . join ( event_types ) , ) return big_endian_to_int ( utils . sha3 ( event_signature ) )
Return the event id .
99
5
223,245
def encode_function_call ( self , function_name , args ) : if function_name not in self . function_data : raise ValueError ( 'Unkown function {}' . format ( function_name ) ) description = self . function_data [ function_name ] function_selector = zpad ( encode_int ( description [ 'prefix' ] ) , 4 ) arguments = encode_abi ( description [ 'encode_types' ] , args ) return function_selector + arguments
Return the encoded function call .
106
6
223,246
def decode_function_result ( self , function_name , data ) : description = self . function_data [ function_name ] arguments = decode_abi ( description [ 'decode_types' ] , data ) return arguments
Return the function call result decoded .
48
8
223,247
def encode_constructor_arguments ( self , args ) : if self . constructor_data is None : raise ValueError ( "The contract interface didn't have a constructor" ) return encode_abi ( self . constructor_data [ 'encode_types' ] , args )
Return the encoded constructor call .
59
6
223,248
def decode_event ( self , log_topics , log_data ) : # https://github.com/ethereum/wiki/wiki/Ethereum-Contract-ABI#function-selector-and-argument-encoding # topics[0]: keccak(EVENT_NAME+"("+EVENT_ARGS.map(canonical_type_of).join(",")+")") # If the event is declared as anonymous the topics[0] is not generated; if not len ( log_topics ) or log_topics [ 0 ] not in self . event_data : raise ValueError ( 'Unknown log type' ) event_id_ = log_topics [ 0 ] event = self . event_data [ event_id_ ] # data: abi_serialise(EVENT_NON_INDEXED_ARGS) # EVENT_NON_INDEXED_ARGS is the series of EVENT_ARGS that are not # indexed, abi_serialise is the ABI serialisation function used for # returning a series of typed values from a function. unindexed_types = [ type_ for type_ , indexed in zip ( event [ 'types' ] , event [ 'indexed' ] ) if not indexed ] unindexed_args = decode_abi ( unindexed_types , log_data ) # topics[n]: EVENT_INDEXED_ARGS[n - 1] # EVENT_INDEXED_ARGS is the series of EVENT_ARGS that are indexed indexed_count = 1 # skip topics[0] result = { } for name , type_ , indexed in zip ( event [ 'names' ] , event [ 'types' ] , event [ 'indexed' ] ) : if indexed : topic_bytes = utils . zpad ( utils . encode_int ( log_topics [ indexed_count ] ) , 32 , ) indexed_count += 1 value = decode_single ( process_type ( type_ ) , topic_bytes ) else : value = unindexed_args . pop ( 0 ) result [ name ] = value result [ '_event_type' ] = utils . to_string ( event [ 'name' ] ) return result
Return a dictionary representation the log .
484
7
223,249
def listen ( self , log , noprint = True ) : try : result = self . decode_event ( log . topics , log . data ) except ValueError : return # api compatibility if not noprint : print ( result ) return result
Return a dictionary representation of the Log instance .
52
9
223,250
def unpack_to_nibbles ( bindata ) : o = bin_to_nibbles ( bindata ) flags = o [ 0 ] if flags & 2 : o . append ( NIBBLE_TERMINATOR ) if flags & 1 == 1 : o = o [ 1 : ] else : o = o [ 2 : ] return o
unpack packed binary data to nibbles
75
8
223,251
def starts_with ( full , part ) : if len ( full ) < len ( part ) : return False return full [ : len ( part ) ] == part
test whether the items in the part is the leading items of the full
34
14
223,252
def _get_node_type ( self , node ) : if node == BLANK_NODE : return NODE_TYPE_BLANK if len ( node ) == 2 : nibbles = unpack_to_nibbles ( node [ 0 ] ) has_terminator = ( nibbles and nibbles [ - 1 ] == NIBBLE_TERMINATOR ) return NODE_TYPE_LEAF if has_terminator else NODE_TYPE_EXTENSION if len ( node ) == 17 : return NODE_TYPE_BRANCH
get node type and content
118
5
223,253
def _get ( self , node , key ) : node_type = self . _get_node_type ( node ) if node_type == NODE_TYPE_BLANK : return BLANK_NODE if node_type == NODE_TYPE_BRANCH : # already reach the expected node if not key : return node [ - 1 ] sub_node = self . _decode_to_node ( node [ key [ 0 ] ] ) return self . _get ( sub_node , key [ 1 : ] ) # key value node curr_key = without_terminator ( unpack_to_nibbles ( node [ 0 ] ) ) if node_type == NODE_TYPE_LEAF : return node [ 1 ] if key == curr_key else BLANK_NODE if node_type == NODE_TYPE_EXTENSION : # traverse child nodes if starts_with ( key , curr_key ) : sub_node = self . _decode_to_node ( node [ 1 ] ) return self . _get ( sub_node , key [ len ( curr_key ) : ] ) else : return BLANK_NODE
get value inside a node
253
5
223,254
def _normalize_branch_node ( self , node ) : # sys.stderr.write('nbn\n') not_blank_items_count = sum ( 1 for x in range ( 17 ) if node [ x ] ) assert not_blank_items_count >= 1 if not_blank_items_count > 1 : self . _encode_node ( node ) return node # now only one item is not blank not_blank_index = [ i for i , item in enumerate ( node ) if item ] [ 0 ] # the value item is not blank if not_blank_index == 16 : o = [ pack_nibbles ( with_terminator ( [ ] ) ) , node [ 16 ] ] self . _encode_node ( o ) return o # normal item is not blank sub_node = self . _decode_to_node ( node [ not_blank_index ] ) sub_node_type = self . _get_node_type ( sub_node ) if is_key_value_type ( sub_node_type ) : # collape subnode to this node, not this node will have same # terminator with the new sub node, and value does not change self . _delete_node_storage ( sub_node ) new_key = [ not_blank_index ] + unpack_to_nibbles ( sub_node [ 0 ] ) o = [ pack_nibbles ( new_key ) , sub_node [ 1 ] ] self . _encode_node ( o ) return o if sub_node_type == NODE_TYPE_BRANCH : o = [ pack_nibbles ( [ not_blank_index ] ) , node [ not_blank_index ] ] self . _encode_node ( o ) return o assert False
node should have only one item changed
394
7
223,255
def DEBUG ( msg , * args , * * kwargs ) : logger = getLogger ( "DEBUG" ) if len ( logger . handlers ) == 0 : logger . addHandler ( StreamHandler ( ) ) logger . propagate = False logger . setLevel ( logging . DEBUG ) logger . DEV ( msg , * args , * * kwargs )
temporary logger during development that is always on
75
9
223,256
def vm_trace ( ext , msg , compustate , opcode , pushcache , tracer = log_vm_op ) : op , in_args , out_args , fee = opcodes . opcodes [ opcode ] trace_data = { } trace_data [ 'stack' ] = list ( map ( to_string , list ( compustate . prev_stack ) ) ) if compustate . prev_prev_op in ( 'MLOAD' , 'MSTORE' , 'MSTORE8' , 'SHA3' , 'CALL' , 'CALLCODE' , 'CREATE' , 'CALLDATACOPY' , 'CODECOPY' , 'EXTCODECOPY' ) : if len ( compustate . prev_memory ) < 4096 : trace_data [ 'memory' ] = '' . join ( [ encode_hex ( ascii_chr ( x ) ) for x in compustate . prev_memory ] ) else : trace_data [ 'sha3memory' ] = encode_hex ( utils . sha3 ( b'' . join ( [ ascii_chr ( x ) for x in compustate . prev_memory ] ) ) ) if compustate . prev_prev_op in ( 'SSTORE' , ) or compustate . steps == 0 : trace_data [ 'storage' ] = ext . log_storage ( msg . to ) trace_data [ 'gas' ] = to_string ( compustate . prev_gas ) trace_data [ 'gas_cost' ] = to_string ( compustate . prev_gas - compustate . gas ) trace_data [ 'fee' ] = fee trace_data [ 'inst' ] = opcode trace_data [ 'pc' ] = to_string ( compustate . prev_pc ) if compustate . steps == 0 : trace_data [ 'depth' ] = msg . depth trace_data [ 'address' ] = msg . to trace_data [ 'steps' ] = compustate . steps trace_data [ 'depth' ] = msg . depth if op [ : 4 ] == 'PUSH' : print ( repr ( pushcache ) ) trace_data [ 'pushvalue' ] = pushcache [ compustate . prev_pc ] tracer . trace ( 'vm' , op = op , * * trace_data ) compustate . steps += 1 compustate . prev_prev_op = op
This diverges from normal logging as we use the logging namespace only to decide which features get logged in eth . vm . op i . e . tracing can not be activated by activating a sub like eth . vm . op . stack
548
46
223,257
def sign ( self , key , network_id = None ) : if network_id is None : rawhash = utils . sha3 ( rlp . encode ( unsigned_tx_from_tx ( self ) , UnsignedTransaction ) ) else : assert 1 <= network_id < 2 ** 63 - 18 rlpdata = rlp . encode ( rlp . infer_sedes ( self ) . serialize ( self ) [ : - 3 ] + [ network_id , b'' , b'' ] ) rawhash = utils . sha3 ( rlpdata ) key = normalize_key ( key ) v , r , s = ecsign ( rawhash , key ) if network_id is not None : v += 8 + network_id * 2 ret = self . copy ( v = v , r = r , s = s ) ret . _sender = utils . privtoaddr ( key ) return ret
Sign this transaction with a private key .
200
8
223,258
def creates ( self ) : if self . to in ( b'' , '\0' * 20 ) : return mk_contract_address ( self . sender , self . nonce )
returns the address of a contract created by this tx
39
11
223,259
def check_pow ( block_number , header_hash , mixhash , nonce , difficulty ) : log . debug ( 'checking pow' , block_number = block_number ) if len ( mixhash ) != 32 or len ( header_hash ) != 32 or len ( nonce ) != 8 : return False # Grab current cache cache = get_cache ( block_number ) mining_output = hashimoto_light ( block_number , cache , header_hash , nonce ) if mining_output [ b'mix digest' ] != mixhash : return False return utils . big_endian_to_int ( mining_output [ b'result' ] ) <= 2 ** 256 // ( difficulty or 1 )
Check if the proof - of - work of the block is valid .
155
14
223,260
def to_dict ( self ) : d = { } for field in ( 'prevhash' , 'uncles_hash' , 'extra_data' , 'nonce' , 'mixhash' ) : d [ field ] = '0x' + encode_hex ( getattr ( self , field ) ) for field in ( 'state_root' , 'tx_list_root' , 'receipts_root' , 'coinbase' ) : d [ field ] = encode_hex ( getattr ( self , field ) ) for field in ( 'number' , 'difficulty' , 'gas_limit' , 'gas_used' , 'timestamp' ) : d [ field ] = utils . to_string ( getattr ( self , field ) ) d [ 'bloom' ] = encode_hex ( int256 . serialize ( self . bloom ) ) assert len ( d ) == len ( BlockHeader . fields ) return d
Serialize the header to a readable dictionary .
206
9
223,261
def decode_int ( v ) : if len ( v ) > 0 and ( v [ 0 ] == b'\x00' or v [ 0 ] == 0 ) : raise Exception ( "No leading zero bytes allowed for integers" ) return big_endian_to_int ( v )
decodes and integer from serialization
62
7
223,262
def encode_int ( v ) : if not is_numeric ( v ) or v < 0 or v >= TT256 : raise Exception ( "Integer invalid or out of range: %r" % v ) return int_to_big_endian ( v )
encodes an integer into serialization
56
7
223,263
def print_func_call ( ignore_first_arg = False , max_call_number = 100 ) : from functools import wraps def display ( x ) : x = to_string ( x ) try : x . decode ( 'ascii' ) except BaseException : return 'NON_PRINTABLE' return x local = { 'call_number' : 0 } def inner ( f ) : @ wraps ( f ) def wrapper ( * args , * * kwargs ) : local [ 'call_number' ] += 1 tmp_args = args [ 1 : ] if ignore_first_arg and len ( args ) else args this_call_number = local [ 'call_number' ] print ( ( '{0}#{1} args: {2}, {3}' . format ( f . __name__ , this_call_number , ', ' . join ( [ display ( x ) for x in tmp_args ] ) , ', ' . join ( display ( key ) + '=' + to_string ( value ) for key , value in kwargs . items ( ) ) ) ) ) res = f ( * args , * * kwargs ) print ( ( '{0}#{1} return: {2}' . format ( f . __name__ , this_call_number , display ( res ) ) ) ) if local [ 'call_number' ] > 100 : raise Exception ( "Touch max call number!" ) return res return wrapper return inner
utility function to facilitate debug it will print input args before function call and print return value after function call
321
21
223,264
def get_compiler_path ( ) : # If the user provides a specific solc binary let's use that given_binary = os . environ . get ( 'SOLC_BINARY' ) if given_binary : return given_binary for path in os . getenv ( 'PATH' , '' ) . split ( os . pathsep ) : path = path . strip ( '"' ) executable_path = os . path . join ( path , BINARY ) if os . path . isfile ( executable_path ) and os . access ( executable_path , os . X_OK ) : return executable_path return None
Return the path to the solc compiler .
137
9
223,265
def solc_arguments ( libraries = None , combined = 'bin,abi' , optimize = True , extra_args = None ) : args = [ '--combined-json' , combined , ] def str_of ( address ) : """cast address to string. py2/3 compatability. """ try : return address . decode ( 'utf8' ) except AttributeError : return address if optimize : args . append ( '--optimize' ) if extra_args : try : args . extend ( shlex . split ( extra_args ) ) except BaseException : # if not a parseable string then treat it as a list args . extend ( extra_args ) if libraries is not None and len ( libraries ) : addresses = [ '{name}:{address}' . format ( name = name , address = str_of ( address ) ) for name , address in libraries . items ( ) ] args . extend ( [ '--libraries' , ',' . join ( addresses ) , ] ) return args
Build the arguments to call the solc binary .
216
10
223,266
def solc_parse_output ( compiler_output ) : # At the moment some solc output like --hashes or -- gas will not output # json at all so if used with those arguments the logic here will break. # Perhaps solidity will slowly switch to a json only output and this comment # can eventually go away and we will not need to add more logic here at # all. result = yaml . safe_load ( compiler_output ) [ 'contracts' ] if 'bin' in tuple ( result . values ( ) ) [ 0 ] : for value in result . values ( ) : value [ 'bin_hex' ] = value [ 'bin' ] # decoding can fail if the compiled contract has unresolved symbols try : value [ 'bin' ] = decode_hex ( value [ 'bin_hex' ] ) except ( TypeError , ValueError ) : pass for json_data in ( 'abi' , 'devdoc' , 'userdoc' ) : # the values in the output can be configured through the # --combined-json flag, check that it's present in the first value and # assume all values are consistent if json_data not in tuple ( result . values ( ) ) [ 0 ] : continue for value in result . values ( ) : value [ json_data ] = yaml . safe_load ( value [ json_data ] ) return result
Parses the compiler output .
291
7
223,267
def compiler_version ( ) : version_info = subprocess . check_output ( [ 'solc' , '--version' ] ) match = re . search ( b'^Version: ([0-9a-z.-]+)/' , version_info , re . MULTILINE ) if match : return match . group ( 1 )
Return the version of the installed solc .
74
9
223,268
def solidity_names ( code ) : # pylint: disable=too-many-branches names = [ ] in_string = None backslash = False comment = None # "parse" the code by hand to handle the corner cases: # - the contract or library can be inside a comment or string # - multiline comments # - the contract and library keywords could not be at the start of the line for pos , char in enumerate ( code ) : if in_string : if not backslash and in_string == char : in_string = None backslash = False if char == '\\' : # pylint: disable=simplifiable-if-statement backslash = True else : backslash = False elif comment == '//' : if char in ( '\n' , '\r' ) : comment = None elif comment == '/*' : if char == '*' and code [ pos + 1 ] == '/' : comment = None else : if char == '"' or char == "'" : in_string = char if char == '/' : if code [ pos + 1 ] == '/' : comment = '//' if code [ pos + 1 ] == '*' : comment = '/*' if char == 'c' and code [ pos : pos + 8 ] == 'contract' : result = re . match ( '^contract[^_$a-zA-Z]+([_$a-zA-Z][_$a-zA-Z0-9]*)' , code [ pos : ] ) if result : names . append ( ( 'contract' , result . groups ( ) [ 0 ] ) ) if char == 'i' and code [ pos : pos + 9 ] == 'interface' : result = re . match ( '^interface[^_$a-zA-Z]+([_$a-zA-Z][_$a-zA-Z0-9]*)' , code [ pos : ] ) if result : names . append ( ( 'contract' , result . groups ( ) [ 0 ] ) ) if char == 'l' and code [ pos : pos + 7 ] == 'library' : result = re . match ( '^library[^_$a-zA-Z]+([_$a-zA-Z][_$a-zA-Z0-9]*)' , code [ pos : ] ) if result : names . append ( ( 'library' , result . groups ( ) [ 0 ] ) ) return names
Return the library and contract names in order of appearence .
551
13
223,269
def compile_file ( filepath , libraries = None , combined = 'bin,abi' , optimize = True , extra_args = None ) : workdir , filename = os . path . split ( filepath ) args = solc_arguments ( libraries = libraries , combined = combined , optimize = optimize , extra_args = extra_args ) args . insert ( 0 , get_compiler_path ( ) ) args . append ( filename ) output = subprocess . check_output ( args , cwd = workdir ) return solc_parse_output ( output )
Return the compile contract code .
121
6
223,270
def solidity_get_contract_key ( all_contracts , filepath , contract_name ) : if contract_name in all_contracts : return contract_name else : if filepath is None : filename = '<stdin>' else : _ , filename = os . path . split ( filepath ) contract_key = filename + ":" + contract_name return contract_key if contract_key in all_contracts else None
A backwards compatible method of getting the key to the all_contracts dictionary for a particular contract
95
19
223,271
def compile ( cls , code , path = None , libraries = None , contract_name = '' , extra_args = None ) : result = cls . _code_or_path ( code , path , contract_name , libraries , 'bin' , extra_args ) return result [ 'bin' ]
Return the binary of last contract in code .
66
9
223,272
def combined ( cls , code , path = None , extra_args = None ) : if code and path : raise ValueError ( 'sourcecode and path are mutually exclusive.' ) if path : contracts = compile_file ( path , extra_args = extra_args ) with open ( path ) as handler : code = handler . read ( ) elif code : contracts = compile_code ( code , extra_args = extra_args ) else : raise ValueError ( 'either code or path needs to be supplied.' ) sorted_contracts = [ ] for name in solidity_names ( code ) : sorted_contracts . append ( ( name [ 1 ] , solidity_get_contract_data ( contracts , path , name [ 1 ] ) ) ) return sorted_contracts
Compile combined - json with abi bin devdoc userdoc .
165
14
223,273
def compile_rich ( cls , code , path = None , extra_args = None ) : return { contract_name : { 'code' : '0x' + contract . get ( 'bin_hex' ) , 'info' : { 'abiDefinition' : contract . get ( 'abi' ) , 'compilerVersion' : cls . compiler_version ( ) , 'developerDoc' : contract . get ( 'devdoc' ) , 'language' : 'Solidity' , 'languageVersion' : '0' , 'source' : code , 'userDoc' : contract . get ( 'userdoc' ) } , } for contract_name , contract in cls . combined ( code , path = path , extra_args = extra_args ) }
full format as returned by jsonrpc
166
8
223,274
def validate_uncles ( state , block ) : # Make sure hash matches up if utils . sha3 ( rlp . encode ( block . uncles ) ) != block . header . uncles_hash : raise VerificationFailed ( "Uncle hash mismatch" ) # Enforce maximum number of uncles if len ( block . uncles ) > state . config [ 'MAX_UNCLES' ] : raise VerificationFailed ( "Too many uncles" ) # Uncle must have lower block number than blockj for uncle in block . uncles : if uncle . number >= block . header . number : raise VerificationFailed ( "Uncle number too high" ) # Check uncle validity MAX_UNCLE_DEPTH = state . config [ 'MAX_UNCLE_DEPTH' ] ancestor_chain = [ block . header ] + [ a for a in state . prev_headers [ : MAX_UNCLE_DEPTH + 1 ] if a ] # Uncles of this block cannot be direct ancestors and cannot also # be uncles included 1-6 blocks ago ineligible = [ b . hash for b in ancestor_chain ] for blknum , uncles in state . recent_uncles . items ( ) : if state . block_number > int ( blknum ) >= state . block_number - MAX_UNCLE_DEPTH : ineligible . extend ( [ u for u in uncles ] ) eligible_ancestor_hashes = [ x . hash for x in ancestor_chain [ 2 : ] ] for uncle in block . uncles : if uncle . prevhash not in eligible_ancestor_hashes : raise VerificationFailed ( "Uncle does not have a valid ancestor" ) parent = [ x for x in ancestor_chain if x . hash == uncle . prevhash ] [ 0 ] if uncle . difficulty != calc_difficulty ( parent , uncle . timestamp , config = state . config ) : raise VerificationFailed ( "Difficulty mismatch" ) if uncle . number != parent . number + 1 : raise VerificationFailed ( "Number mismatch" ) if uncle . timestamp < parent . timestamp : raise VerificationFailed ( "Timestamp mismatch" ) if uncle . hash in ineligible : raise VerificationFailed ( "Duplicate uncle" ) if uncle . gas_used > uncle . gas_limit : raise VerificationFailed ( "Uncle used too much gas" ) if not check_pow ( state , uncle ) : raise VerificationFailed ( 'uncle pow mismatch' ) ineligible . append ( uncle . hash ) return True
Validate the uncles of this block .
550
9
223,275
def finalize ( state , block ) : if state . is_METROPOLIS ( ) : br = state . config [ 'BYZANTIUM_BLOCK_REWARD' ] nr = state . config [ 'BYZANTIUM_NEPHEW_REWARD' ] else : br = state . config [ 'BLOCK_REWARD' ] nr = state . config [ 'NEPHEW_REWARD' ] delta = int ( br + nr * len ( block . uncles ) ) state . delta_balance ( state . block_coinbase , delta ) udpf = state . config [ 'UNCLE_DEPTH_PENALTY_FACTOR' ] for uncle in block . uncles : r = int ( br * ( udpf + uncle . number - state . block_number ) // udpf ) state . delta_balance ( uncle . coinbase , r ) if state . block_number - state . config [ 'MAX_UNCLE_DEPTH' ] in state . recent_uncles : del state . recent_uncles [ state . block_number - state . config [ 'MAX_UNCLE_DEPTH' ] ]
Apply rewards and commit .
258
5
223,276
def ensure_new_style_deprecation ( cli_ctx , kwargs , object_type ) : deprecate_info = kwargs . get ( 'deprecate_info' , None ) if isinstance ( deprecate_info , Deprecated ) : deprecate_info . object_type = object_type elif isinstance ( deprecate_info , STRING_TYPES ) : deprecate_info = Deprecated ( cli_ctx , redirect = deprecate_info , object_type = object_type ) kwargs [ 'deprecate_info' ] = deprecate_info return deprecate_info
Helper method to make the previous string - based deprecate_info kwarg work with the new style .
147
23
223,277
def _version_less_than_or_equal_to ( self , v1 , v2 ) : # pylint: disable=no-name-in-module, import-error from distutils . version import LooseVersion return LooseVersion ( v1 ) <= LooseVersion ( v2 )
Returns true if v1 < = v2 .
66
10
223,278
def prompt_choice_list ( msg , a_list , default = 1 , help_string = None ) : verify_is_a_tty ( ) options = '\n' . join ( [ ' [{}] {}{}' . format ( i + 1 , x [ 'name' ] if isinstance ( x , dict ) and 'name' in x else x , ' - ' + x [ 'desc' ] if isinstance ( x , dict ) and 'desc' in x else '' ) for i , x in enumerate ( a_list ) ] ) allowed_vals = list ( range ( 1 , len ( a_list ) + 1 ) ) while True : val = _input ( '{}\n{}\nPlease enter a choice [Default choice({})]: ' . format ( msg , options , default ) ) if val == '?' and help_string is not None : print ( help_string ) continue if not val : val = '{}' . format ( default ) try : ans = int ( val ) if ans in allowed_vals : # array index is 0-based, user input is 1-based return ans - 1 raise ValueError except ValueError : logger . warning ( 'Valid values are %s' , allowed_vals )
Prompt user to select from a list of possible choices .
270
12
223,279
def _add_argument ( obj , arg ) : argparse_options = { name : value for name , value in arg . options . items ( ) if name in ARGPARSE_SUPPORTED_KWARGS } if arg . options_list : scrubbed_options_list = [ ] for item in arg . options_list : if isinstance ( item , Deprecated ) : # don't add expired options to the parser if item . expired ( ) : continue class _DeprecatedOption ( str ) : def __new__ ( cls , * args , * * kwargs ) : instance = str . __new__ ( cls , * args , * * kwargs ) return instance option = _DeprecatedOption ( item . target ) setattr ( option , 'deprecate_info' , item ) item = option scrubbed_options_list . append ( item ) return obj . add_argument ( * scrubbed_options_list , * * argparse_options ) if 'required' in argparse_options : del argparse_options [ 'required' ] if 'metavar' not in argparse_options : argparse_options [ 'metavar' ] = '<{}>' . format ( argparse_options [ 'dest' ] . upper ( ) ) return obj . add_argument ( * * argparse_options )
Only pass valid argparse kwargs to argparse . ArgumentParser . add_argument
293
18
223,280
def load_command_table ( self , command_loader ) : cmd_tbl = command_loader . command_table grp_tbl = command_loader . command_group_table if not cmd_tbl : raise ValueError ( 'The command table is empty. At least one command is required.' ) # If we haven't already added a subparser, we # better do it. if not self . subparsers : sp = self . add_subparsers ( dest = '_command' ) sp . required = True self . subparsers = { ( ) : sp } for command_name , metadata in cmd_tbl . items ( ) : subparser = self . _get_subparser ( command_name . split ( ) , grp_tbl ) command_verb = command_name . split ( ) [ - 1 ] # To work around http://bugs.python.org/issue9253, we artificially add any new # parsers we add to the "choices" section of the subparser. subparser = self . _get_subparser ( command_name . split ( ) , grp_tbl ) deprecate_info = metadata . deprecate_info if not subparser or ( deprecate_info and deprecate_info . expired ( ) ) : continue # inject command_module designer's help formatter -- default is HelpFormatter fc = metadata . formatter_class or argparse . HelpFormatter command_parser = subparser . add_parser ( command_verb , description = metadata . description , parents = self . parents , conflict_handler = 'error' , help_file = metadata . help , formatter_class = fc , cli_help = self . cli_help ) command_parser . cli_ctx = self . cli_ctx command_validator = metadata . validator argument_validators = [ ] argument_groups = { } for arg in metadata . arguments . values ( ) : # don't add deprecated arguments to the parser deprecate_info = arg . type . settings . get ( 'deprecate_info' , None ) if deprecate_info and deprecate_info . expired ( ) : continue if arg . validator : argument_validators . append ( arg . validator ) if arg . arg_group : try : group = argument_groups [ arg . arg_group ] except KeyError : # group not found so create group_name = '{} Arguments' . format ( arg . arg_group ) group = command_parser . add_argument_group ( arg . arg_group , group_name ) argument_groups [ arg . arg_group ] = group param = CLICommandParser . _add_argument ( group , arg ) else : param = CLICommandParser . _add_argument ( command_parser , arg ) param . completer = arg . completer param . deprecate_info = arg . deprecate_info command_parser . set_defaults ( func = metadata , command = command_name , _command_validator = command_validator , _argument_validators = argument_validators , _parser = command_parser )
Process the command table and load it into the parser
689
10
223,281
def _get_subparser ( self , path , group_table = None ) : group_table = group_table or { } for length in range ( 0 , len ( path ) ) : parent_path = path [ : length ] parent_subparser = self . subparsers . get ( tuple ( parent_path ) , None ) if not parent_subparser : # No subparser exists for the given subpath - create and register # a new subparser. # Since we know that we always have a root subparser (we created) # one when we started loading the command table, and we walk the # path from left to right (i.e. for "cmd subcmd1 subcmd2", we start # with ensuring that a subparser for cmd exists, then for subcmd1, # subcmd2 and so on), we know we can always back up one step and # add a subparser if one doesn't exist command_group = group_table . get ( ' ' . join ( parent_path ) ) if command_group : deprecate_info = command_group . group_kwargs . get ( 'deprecate_info' , None ) if deprecate_info and deprecate_info . expired ( ) : continue grandparent_path = path [ : length - 1 ] grandparent_subparser = self . subparsers [ tuple ( grandparent_path ) ] new_path = path [ length - 1 ] new_parser = grandparent_subparser . add_parser ( new_path , cli_help = self . cli_help ) # Due to http://bugs.python.org/issue9253, we have to give the subparser # a destination and set it to required in order to get a meaningful error parent_subparser = new_parser . add_subparsers ( dest = '_subcommand' ) command_group = group_table . get ( ' ' . join ( parent_path ) , None ) deprecate_info = None if command_group : deprecate_info = command_group . group_kwargs . get ( 'deprecate_info' , None ) parent_subparser . required = True parent_subparser . deprecate_info = deprecate_info self . subparsers [ tuple ( path [ 0 : length ] ) ] = parent_subparser return parent_subparser
For each part of the path walk down the tree of subparsers creating new ones if one doesn t already exist .
511
24
223,282
def parse_args ( self , args = None , namespace = None ) : self . _expand_prefixed_files ( args ) return super ( CLICommandParser , self ) . parse_args ( args )
Overrides argparse . ArgumentParser . parse_args
47
12
223,283
def extract_full_summary_from_signature ( operation ) : lines = inspect . getdoc ( operation ) regex = r'\s*(:param)\s+(.+?)\s*:(.*)' summary = '' if lines : match = re . search ( regex , lines ) summary = lines [ : match . regs [ 0 ] [ 0 ] ] if match else lines summary = summary . replace ( '\n' , ' ' ) . replace ( '\r' , '' ) return summary
Extract the summary from the docstring of the command .
108
12
223,284
def get_runtime_version ( self ) : # pylint: disable=no-self-use import platform version_info = '\n\n' version_info += 'Python ({}) {}' . format ( platform . system ( ) , sys . version ) version_info += '\n\n' version_info += 'Python location \'{}\'' . format ( sys . executable ) version_info += '\n' return version_info
Get the runtime information .
97
5
223,285
def show_version ( self ) : version_info = self . get_cli_version ( ) version_info += self . get_runtime_version ( ) print ( version_info , file = self . out_file )
Print version information to the out file .
48
8
223,286
def unregister_event ( self , event_name , handler ) : try : self . _event_handlers [ event_name ] . remove ( handler ) except ValueError : pass
Unregister a callable that will be called when event is raised .
39
14
223,287
def raise_event ( self , event_name , * * kwargs ) : handlers = list ( self . _event_handlers [ event_name ] ) logger . debug ( 'Event: %s %s' , event_name , handlers ) for func in handlers : func ( self , * * kwargs )
Raise an event . Calls each handler in turn with kwargs
69
14
223,288
def exception_handler ( self , ex ) : # pylint: disable=no-self-use if isinstance ( ex , CLIError ) : logger . error ( ex ) else : logger . exception ( ex ) return 1
The default exception handler
48
4
223,289
def invoke ( self , args , initial_invocation_data = None , out_file = None ) : from . util import CommandResultItem if not isinstance ( args , ( list , tuple ) ) : raise TypeError ( 'args should be a list or tuple.' ) exit_code = 0 try : args = self . completion . get_completion_args ( ) or args out_file = out_file or self . out_file self . logging . configure ( args ) logger . debug ( 'Command arguments: %s' , args ) self . raise_event ( EVENT_CLI_PRE_EXECUTE ) if CLI . _should_show_version ( args ) : self . show_version ( ) self . result = CommandResultItem ( None ) else : self . invocation = self . invocation_cls ( cli_ctx = self , parser_cls = self . parser_cls , commands_loader_cls = self . commands_loader_cls , help_cls = self . help_cls , initial_data = initial_invocation_data ) cmd_result = self . invocation . execute ( args ) self . result = cmd_result exit_code = self . result . exit_code output_type = self . invocation . data [ 'output' ] if cmd_result and cmd_result . result is not None : formatter = self . output . get_formatter ( output_type ) self . output . out ( cmd_result , formatter = formatter , out_file = out_file ) self . raise_event ( EVENT_CLI_POST_EXECUTE ) except KeyboardInterrupt as ex : self . result = CommandResultItem ( None , error = ex ) exit_code = 1 except Exception as ex : # pylint: disable=broad-except exit_code = self . exception_handler ( ex ) self . result = CommandResultItem ( None , error = ex ) finally : pass self . result . exit_code = exit_code return exit_code
Invoke a command .
434
5
223,290
def get_logger ( module_name = None ) : if module_name : logger_name = '{}.{}' . format ( CLI_LOGGER_NAME , module_name ) else : logger_name = CLI_LOGGER_NAME return logging . getLogger ( logger_name )
Get the logger for a module . If no module name is given the current CLI logger is returned .
65
20
223,291
def configure ( self , args ) : verbose_level = self . _determine_verbose_level ( args ) log_level_config = self . console_log_configs [ verbose_level ] root_logger = logging . getLogger ( ) cli_logger = logging . getLogger ( CLI_LOGGER_NAME ) # Set the levels of the loggers to lowest level. # Handlers can override by choosing a higher level. root_logger . setLevel ( logging . DEBUG ) cli_logger . setLevel ( logging . DEBUG ) cli_logger . propagate = False if root_logger . handlers and cli_logger . handlers : # loggers already configured return self . _init_console_handlers ( root_logger , cli_logger , log_level_config ) if self . file_log_enabled : self . _init_logfile_handlers ( root_logger , cli_logger ) get_logger ( __name__ ) . debug ( "File logging enabled - writing logs to '%s'." , self . log_dir )
Configure the loggers with the appropriate log level etc .
246
12
223,292
def _determine_verbose_level ( self , args ) : verbose_level = 0 for arg in args : if arg == CLILogging . VERBOSE_FLAG : verbose_level += 1 elif arg == CLILogging . DEBUG_FLAG : verbose_level += 2 # Use max verbose level if too much verbosity specified. return min ( verbose_level , len ( self . console_log_configs ) - 1 )
Get verbose level by reading the arguments .
99
9
223,293
def enum_choice_list ( data ) : # transform enum types, otherwise assume list of string choices if not data : return { } try : choices = [ x . value for x in data ] except AttributeError : choices = data def _type ( value ) : return next ( ( x for x in choices if x . lower ( ) == value . lower ( ) ) , value ) if value else value params = { 'choices' : CaseInsensitiveList ( choices ) , 'type' : _type } return params
Creates the argparse choices and type kwargs for a supplied enum type or list of strings
110
20
223,294
def register_cli_argument ( self , scope , dest , argtype , * * kwargs ) : argument = CLIArgumentType ( overrides = argtype , * * kwargs ) self . arguments [ scope ] [ dest ] = argument
Add an argument to the argument registry
53
7
223,295
def get_cli_argument ( self , command , name ) : parts = command . split ( ) result = CLIArgumentType ( ) for index in range ( 0 , len ( parts ) + 1 ) : probe = ' ' . join ( parts [ 0 : index ] ) override = self . arguments . get ( probe , { } ) . get ( name , None ) if override : result . update ( override ) return result
Get the argument for the command after applying the scope hierarchy
89
11
223,296
def argument ( self , argument_dest , arg_type = None , * * kwargs ) : self . _check_stale ( ) if not self . _applicable ( ) : return deprecate_action = self . _handle_deprecations ( argument_dest , * * kwargs ) if deprecate_action : kwargs [ 'action' ] = deprecate_action self . command_loader . argument_registry . register_cli_argument ( self . command_scope , argument_dest , arg_type , * * kwargs )
Register an argument for the given command scope using a knack . arguments . CLIArgumentType
126
18
223,297
def positional ( self , argument_dest , arg_type = None , * * kwargs ) : self . _check_stale ( ) if not self . _applicable ( ) : return if self . command_scope not in self . command_loader . command_table : raise ValueError ( "command authoring error: positional argument '{}' cannot be registered to a group-level " "scope '{}'. It must be registered to a specific command." . format ( argument_dest , self . command_scope ) ) # Before adding the new positional arg, ensure that there are no existing positional arguments # registered for this command. command_args = self . command_loader . argument_registry . arguments [ self . command_scope ] positional_args = { k : v for k , v in command_args . items ( ) if v . settings . get ( 'options_list' ) == [ ] } if positional_args and argument_dest not in positional_args : raise CLIError ( "command authoring error: commands may have, at most, one positional argument. '{}' already " "has positional argument: {}." . format ( self . command_scope , ' ' . join ( positional_args . keys ( ) ) ) ) deprecate_action = self . _handle_deprecations ( argument_dest , * * kwargs ) if deprecate_action : kwargs [ 'action' ] = deprecate_action kwargs [ 'options_list' ] = [ ] self . command_loader . argument_registry . register_cli_argument ( self . command_scope , argument_dest , arg_type , * * kwargs )
Register a positional argument for the given command scope using a knack . arguments . CLIArgumentType
365
19
223,298
def extra ( self , argument_dest , * * kwargs ) : self . _check_stale ( ) if not self . _applicable ( ) : return if self . command_scope in self . command_loader . command_group_table : raise ValueError ( "command authoring error: extra argument '{}' cannot be registered to a group-level " "scope '{}'. It must be registered to a specific command." . format ( argument_dest , self . command_scope ) ) deprecate_action = self . _handle_deprecations ( argument_dest , * * kwargs ) if deprecate_action : kwargs [ 'action' ] = deprecate_action self . command_loader . extra_argument_registry [ self . command_scope ] [ argument_dest ] = CLICommandArgument ( argument_dest , * * kwargs )
Register extra parameters for the given command . Typically used to augment auto - command built commands to add more parameters than the specific SDK method introspected .
198
30
223,299
def load_command_table ( self , args ) : # pylint: disable=unused-argument self . cli_ctx . raise_event ( EVENT_CMDLOADER_LOAD_COMMAND_TABLE , cmd_tbl = self . command_table ) return OrderedDict ( self . command_table )
Load commands into the command table
72
6