idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
223,100
def requestOpenOrders ( self , all_clients = False ) : if all_clients : self . ibConn . reqAllOpenOrders ( ) self . ibConn . reqOpenOrders ( )
Request open orders - loads up orders that wasn t created using this session
45
14
223,101
def cancelHistoricalData ( self , contracts = None ) : if contracts == None : contracts = list ( self . contracts . values ( ) ) elif not isinstance ( contracts , list ) : contracts = [ contracts ] for contract in contracts : # tickerId = self.tickerId(contract.m_symbol) tickerId = self . tickerId ( self . contractString ( contract ) ) self . ibConn . cancelHistoricalData ( tickerId = tickerId )
cancel historical data stream
105
5
223,102
def getConId ( self , contract_identifier ) : details = self . contractDetails ( contract_identifier ) if len ( details [ "contracts" ] ) > 1 : return details [ "m_underConId" ] return details [ "m_summary" ] [ "m_conId" ]
Get contracts conId
67
4
223,103
def createComboContract ( self , symbol , legs , currency = "USD" , exchange = None ) : exchange = legs [ 0 ] . m_exchange if exchange is None else exchange contract_tuple = ( symbol , "BAG" , exchange , currency , "" , 0.0 , "" ) contract = self . createContract ( contract_tuple , comboLegs = legs ) return contract
Used for ComboLegs . Expecting list of legs
85
11
223,104
def order_to_dict ( order ) : default = Order ( ) return { field : val for field , val in vars ( order ) . items ( ) if val != getattr ( default , field , None ) }
Convert an IBPy Order object to a dict containing any non - default values .
47
17
223,105
def contract_to_dict ( contract ) : default = Contract ( ) return { field : val for field , val in vars ( contract ) . items ( ) if val != getattr ( default , field , None ) }
Convert an IBPy Contract object to a dict containing any non - default values .
47
17
223,106
def _get_utm_code ( zone , direction ) : dir_dict = { _Direction . NORTH : '6' , _Direction . SOUTH : '7' } return '{}{}{}' . format ( '32' , dir_dict [ direction ] , str ( zone ) . zfill ( 2 ) )
Get UTM code given a zone and direction
72
9
223,107
def _get_utm_name_value_pair ( zone , direction = _Direction . NORTH ) : name = 'UTM_{}{}' . format ( zone , direction . value ) epsg = _get_utm_code ( zone , direction ) return name , epsg
Get name and code for UTM coordinates
63
8
223,108
def _crs_parser ( cls , value ) : parsed_value = value if isinstance ( parsed_value , int ) : parsed_value = str ( parsed_value ) if isinstance ( parsed_value , str ) : parsed_value = parsed_value . strip ( 'epsgEPSG: ' ) return super ( _BaseCRS , cls ) . __new__ ( cls , parsed_value )
Parses user input for class CRS
92
9
223,109
def get_wfs_typename ( cls , data_source ) : is_eocloud = SHConfig ( ) . is_eocloud_ogc_url ( ) return { cls . SENTINEL2_L1C : 'S2.TILE' , cls . SENTINEL2_L2A : 'SEN4CAP_S2L2A.TILE' if is_eocloud else 'DSS2' , cls . SENTINEL1_IW : 'S1.TILE' if is_eocloud else 'DSS3' , cls . SENTINEL1_EW : 'S1_EW.TILE' if is_eocloud else 'DSS3' , cls . SENTINEL1_EW_SH : 'S1_EW_SH.TILE' if is_eocloud else 'DSS3' , cls . DEM : 'DSS4' , cls . MODIS : 'DSS5' , cls . LANDSAT8 : 'L8.TILE' if is_eocloud else 'DSS6' , # eocloud sources only: cls . LANDSAT5 : 'L5.TILE' , cls . LANDSAT7 : 'L7.TILE' , cls . SENTINEL3 : 'S3.TILE' , cls . SENTINEL5P : 'S5p_L2.TILE' , cls . ENVISAT_MERIS : 'ENV.TILE' , cls . SENTINEL2_L3B : 'SEN4CAP_S2L3B.TILE' , cls . LANDSAT8_L2A : 'SEN4CAP_L8L2A.TILE' } [ data_source ]
Maps data source to string identifier for WFS
416
9
223,110
def is_uswest_source ( self ) : return not SHConfig ( ) . is_eocloud_ogc_url ( ) and self . value [ 0 ] in [ _Source . LANDSAT8 , _Source . MODIS , _Source . DEM ]
Checks if data source via Sentinel Hub services is available at US West server
58
15
223,111
def get_available_sources ( cls ) : if SHConfig ( ) . is_eocloud_ogc_url ( ) : return [ cls . SENTINEL2_L1C , cls . SENTINEL2_L2A , cls . SENTINEL2_L3B , cls . SENTINEL1_IW , cls . SENTINEL1_EW , cls . SENTINEL1_EW_SH , cls . SENTINEL3 , cls . SENTINEL5P , cls . LANDSAT5 , cls . LANDSAT7 , cls . LANDSAT8 , cls . LANDSAT8_L2A , cls . ENVISAT_MERIS ] return [ cls . SENTINEL2_L1C , cls . SENTINEL2_L2A , cls . SENTINEL1_IW , cls . SENTINEL1_EW , cls . SENTINEL1_EW_SH , cls . DEM , cls . MODIS , cls . LANDSAT8 ]
Returns which data sources are available for configured Sentinel Hub OGC URL
256
13
223,112
def get_utm_from_wgs84 ( lng , lat ) : _ , _ , zone , _ = utm . from_latlon ( lat , lng ) direction = 'N' if lat >= 0 else 'S' return CRS [ 'UTM_{}{}' . format ( str ( zone ) , direction ) ]
Convert from WGS84 to UTM coordinate system
73
11
223,113
def has_value ( cls , value ) : return any ( value . lower ( ) == item . value . lower ( ) for item in cls )
Tests whether CustomUrlParam contains a constant defined with a string value
33
14
223,114
def canonical_extension ( fmt_ext ) : if MimeType . has_value ( fmt_ext ) : return fmt_ext try : return { 'tif' : MimeType . TIFF . value , 'jpeg' : MimeType . JPG . value , 'hdf5' : MimeType . HDF . value , 'h5' : MimeType . HDF . value } [ fmt_ext ] except KeyError : raise ValueError ( 'Data format .{} is not supported' . format ( fmt_ext ) )
Canonical extension of file format extension
120
8
223,115
def is_image_format ( self ) : return self in frozenset ( [ MimeType . TIFF , MimeType . TIFF_d8 , MimeType . TIFF_d16 , MimeType . TIFF_d32f , MimeType . PNG , MimeType . JP2 , MimeType . JPG ] )
Checks whether file format is an image format
77
9
223,116
def is_tiff_format ( self ) : return self in frozenset ( [ MimeType . TIFF , MimeType . TIFF_d8 , MimeType . TIFF_d16 , MimeType . TIFF_d32f ] )
Checks whether file format is a TIFF image format
58
11
223,117
def get_string ( self ) : if self in [ MimeType . TIFF_d8 , MimeType . TIFF_d16 , MimeType . TIFF_d32f ] : return 'image/{}' . format ( self . value ) if self is MimeType . JP2 : return 'image/jpeg2000' if self in [ MimeType . RAW , MimeType . REQUESTS_RESPONSE ] : return self . value return mimetypes . types_map [ '.' + self . value ]
Get file format as string
121
5
223,118
def get_expected_max_value ( self ) : try : return { MimeType . TIFF : 65535 , MimeType . TIFF_d8 : 255 , MimeType . TIFF_d16 : 65535 , MimeType . TIFF_d32f : 1.0 , MimeType . PNG : 255 , MimeType . JPG : 255 , MimeType . JP2 : 10000 } [ self ] except IndexError : raise ValueError ( 'Type {} is not supported by this method' . format ( self ) )
Returns max value of image MimeType format and raises an error if it is not an image format
119
20
223,119
def _filter_dates ( dates , time_difference ) : LOGGER . debug ( "dates=%s" , dates ) if len ( dates ) <= 1 : return dates sorted_dates = sorted ( dates ) separate_dates = [ sorted_dates [ 0 ] ] for curr_date in sorted_dates [ 1 : ] : if curr_date - separate_dates [ - 1 ] > time_difference : separate_dates . append ( curr_date ) return separate_dates
Filters out dates within time_difference preserving only the oldest date .
106
15
223,120
def _sentinel1_product_check ( product_id , data_source ) : props = product_id . split ( '_' ) acquisition , resolution , polarisation = props [ 1 ] , props [ 2 ] [ 3 ] , props [ 3 ] [ 2 : 4 ] if acquisition in [ 'IW' , 'EW' ] and resolution in [ 'M' , 'H' ] and polarisation in [ 'DV' , 'DH' , 'SV' , 'SH' ] : return acquisition == data_source . value [ 2 ] . name and polarisation == data_source . value [ 3 ] . name and resolution == data_source . value [ 4 ] . name [ 0 ] raise ValueError ( 'Unknown Sentinel-1 tile type: {}' . format ( product_id ) )
Checks if Sentinel - 1 product ID matches Sentinel - 1 DataSource configuration
174
15
223,121
def get_url ( self , request , * , date = None , size_x = None , size_y = None , geometry = None ) : url = self . get_base_url ( request ) authority = self . instance_id if hasattr ( self , 'instance_id' ) else request . theme params = self . _get_common_url_parameters ( request ) if request . service_type in ( ServiceType . WMS , ServiceType . WCS ) : params = { * * params , * * self . _get_wms_wcs_url_parameters ( request , date ) } if request . service_type is ServiceType . WMS : params = { * * params , * * self . _get_wms_url_parameters ( request , size_x , size_y ) } elif request . service_type is ServiceType . WCS : params = { * * params , * * self . _get_wcs_url_parameters ( request , size_x , size_y ) } elif request . service_type is ServiceType . FIS : params = { * * params , * * self . _get_fis_parameters ( request , geometry ) } return '{}/{}?{}' . format ( url , authority , urlencode ( params ) )
Returns url to Sentinel Hub s OGC service for the product specified by the OgcRequest and date .
287
21
223,122
def get_base_url ( self , request ) : url = self . base_url + request . service_type . value # These 2 lines are temporal and will be removed after the use of uswest url wont be required anymore: if hasattr ( request , 'data_source' ) and request . data_source . is_uswest_source ( ) : url = 'https://services-uswest2.sentinel-hub.com/ogc/{}' . format ( request . service_type . value ) if hasattr ( request , 'data_source' ) and request . data_source not in DataSource . get_available_sources ( ) : raise ValueError ( "{} is not available for service at ogc_base_url={}" . format ( request . data_source , SHConfig ( ) . ogc_base_url ) ) return url
Creates base url string .
188
6
223,123
def _get_common_url_parameters ( request ) : params = { 'SERVICE' : request . service_type . value } if hasattr ( request , 'maxcc' ) : params [ 'MAXCC' ] = 100.0 * request . maxcc if hasattr ( request , 'custom_url_params' ) and request . custom_url_params is not None : params = { * * params , * * { k . value : str ( v ) for k , v in request . custom_url_params . items ( ) } } if CustomUrlParam . EVALSCRIPT . value in params : evalscript = params [ CustomUrlParam . EVALSCRIPT . value ] params [ CustomUrlParam . EVALSCRIPT . value ] = b64encode ( evalscript . encode ( ) ) . decode ( ) if CustomUrlParam . GEOMETRY . value in params : geometry = params [ CustomUrlParam . GEOMETRY . value ] crs = request . bbox . crs if isinstance ( geometry , Geometry ) : if geometry . crs is not crs : raise ValueError ( 'Geometry object in custom_url_params should have the same CRS as given BBox' ) else : geometry = Geometry ( geometry , crs ) if geometry . crs is CRS . WGS84 : geometry = geometry . reverse ( ) params [ CustomUrlParam . GEOMETRY . value ] = geometry . wkt return params
Returns parameters common dictionary for WMS WCS and FIS request .
319
13
223,124
def _get_wms_wcs_url_parameters ( request , date ) : params = { 'BBOX' : str ( request . bbox . reverse ( ) ) if request . bbox . crs is CRS . WGS84 else str ( request . bbox ) , 'FORMAT' : MimeType . get_string ( request . image_format ) , 'CRS' : CRS . ogc_string ( request . bbox . crs ) , } if date is not None : start_date = date if request . time_difference < datetime . timedelta ( seconds = 0 ) else date - request . time_difference end_date = date if request . time_difference < datetime . timedelta ( seconds = 0 ) else date + request . time_difference params [ 'TIME' ] = '{}/{}' . format ( start_date . isoformat ( ) , end_date . isoformat ( ) ) return params
Returns parameters common dictionary for WMS and WCS request .
212
11
223,125
def _get_fis_parameters ( request , geometry ) : date_interval = parse_time_interval ( request . time ) params = { 'CRS' : CRS . ogc_string ( geometry . crs ) , 'LAYER' : request . layer , 'RESOLUTION' : request . resolution , 'TIME' : '{}/{}' . format ( date_interval [ 0 ] , date_interval [ 1 ] ) } if not isinstance ( geometry , ( BBox , Geometry ) ) : raise ValueError ( 'Each geometry must be an instance of sentinelhub.{} or sentinelhub.{} but {} ' 'found' . format ( BBox . __name__ , Geometry . __name__ , geometry ) ) if geometry . crs is CRS . WGS84 : geometry = geometry . reverse ( ) if isinstance ( geometry , Geometry ) : params [ 'GEOMETRY' ] = geometry . wkt else : params [ 'BBOX' ] = str ( geometry ) if request . bins : params [ 'BINS' ] = request . bins if request . histogram_type : params [ 'TYPE' ] = request . histogram_type . value return params
Returns parameters dictionary for FIS request .
270
8
223,126
def get_filename ( request , date , size_x , size_y ) : filename = '_' . join ( [ str ( request . service_type . value ) , request . layer , str ( request . bbox . crs ) , str ( request . bbox ) . replace ( ',' , '_' ) , '' if date is None else date . strftime ( "%Y-%m-%dT%H-%M-%S" ) , '{}X{}' . format ( size_x , size_y ) ] ) filename = OgcImageService . filename_add_custom_url_params ( filename , request ) return OgcImageService . finalize_filename ( filename , request . image_format )
Get filename location
162
3
223,127
def filename_add_custom_url_params ( filename , request ) : if hasattr ( request , 'custom_url_params' ) and request . custom_url_params is not None : for param , value in sorted ( request . custom_url_params . items ( ) , key = lambda parameter_item : parameter_item [ 0 ] . value ) : filename = '_' . join ( [ filename , param . value , str ( value ) ] ) return filename
Adds custom url parameters to filename string
101
7
223,128
def finalize_filename ( filename , file_format = None ) : for char in [ ' ' , '/' , '\\' , '|' , ';' , ':' , '\n' , '\t' ] : filename = filename . replace ( char , '' ) if file_format : suffix = str ( file_format . value ) if file_format . is_tiff_format ( ) and file_format is not MimeType . TIFF : suffix = str ( MimeType . TIFF . value ) filename = '_' . join ( [ filename , str ( file_format . value ) . replace ( ';' , '_' ) ] ) filename = '.' . join ( [ filename [ : 254 - len ( suffix ) ] , suffix ] ) LOGGER . debug ( "filename=%s" , filename ) return filename
Replaces invalid characters in filename string adds image extension and reduces filename length
185
14
223,129
def get_dates ( self , request ) : if DataSource . is_timeless ( request . data_source ) : return [ None ] date_interval = parse_time_interval ( request . time ) LOGGER . debug ( 'date_interval=%s' , date_interval ) if request . wfs_iterator is None : self . wfs_iterator = WebFeatureService ( request . bbox , date_interval , data_source = request . data_source , maxcc = request . maxcc , base_url = self . base_url , instance_id = self . instance_id ) else : self . wfs_iterator = request . wfs_iterator dates = sorted ( set ( self . wfs_iterator . get_dates ( ) ) ) if request . time is OgcConstants . LATEST : dates = dates [ - 1 : ] return OgcService . _filter_dates ( dates , request . time_difference )
Get available Sentinel - 2 acquisitions at least time_difference apart
210
13
223,130
def get_image_dimensions ( request ) : if request . service_type is ServiceType . WCS or ( isinstance ( request . size_x , int ) and isinstance ( request . size_y , int ) ) : return request . size_x , request . size_y if not isinstance ( request . size_x , int ) and not isinstance ( request . size_y , int ) : raise ValueError ( "At least one of parameters 'width' and 'height' must have an integer value" ) missing_dimension = get_image_dimension ( request . bbox , width = request . size_x , height = request . size_y ) if request . size_x is None : return missing_dimension , request . size_y if request . size_y is None : return request . size_x , missing_dimension raise ValueError ( "Parameters 'width' and 'height' must be integers or None" )
Verifies or calculates image dimensions .
201
7
223,131
def _fetch_features ( self ) : if self . feature_offset is None : return main_url = '{}{}/{}?' . format ( self . base_url , ServiceType . WFS . value , self . instance_id ) params = { 'SERVICE' : ServiceType . WFS . value , 'REQUEST' : 'GetFeature' , 'TYPENAMES' : DataSource . get_wfs_typename ( self . data_source ) , 'BBOX' : str ( self . bbox . reverse ( ) ) if self . bbox . crs is CRS . WGS84 else str ( self . bbox ) , 'OUTPUTFORMAT' : MimeType . get_string ( MimeType . JSON ) , 'SRSNAME' : CRS . ogc_string ( self . bbox . crs ) , 'TIME' : '{}/{}' . format ( self . time_interval [ 0 ] , self . time_interval [ 1 ] ) , 'MAXCC' : 100.0 * self . maxcc , 'MAXFEATURES' : SHConfig ( ) . max_wfs_records_per_query , 'FEATURE_OFFSET' : self . feature_offset } url = main_url + urlencode ( params ) LOGGER . debug ( "URL=%s" , url ) response = get_json ( url ) is_sentinel1 = self . data_source . is_sentinel1 ( ) for tile_info in response [ "features" ] : if not is_sentinel1 or self . _sentinel1_product_check ( tile_info [ 'properties' ] [ 'id' ] , self . data_source ) : self . tile_list . append ( tile_info ) if len ( response [ "features" ] ) < SHConfig ( ) . max_wfs_records_per_query : self . feature_offset = None else : self . feature_offset += SHConfig ( ) . max_wfs_records_per_query
Collects data from WFS service
455
7
223,132
def get_dates ( self ) : return [ datetime . datetime . strptime ( '{}T{}' . format ( tile_info [ 'properties' ] [ 'date' ] , tile_info [ 'properties' ] [ 'time' ] . split ( '.' ) [ 0 ] ) , '%Y-%m-%dT%H:%M:%S' ) for tile_info in self ]
Returns a list of acquisition times from tile info data
95
10
223,133
def _parse_tile_url ( tile_url ) : props = tile_url . rsplit ( '/' , 7 ) return '' . join ( props [ 1 : 4 ] ) , '-' . join ( props [ 4 : 7 ] ) , int ( props [ 7 ] )
Extracts tile name data and AWS index from tile URL
60
12
223,134
def get_dates_in_range ( start_date , end_date ) : start_dt = iso_to_datetime ( start_date ) end_dt = iso_to_datetime ( end_date ) num_days = int ( ( end_dt - start_dt ) . days ) return [ datetime_to_iso ( start_dt + datetime . timedelta ( i ) ) for i in range ( num_days + 1 ) ]
Get all dates within input start and end date in ISO 8601 format
100
14
223,135
def iso_to_datetime ( date ) : chunks = list ( map ( int , date . split ( 'T' ) [ 0 ] . split ( '-' ) ) ) return datetime . datetime ( chunks [ 0 ] , chunks [ 1 ] , chunks [ 2 ] )
Convert ISO 8601 time format to datetime format
60
11
223,136
def datetime_to_iso ( date , only_date = True ) : if only_date : return date . isoformat ( ) . split ( 'T' ) [ 0 ] return date . isoformat ( )
Convert datetime format to ISO 8601 time format
46
11
223,137
def aws ( product , tile , folder , redownload , info , entire , bands , l2a ) : band_list = None if bands is None else bands . split ( ',' ) data_source = DataSource . SENTINEL2_L2A if l2a else DataSource . SENTINEL2_L1C if info : if product is None : click . echo ( get_safe_format ( tile = tile , entire_product = entire , data_source = data_source ) ) else : click . echo ( get_safe_format ( product_id = product ) ) else : if product is None : download_safe_format ( tile = tile , folder = folder , redownload = redownload , entire_product = entire , bands = band_list , data_source = data_source ) else : download_safe_format ( product_id = product , folder = folder , redownload = redownload , bands = band_list )
Download Sentinel - 2 data from Sentinel - 2 on AWS to ESA SAFE format . Download uses multiple threads .
212
22
223,138
def _config_options ( func ) : for param in SHConfig ( ) . get_params ( ) [ - 1 : : - 1 ] : func = click . option ( '--{}' . format ( param ) , param , help = 'Set new values to configuration parameter "{}"' . format ( param ) ) ( func ) return func
A helper function which joins click . option functions of each parameter from config . json
73
16
223,139
def config ( show , reset , * * params ) : sh_config = SHConfig ( ) if reset : sh_config . reset ( ) for param , value in params . items ( ) : if value is not None : try : value = int ( value ) except ValueError : if value . lower ( ) == 'true' : value = True elif value . lower ( ) == 'false' : value = False if getattr ( sh_config , param ) != value : setattr ( sh_config , param , value ) old_config = SHConfig ( ) sh_config . save ( ) for param in sh_config . get_params ( ) : if sh_config [ param ] != old_config [ param ] : value = sh_config [ param ] if isinstance ( value , str ) : value = "'{}'" . format ( value ) click . echo ( "The value of parameter '{}' was updated to {}" . format ( param , value ) ) if show : click . echo ( str ( sh_config ) ) click . echo ( 'Configuration file location: {}' . format ( sh_config . get_config_location ( ) ) )
Inspect and configure parameters in your local sentinelhub configuration file
250
13
223,140
def download ( url , filename , redownload ) : data_folder , filename = filename . rsplit ( '/' , 1 ) download_list = [ DownloadRequest ( url = url , data_folder = data_folder , filename = filename , save_response = True , return_data = False ) ] download_data ( download_list , redownload = redownload )
Download from custom created URL into custom created file path
81
10
223,141
def save ( self ) : is_changed = False for prop in self . _instance . CONFIG_PARAMS : if getattr ( self , prop ) != getattr ( self . _instance , prop ) : is_changed = True setattr ( self . _instance , prop , getattr ( self , prop ) ) if is_changed : self . _instance . save_configuration ( )
Method that saves configuration parameter changes from instance of SHConfig class to global config class and to config . json file .
83
23
223,142
def _reset_param ( self , param ) : if param not in self . _instance . CONFIG_PARAMS : raise ValueError ( "Cannot reset unknown parameter '{}'" . format ( param ) ) setattr ( self , param , self . _instance . CONFIG_PARAMS [ param ] )
Resets a single parameter
65
5
223,143
def get_config_dict ( self ) : return OrderedDict ( ( prop , getattr ( self , prop ) ) for prop in self . _instance . CONFIG_PARAMS )
Get a dictionary representation of SHConfig class
40
8
223,144
def bbox_to_resolution ( bbox , width , height ) : utm_bbox = to_utm_bbox ( bbox ) east1 , north1 = utm_bbox . lower_left east2 , north2 = utm_bbox . upper_right return abs ( east2 - east1 ) / width , abs ( north2 - north1 ) / height
Calculates pixel resolution in meters for a given bbox of a given width and height .
84
19
223,145
def get_image_dimension ( bbox , width = None , height = None ) : utm_bbox = to_utm_bbox ( bbox ) east1 , north1 = utm_bbox . lower_left east2 , north2 = utm_bbox . upper_right if isinstance ( width , int ) : return round ( width * abs ( north2 - north1 ) / abs ( east2 - east1 ) ) return round ( height * abs ( east2 - east1 ) / abs ( north2 - north1 ) )
Given bounding box and one of the parameters width or height it will return the other parameter that will best fit the bounding box dimensions
120
27
223,146
def to_utm_bbox ( bbox ) : if CRS . is_utm ( bbox . crs ) : return bbox lng , lat = bbox . middle utm_crs = get_utm_crs ( lng , lat , source_crs = bbox . crs ) return bbox . transform ( utm_crs )
Transform bbox into UTM CRS
80
8
223,147
def get_utm_bbox ( img_bbox , transform ) : east1 , north1 = pixel_to_utm ( img_bbox [ 0 ] , img_bbox [ 1 ] , transform ) east2 , north2 = pixel_to_utm ( img_bbox [ 2 ] , img_bbox [ 3 ] , transform ) return [ east1 , north1 , east2 , north2 ]
Get UTM coordinates given a bounding box in pixels and a transform
90
14
223,148
def wgs84_to_utm ( lng , lat , utm_crs = None ) : if utm_crs is None : utm_crs = get_utm_crs ( lng , lat ) return transform_point ( ( lng , lat ) , CRS . WGS84 , utm_crs )
Convert WGS84 coordinates to UTM . If UTM CRS is not set it will be calculated automatically .
75
24
223,149
def utm_to_pixel ( east , north , transform , truncate = True ) : column = ( east - transform [ 0 ] ) / transform [ 1 ] row = ( north - transform [ 3 ] ) / transform [ 5 ] if truncate : return int ( row + ERR ) , int ( column + ERR ) return row , column
Convert UTM coordinate to image coordinate given a transform
74
11
223,150
def pixel_to_utm ( row , column , transform ) : east = transform [ 0 ] + column * transform [ 1 ] north = transform [ 3 ] + row * transform [ 5 ] return east , north
Convert pixel coordinate to UTM coordinate given a transform
44
11
223,151
def wgs84_to_pixel ( lng , lat , transform , utm_epsg = None , truncate = True ) : east , north = wgs84_to_utm ( lng , lat , utm_epsg ) row , column = utm_to_pixel ( east , north , transform , truncate = truncate ) return row , column
Convert WGS84 coordinates to pixel image coordinates given transform and UTM CRS . If no CRS is given it will be calculated it automatically .
80
31
223,152
def transform_point ( point , source_crs , target_crs ) : if source_crs == target_crs : return point old_x , old_y = point new_x , new_y = pyproj . transform ( CRS . projection ( source_crs ) , CRS . projection ( target_crs ) , old_x , old_y ) return new_x , new_y
Maps point form src_crs to tgt_crs
92
13
223,153
def transform_bbox ( bbox , target_crs ) : warnings . warn ( "This function is deprecated, use BBox.transform method instead" , DeprecationWarning , stacklevel = 2 ) return bbox . transform ( target_crs )
Maps bbox from current crs to target_crs
55
12
223,154
def get_safe_format ( product_id = None , tile = None , entire_product = False , bands = None , data_source = DataSource . SENTINEL2_L1C ) : entire_product = entire_product and product_id is None if tile is not None : safe_tile = SafeTile ( tile_name = tile [ 0 ] , time = tile [ 1 ] , bands = bands , data_source = data_source ) if not entire_product : return safe_tile . get_safe_struct ( ) product_id = safe_tile . get_product_id ( ) if product_id is None : raise ValueError ( 'Either product_id or tile must be specified' ) safe_product = SafeProduct ( product_id , tile_list = [ tile [ 0 ] ] , bands = bands ) if entire_product else SafeProduct ( product_id , bands = bands ) return safe_product . get_safe_struct ( )
Returns . SAFE format structure in form of nested dictionaries . Either product_id or tile must be specified .
209
23
223,155
def download_safe_format ( product_id = None , tile = None , folder = '.' , redownload = False , entire_product = False , bands = None , data_source = DataSource . SENTINEL2_L1C ) : entire_product = entire_product and product_id is None if tile is not None : safe_request = AwsTileRequest ( tile = tile [ 0 ] , time = tile [ 1 ] , data_folder = folder , bands = bands , safe_format = True , data_source = data_source ) if entire_product : safe_tile = safe_request . get_aws_service ( ) product_id = safe_tile . get_product_id ( ) if product_id is not None : safe_request = AwsProductRequest ( product_id , tile_list = [ tile [ 0 ] ] , data_folder = folder , bands = bands , safe_format = True ) if entire_product else AwsProductRequest ( product_id , data_folder = folder , bands = bands , safe_format = True ) safe_request . save_data ( redownload = redownload )
Downloads . SAFE format structure in form of nested dictionaries . Either product_id or tile must be specified .
251
24
223,156
def save_data ( self , * , data_filter = None , redownload = False , max_threads = None , raise_download_errors = False ) : self . _preprocess_request ( True , False ) self . _execute_data_download ( data_filter , redownload , max_threads , raise_download_errors )
Saves data to disk . If redownload = True then the data is redownloaded using max_threads workers .
77
26
223,157
def _execute_data_download ( self , data_filter , redownload , max_threads , raise_download_errors ) : is_repeating_filter = False if data_filter is None : filtered_download_list = self . download_list elif isinstance ( data_filter , ( list , tuple ) ) : try : filtered_download_list = [ self . download_list [ index ] for index in data_filter ] except IndexError : raise IndexError ( 'Indices of data_filter are out of range' ) filtered_download_list , mapping_list = self . _filter_repeating_items ( filtered_download_list ) is_repeating_filter = len ( filtered_download_list ) < len ( mapping_list ) else : raise ValueError ( 'data_filter parameter must be a list of indices' ) data_list = [ ] for future in download_data ( filtered_download_list , redownload = redownload , max_threads = max_threads ) : try : data_list . append ( future . result ( timeout = SHConfig ( ) . download_timeout_seconds ) ) except ImageDecodingError as err : data_list . append ( None ) LOGGER . debug ( '%s while downloading data; will try to load it from disk if it was saved' , err ) except DownloadFailedException as download_exception : if raise_download_errors : raise download_exception warnings . warn ( str ( download_exception ) ) data_list . append ( None ) if is_repeating_filter : data_list = [ copy . deepcopy ( data_list [ index ] ) for index in mapping_list ] return data_list
Calls download module and executes the download process
370
9
223,158
def _filter_repeating_items ( download_list ) : unique_requests_map = { } mapping_list = [ ] unique_download_list = [ ] for download_request in download_list : if download_request not in unique_requests_map : unique_requests_map [ download_request ] = len ( unique_download_list ) unique_download_list . append ( download_request ) mapping_list . append ( unique_requests_map [ download_request ] ) return unique_download_list , mapping_list
Because of data_filter some requests in download list might be the same . In order not to download them again this method will reduce the list of requests . It will also return a mapping list which can be used to reconstruct the previous list of download requests .
119
51
223,159
def _preprocess_request ( self , save_data , return_data ) : if not self . is_valid_request ( ) : raise ValueError ( 'Cannot obtain data because request is invalid' ) if save_data and self . data_folder is None : raise ValueError ( 'Request parameter `data_folder` is not specified. ' 'In order to save data please set `data_folder` to location on your disk.' ) for download_request in self . download_list : download_request . set_save_response ( save_data ) download_request . set_return_data ( return_data ) download_request . set_data_folder ( self . data_folder ) if save_data : for folder in self . folder_list : make_folder ( os . path . join ( self . data_folder , folder ) )
Prepares requests for download and creates empty folders
183
9
223,160
def _add_saved_data ( self , data_list , data_filter , raise_download_errors ) : filtered_download_list = self . download_list if data_filter is None else [ self . download_list [ index ] for index in data_filter ] for i , request in enumerate ( filtered_download_list ) : if request . return_data and data_list [ i ] is None : if os . path . exists ( request . get_file_path ( ) ) : data_list [ i ] = read_data ( request . get_file_path ( ) ) elif raise_download_errors : raise DownloadFailedException ( 'Failed to download data from {}.\n No previously downloaded data ' 'exists in file {}.' . format ( request . url , request . get_file_path ( ) ) ) return data_list
Adds already saved data that was not redownloaded to the requested data list .
189
16
223,161
def create_request ( self , reset_gpd_iterator = False ) : if reset_gpd_iterator : self . gpd_iterator = None gpd_service = GeopediaImageService ( ) self . download_list = gpd_service . get_request ( self ) self . gpd_iterator = gpd_service . get_gpd_iterator ( )
Set a list of download requests
82
6
223,162
def provide_session ( self , start_new = False ) : if self . is_global : self . _session_info = self . _global_session_info self . _session_start = self . _global_session_start if self . _session_info is None or start_new or datetime . datetime . now ( ) > self . _session_start + self . SESSION_DURATION : self . _start_new_session ( ) return self . _session_info
Makes sure that session is still valid and provides session info
108
12
223,163
def _start_new_session(self):
    """Create a new Geopedia session, log in if credentials are set, and
    publish the session globally when this is a global session."""
    self._session_start = datetime.datetime.now()

    # reuse the old session id (if any) when asking the service for a new session
    session_id = self._parse_session_id(self._session_info) if self._session_info else ''
    self._session_info = get_json('{}data/v1/session/create?locale=en&sid={}'.format(self.base_url, session_id))

    # only attempt login when both credentials exist and the new session is anonymous
    if self.username and self.password and self._parse_user_id(self._session_info) == self.UNAUTHENTICATED_USER_ID:
        self._make_login()

    if self.is_global:
        GeopediaSession._global_session_info = self._session_info
        GeopediaSession._global_session_start = self._session_start
Starts a new session and calculates when the new session will end . If username and password are provided it will also make login .
199
26
223,164
def _make_login(self):
    """Log the current session in using the stored username and password."""
    session_id = self._parse_session_id(self._session_info)
    login_url = '{}data/v1/session/login?user={}&pass={}&sid={}'.format(
        self.base_url, self.username, self.password, session_id)
    self._session_info = get_json(login_url)
Private method that makes login
86
5
223,165
def _get_items(self, request):
    """Collect items of the requested image format from a Geopedia layer.

    :param request: Geopedia request with layer, bbox, session and format info
    :return: list of matching image items
    """
    if request.gpd_iterator is not None:
        self.gpd_iterator = request.gpd_iterator
    else:
        self.gpd_iterator = GeopediaFeatureIterator(request.layer, bbox=request.bbox,
                                                    base_url=self.base_url,
                                                    gpd_session=request.gpd_session)

    items = []
    # an image field can hold multiple images per feature
    for field_items in self.gpd_iterator.get_field_iterator(request.image_field_name):
        for item in field_items:
            mime_string = item['mimeType']
            if not mime_string.startswith('image/'):
                continue
            # strip the 'image/' prefix before parsing the subtype
            if MimeType.from_string(mime_string[6:]) is request.image_format:
                items.append(item)
    return items
Collects data from Geopedia layer and returns list of features
204
13
223,166
def _get_filename(request, item):
    """Build a filename for a downloaded Geopedia image item.

    Either the item's own ("nice") name is kept, or a name is derived from the
    layer id and the item's object path.
    """
    if request.keep_image_names:
        filename = OgcImageService.finalize_filename(item['niceName'].replace(' ', '_'))
    else:
        name_parts = [str(GeopediaService._parse_layer(request.layer)),
                      item['objectPath'].rsplit('/', 1)[-1]]
        filename = OgcImageService.finalize_filename('_'.join(name_parts), request.image_format)

    LOGGER.debug("filename=%s", filename)
    return filename
Creates a filename
134
4
223,167
def _fetch_features(self):
    """Fetch the next page of features from Geopedia, if any pages remain."""
    if self.next_page_url is None:
        return

    response = get_json(self.next_page_url, post_values=self.query,
                        headers=self.gpd_session.session_headers)

    self.features.extend(response['features'])
    pagination = response['pagination']
    self.next_page_url = pagination['next']
    self.layer_size = pagination['total']
Retrieves a new page of features from Geopedia
110
12
223,168
def get_folder_list(folder='.'):
    """Return the names of sub-folders contained in *folder*."""
    return [entry for entry in get_content_list(folder)
            if not os.path.isfile(os.path.join(folder, entry))]
Get a list of sub-folders contained in the input folder
55
10
223,169
def create_parent_folder(filename):
    """Recursively create the parent folder of *filename*, if it has one."""
    parent = os.path.dirname(filename)
    if parent:
        make_folder(parent)
Create parent folder for input filename recursively
32
9
223,170
def make_folder(path):
    """Recursively create folder *path* if it does not already exist.

    :param path: folder to create
    :raises ValueError: if the folder cannot be created (e.g. missing permissions)
    """
    if os.path.exists(path):
        return
    try:
        os.makedirs(path)
    except OSError as exception:
        # EEXIST can happen when a parallel process creates the folder first
        if exception.errno != errno.EEXIST:
            # chain the original OSError so the root cause is preserved
            raise ValueError('Specified folder is not writable: %s'
                             '\nPlease check permissions or set a new valid folder.' % path) from exception
Create folder at input path recursively
82
8
223,171
def rename(old_path, new_path, edit_folders=True):
    """Rename a file or folder.

    :param old_path: current path
    :param new_path: new path
    :param edit_folders: when ``True``, intermediate folders are created/pruned
        (``os.renames``); otherwise a plain ``os.rename`` is performed
    """
    renamer = os.renames if edit_folders else os.rename
    renamer(old_path, new_path)
Rename files or folders
52
5
223,172
def size(pathname):
    """Return combined size of a file or folder (recursively) in bytes.

    :param pathname: path to a file or folder
    :return: size in bytes
    """
    if os.path.isfile(pathname):
        return os.path.getsize(pathname)
    # sum lazily with a generator and join paths portably instead of '{}/{}'
    return sum(size(os.path.join(pathname, name)) for name in get_content_list(pathname))
Returns size of a file or folder in Bytes
66
10
223,173
def middle(self):
    """Return the central point ``(x, y)`` of the bounding box."""
    mid_x = (self.min_x + self.max_x) / 2
    mid_y = (self.min_y + self.max_y) / 2
    return mid_x, mid_y
Returns the middle point of the bounding box
38
9
223,174
def reverse(self):
    """Return a new BBox object with the x and y coordinates switched."""
    swapped = (self.min_y, self.min_x, self.max_y, self.max_x)
    return BBox(swapped, crs=self.crs)
Returns a new BBox object where x and y coordinates are switched
44
13
223,175
def transform(self, crs):
    """Return a copy of this bounding box transformed into the target *crs*."""
    target_crs = CRS(crs)
    corners = (transform_point(self.lower_left, self.crs, target_crs),
               transform_point(self.upper_right, self.crs, target_crs))
    return BBox(corners, crs=target_crs)
Transforms BBox from current CRS to target CRS
76
12
223,176
def get_polygon(self, reverse=False):
    """Return 5 points describing the bbox polygon, listed clockwise and
    closed (the first point equals the last).

    :param reverse: if ``True``, x and y coordinates are switched first
    """
    bbox = self.reverse() if reverse else self
    return ((bbox.min_x, bbox.min_y),
            (bbox.min_x, bbox.max_y),
            (bbox.max_x, bbox.max_y),
            (bbox.max_x, bbox.min_y),
            (bbox.min_x, bbox.min_y))
Returns a tuple of coordinates of 5 points describing a polygon . Points are listed in clockwise order first point is the same as the last .
112
29
223,177
def get_partition(self, num_x=1, num_y=1):
    """Partition the bounding box into a ``num_x`` x ``num_y`` grid of equally
    sized bounding boxes.

    :return: nested list of BBox objects indexed as ``[i][j]`` along x and y
    """
    size_x = (self.max_x - self.min_x) / num_x
    size_y = (self.max_y - self.min_y) / num_y
    x_edges = [self.min_x + i * size_x for i in range(num_x + 1)]
    y_edges = [self.min_y + j * size_y for j in range(num_y + 1)]
    return [[BBox([x_edges[i], y_edges[j], x_edges[i + 1], y_edges[j + 1]], crs=self.crs)
             for j in range(num_y)]
            for i in range(num_x)]
Partitions bounding box into smaller bounding boxes of the same size .
156
15
223,178
def get_transform_vector(self, resx, resy):
    """Return a GDAL-style geo-transform tuple for the given pixel resolution.

    :param resx: resolution in the x direction (number or string like ``'10m'``)
    :param resy: resolution in the y direction
    """
    return (self.x_min, self._parse_resolution(resx), 0,
            self.y_max, 0, -self._parse_resolution(resy))
Given resolution it returns a transformation vector
55
7
223,179
def _parse_resolution ( res ) : if isinstance ( res , str ) : return float ( res . strip ( 'm' ) ) if isinstance ( res , ( int , float ) ) : return float ( res ) raise TypeError ( 'Resolution should be a float, got resolution of type {}' . format ( type ( res ) ) )
Helper method for parsing given resolution . It will also try to parse a string into float
75
17
223,180
def _tuple_from_list_or_tuple ( bbox ) : if len ( bbox ) == 4 : return tuple ( map ( float , bbox ) ) if len ( bbox ) == 2 and all ( [ isinstance ( point , ( list , tuple ) ) for point in bbox ] ) : return BBox . _tuple_from_list_or_tuple ( bbox [ 0 ] + bbox [ 1 ] ) raise TypeError ( 'Expected a valid list or tuple representation of a bbox' )
Converts a list or tuple representation of a bbox into a flat tuple representation .
117
17
223,181
def _tuple_from_str ( bbox ) : return tuple ( [ float ( s ) for s in bbox . replace ( ',' , ' ' ) . split ( ) if s ] )
Parses a string of numbers separated by any combination of commas and spaces
43
16
223,182
def reverse(self):
    """Return a new Geometry object with x and y coordinates switched."""
    swapped = shapely.ops.transform(lambda x, y: (y, x), self.geometry)
    return Geometry(swapped, crs=self.crs)
Returns a new Geometry object where x and y coordinates are switched
41
13
223,183
def transform(self, crs):
    """Return a copy of the geometry transformed into the target *crs*.

    The geometry is reprojected only when the target CRS differs from the current one.
    """
    target_crs = CRS(crs)
    geometry = self.geometry
    if target_crs is not self.crs:
        project = functools.partial(pyproj.transform, self.crs.projection(), target_crs.projection())
        geometry = shapely.ops.transform(project, geometry)
    return Geometry(geometry, crs=target_crs)
Transforms Geometry from current CRS to target CRS
96
12
223,184
def _parse_geometry(geometry):
    """Parse a WKT string, GeoJSON-like dict or shapely object into a shapely
    (multi)polygon.

    :raises TypeError: if the representation is not one of the supported kinds
    :raises ValueError: if the geometry is not a polygon or multipolygon
    """
    if isinstance(geometry, dict):
        geometry = shapely.geometry.shape(geometry)
    elif isinstance(geometry, str):
        geometry = shapely.wkt.loads(geometry)
    elif not isinstance(geometry, shapely.geometry.base.BaseGeometry):
        raise TypeError('Unsupported geometry representation')

    if not isinstance(geometry, (shapely.geometry.Polygon, shapely.geometry.MultiPolygon)):
        raise ValueError('Supported geometry types are polygon and multipolygon, got {}'.format(type(geometry)))

    return geometry
Parses given geometry into shapely object
140
9
223,185
def transform(self, crs):
    """Return a new BBoxCollection with every bounding box transformed to *crs*."""
    transformed = [bbox.transform(crs) for bbox in self.bbox_list]
    return BBoxCollection(transformed)
Transforms BBoxCollection from current CRS to target CRS
35
13
223,186
def _get_geometry(self):
    """Build a shapely MultiPolygon from the polygons of all contained bboxes."""
    polygons = [bbox.geometry for bbox in self.bbox_list]
    return shapely.geometry.MultiPolygon(polygons)
Creates a multipolygon of bounding box polygons
37
12
223,187
def _parse_bbox_list(bbox_list):
    """Validate a list of bounding boxes (or unwrap a BBoxCollection) and
    return it together with the common CRS.

    :raises ValueError: if the list is empty, holds non-BBox items, or mixes CRSs
    """
    if isinstance(bbox_list, BBoxCollection):
        return bbox_list.bbox_list, bbox_list.crs

    if not isinstance(bbox_list, list) or not bbox_list:
        raise ValueError('Expected non-empty list of BBox objects')

    # first make sure every element really is a BBox...
    for item in bbox_list:
        if not isinstance(item, BBox):
            raise ValueError('Elements in the list should be of type {}, got {}'.format(BBox.__name__, type(item)))

    # ...then check that they all share the CRS of the first element
    crs = bbox_list[0].crs
    if any(item.crs is not crs for item in bbox_list):
        raise ValueError('All bounding boxes should have the same CRS')

    return bbox_list, crs
Helper method for parsing a list of bounding boxes
198
10
223,188
def read_data(filename, data_format=None):
    """Read data from a file, dispatching on its (detected) format.

    :param filename: path to the file
    :param data_format: optional ``MimeType``; detected from the extension when not given
    :return: file content in an appropriate type
    :raises ValueError: if the file does not exist or its format is unsupported
    """
    if not os.path.exists(filename):
        raise ValueError('Filename {} does not exist'.format(filename))

    if not isinstance(data_format, MimeType):
        data_format = get_data_format(filename)

    if data_format.is_tiff_format():
        return read_tiff_image(filename)
    if data_format is MimeType.JP2:
        return read_jp2_image(filename)
    if data_format.is_image_format():
        return read_image(filename)

    readers = {
        MimeType.TXT: read_text,
        MimeType.CSV: read_csv,
        MimeType.JSON: read_json,
        MimeType.XML: read_xml,
        MimeType.GML: read_xml,
        MimeType.SAFE: read_xml
    }
    # Only the dictionary lookup is guarded: a KeyError raised *inside* a reader
    # must propagate instead of being misreported as an unsupported format.
    try:
        reader = readers[data_format]
    except KeyError:
        raise ValueError('Reading data format .{} is not supported'.format(data_format.value)) from None
    return reader(filename)
Read data from a file, dispatching on its detected format
238
5
223,189
def read_jp2_image(filename):
    """Read a JPEG2000 image, correcting pixel values for its bit encoding depth."""
    # Alternative implementation: glymur.Jp2k(filename)[:]
    image = read_image(filename)
    with open(filename, 'rb') as jp2_file:
        bit_depth = get_jp2_bit_depth(jp2_file)
    return fix_jp2_image(image, bit_depth)
Read data from JPEG2000 file
77
6
223,190
def read_csv(filename, delimiter=CSV_DELIMITER):
    """Read rows from a CSV file.

    :param filename: path to the CSV file
    :param delimiter: field delimiter character
    :return: list of rows, each a list of string fields
    """
    # newline='' is required by the csv module for correct newline handling
    with open(filename, 'r', newline='') as file:
        return list(csv.reader(file, delimiter=delimiter))
Read data from CSV file
48
5
223,191
def write_data(filename, data, data_format=None, compress=False, add=False):
    """Write data to a file, dispatching on the (detected) format.

    :param filename: destination path (parent folders are created as needed)
    :param data: data to write
    :param data_format: optional ``MimeType``; detected from the extension when not given
    :param compress: TIFF only - apply lossless compression
    :param add: TXT only - append instead of overwrite
    :raises ValueError: if the format is not supported for writing
    """
    create_parent_folder(filename)

    if not isinstance(data_format, MimeType):
        data_format = get_data_format(filename)

    if data_format.is_tiff_format():
        return write_tiff_image(filename, data, compress)
    if data_format.is_image_format():
        return write_image(filename, data)
    if data_format is MimeType.TXT:
        return write_text(filename, data, add=add)

    writers = {
        MimeType.CSV: write_csv,
        MimeType.JSON: write_json,
        MimeType.XML: write_xml,
        MimeType.GML: write_xml
    }
    # Only the dictionary lookup is guarded: a KeyError raised *inside* a writer
    # must propagate instead of being misreported as an unsupported format.
    try:
        writer = writers[data_format]
    except KeyError:
        raise ValueError('Writing data format .{} is not supported'.format(data_format.value)) from None
    return writer(filename, data)
Write data to a file, dispatching on its detected format
217
5
223,192
def write_tiff_image(filename, image, compress=False):
    """Write image data to a TIFF file.

    :param compress: apply LZMA compression (lossless, works very well on masks)
    """
    save_kwargs = {'compress': 'lzma'} if compress else {}
    return tiff.imsave(filename, image, **save_kwargs)
Write image data to TIFF file
60
7
223,193
def write_image(filename, image):
    """Write image data to a PNG or JPG file; warns when the format is lossy."""
    if get_data_format(filename) is MimeType.JPG:
        LOGGER.warning('Warning: jpeg is a lossy format therefore saved data will be modified.')
    return Image.fromarray(image).save(filename)
Write image data to PNG JPG file
70
8
223,194
def write_text(filename, data, add=False):
    """Write data to a text file.

    :param add: append to the file instead of overwriting it
    """
    mode = 'a' if add else 'w'
    with open(filename, mode) as out_file:
        print(data, end='', file=out_file)
Write data to a text file
51
6
223,195
def write_csv(filename, data, delimiter=CSV_DELIMITER):
    """Write rows of data to a CSV file.

    :param filename: destination path
    :param data: iterable of rows
    :param delimiter: field delimiter character
    """
    # newline='' prevents the csv module from writing extra blank rows on Windows
    with open(filename, 'w', newline='') as file:
        csv_writer = csv.writer(file, delimiter=delimiter)
        for line in data:
            csv_writer.writerow(line)
Write image data to CSV file
66
6
223,196
def write_json(filename, data):
    """Serialise data to a JSON file, pretty-printed with sorted keys."""
    with open(filename, 'w') as out_file:
        json.dump(data, out_file, sort_keys=True, indent=4)
Write data to JSON file
40
5
223,197
def get_jp2_bit_depth(stream):
    """Read the bit encoding depth from the Image Header ('ihdr') box of a
    JPEG2000 binary stream.

    :param stream: binary stream of a jpeg2000 file
    :return: bit depth as an integer
    :raises ValueError: if no Image Header Box is found
    """
    stream.seek(0)
    while True:
        # scan the stream in 8-byte steps looking for the box header
        header = stream.read(8)
        if len(header) < 8:
            raise ValueError('Image Header Box not found in Jpeg2000 file')
        _, box_id = struct.unpack('>I4s', header)
        if box_id != b'ihdr':
            continue
        ihdr = struct.unpack('>IIHBBBB', stream.read(14))
        # the bits-per-component field stores (depth - 1) in its lower 7 bits
        return (ihdr[3] & 0x7f) + 1
Reads bit encoding depth of jpeg2000 file in binary stream format
135
14
223,198
def fix_jp2_image(image, bit_depth):
    """Correct pixel values of a JPEG2000 image for its bit depth.

    Pillow reads 15-bit JPEG2000 images shifted by one bit, so those values are
    halved; 8- and 16-bit images are returned unchanged.

    :raises IOError: if the image values cannot be shifted
    :raises ValueError: for unsupported bit depths
    """
    if bit_depth in (8, 16):
        return image
    if bit_depth == 15:
        try:
            return image >> 1
        except TypeError:
            raise IOError('Failed to read JPEG 2000 image correctly. Most likely reason is that Pillow did not '
                          'install OpenJPEG library correctly. Try reinstalling Pillow from a wheel')
    raise ValueError('Bit depth {} of jp2 image is currently not supported. '
                     'Please raise an issue on package Github page'.format(bit_depth))
Because Pillow library incorrectly reads JPEG 2000 images with 15 - bit encoding this function corrects the values in image .
125
23
223,199
def download_data(request_list, redownload=False, max_threads=None):
    """Download all requested data, or reuse data already on disk unless
    *redownload* is set, fanning requests out over a thread pool.

    :param request_list: list of download requests
    :param redownload: force downloading even when data already exists
    :param max_threads: maximum number of worker threads (``None`` = executor default)
    :return: list of futures, one per download request
    """
    _check_if_must_download(request_list, redownload)

    LOGGER.debug("Using max_threads=%s for %s requests", max_threads, len(request_list))

    with concurrent.futures.ThreadPoolExecutor(max_workers=max_threads) as executor:
        futures = [executor.submit(execute_download_request, request) for request in request_list]
    return futures
Download all requested data or read data from disk if already downloaded and available and redownload is not required .
117
22