idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
24,200
def mapToPixel(mX, mY, geoTransform):
    """Convert map coordinates to pixel coordinates using a GDAL geotransform.

    Accepts scalars or NumPy arrays; returns (pX, pY).
    """
    mX = np.asarray(mX)
    mY = np.asarray(mY)
    # Fast path: no rotation/shear terms in the geotransform
    if geoTransform[2] + geoTransform[4] == 0:
        pX = (mX - geoTransform[0]) / geoTransform[1] - 0.5
        pY = (mY - geoTransform[3]) / geoTransform[5] - 0.5
    else:
        # Rotated geotransform: invert the full affine and apply it
        pX, pY = applyGeoTransform(mX, mY, invertGeoTransform(geoTransform))
    return pX, pY
Convert map coordinates to pixel coordinates based on geotransform Accepts float or NumPy arrays
24,201
def pixelToMap(pX, pY, geoTransform):
    """Convert pixel coordinates to map coordinates using a GDAL geotransform.

    Accepts scalars or NumPy arrays; the +0.5 shift references pixel centers.
    """
    # Bug fix: np.asarray may return the caller's own array (when it is already
    # a float64 ndarray), so the original in-place += silently mutated the
    # caller's data. Use out-of-place addition instead.
    pX = np.asarray(pX, dtype=float) + 0.5
    pY = np.asarray(pY, dtype=float) + 0.5
    mX, mY = applyGeoTransform(pX, pY, geoTransform)
    return mX, mY
Convert pixel coordinates to map coordinates based on geotransform Accepts float or NumPy arrays
24,202
def mem_ds(res, extent, srs=None, dtype=gdal.GDT_Float32):
    """Create a new in-memory GDAL Dataset covering extent at resolution res."""
    # Round pixel counts up so the grid fully covers the requested extent
    nx = int((extent[2] - extent[0]) / res + 0.99)
    ny = int((extent[3] - extent[1]) / res + 0.99)
    ds = gdal.GetDriverByName('MEM').Create('', nx, ny, 1, dtype)
    # North-up geotransform anchored at the upper-left corner
    ds.SetGeoTransform([extent[0], res, 0, extent[3], 0, -res])
    if srs is not None:
        ds.SetProjection(srs.ExportToWkt())
    return ds
Create a new GDAL Dataset in memory
24,203
def copyproj(src_fn, dst_fn, gt=True):
    """Copy projection (and optionally geotransform) from one raster file to another."""
    src_ds = gdal.Open(src_fn, gdal.GA_ReadOnly)
    dst_ds = gdal.Open(dst_fn, gdal.GA_Update)
    dst_ds.SetProjection(src_ds.GetProjection())
    if gt:
        src_gt = np.array(src_ds.GetGeoTransform())
        src_dim = np.array([src_ds.RasterXSize, src_ds.RasterYSize])
        dst_dim = np.array([dst_ds.RasterXSize, dst_ds.RasterYSize])
        # If raster dimensions differ, scale pixel size by the largest ratio
        if np.any(src_dim != dst_dim):
            res_factor = src_dim / dst_dim.astype(float)
            src_gt[[1, 5]] *= max(res_factor)
        dst_ds.SetGeoTransform(src_gt)
    # Release handles so GDAL flushes changes to disk
    src_ds = None
    dst_ds = None
Copy projection and geotransform from one raster file to another
24,204
def geom_transform(geom, t_srs):
    """Transform an OGR geometry in place to the target srs (no-op if already there)."""
    s_srs = geom.GetSpatialReference()
    if not s_srs.IsSame(t_srs):
        ct = osr.CoordinateTransformation(s_srs, t_srs)
        geom.Transform(ct)
        geom.AssignSpatialReference(t_srs)
Transform a geometry in place
24,205
def shp_dict(shp_fn, fields=None, geom=True):
    """Return a list of dicts, one per feature in a shapefile.

    Optionally restrict to the given field names; fields whose name contains
    'date' are parsed into datetime objects.
    """
    from pygeotools.lib import timelib
    ds = ogr.Open(shp_fn)
    lyr = ds.GetLayer()
    nfeat = lyr.GetFeatureCount()
    print('%i input features\n' % nfeat)
    if fields is None:
        fields = shp_fieldnames(lyr)
    d_list = []
    for n, feat in enumerate(lyr):
        d = {}
        if geom:
            # NOTE(review): reuses the 'geom' flag variable to hold the geometry;
            # the stored reference may not outlive the feature — verify callers
            # clone it if they keep it around
            geom = feat.GetGeometryRef()
            d['geom'] = geom
        for f_name in fields:
            i = str(feat.GetField(f_name))
            if 'date' in f_name:
                # Drop any fractional-second suffix before fuzzy parsing
                i = i.rsplit('.')[0]
                i = timelib.strptime_fuzzy(str(i))
            d[f_name] = i
        d_list.append(d)
    return d_list
Get a dictionary for all features in a shapefile Optionally specify fields
24,206
def lyr_proj(lyr, t_srs, preserve_fields=True):
    """Reproject an OGR layer to t_srs, returning a new in-memory datasource."""
    s_srs = lyr.GetSpatialRef()
    cT = osr.CoordinateTransformation(s_srs, t_srs)
    drv = ogr.GetDriverByName('Memory')
    out_ds = drv.CreateDataSource('out')
    outlyr = out_ds.CreateLayer('out', srs=t_srs, geom_type=lyr.GetGeomType())
    if preserve_fields:
        # Copy all field definitions from the input layer
        in_defn = lyr.GetLayerDefn()
        for i in range(in_defn.GetFieldCount()):
            outlyr.CreateField(in_defn.GetFieldDefn(i))
    out_defn = outlyr.GetLayerDefn()
    feat = lyr.GetNextFeature()
    while feat:
        g = feat.GetGeometryRef()
        g.Transform(cT)
        out_feat = ogr.Feature(out_defn)
        out_feat.SetGeometry(g)
        if preserve_fields:
            for i in range(out_defn.GetFieldCount()):
                out_feat.SetField(out_defn.GetFieldDefn(i).GetNameRef(), feat.GetField(i))
        outlyr.CreateFeature(out_feat)
        feat = lyr.GetNextFeature()
    return out_ds
Reproject an OGR layer
24,207
def raster_shpclip(r_fn, shp_fn, extent='raster', bbox=False, pad=None, invert=False, verbose=False):
    """Clip an input raster with a polygon shapefile over the requested extent.

    Returns (masked array, warped in-memory dataset).
    """
    from pygeotools.lib import iolib
    from pygeotools.lib import warplib
    r_ds = iolib.fn_getds(r_fn)
    r_srs = get_ds_srs(r_ds)
    r_extent = ds_extent(r_ds)
    r_extent_geom = bbox2geom(r_extent)
    shp_ds = ogr.Open(shp_fn)
    lyr = shp_ds.GetLayer()
    shp_srs = lyr.GetSpatialRef()
    # Reproject the shapefile layer to match the raster srs if needed
    if not r_srs.IsSame(shp_srs):
        shp_ds = lyr_proj(lyr, r_srs)
        lyr = shp_ds.GetLayer()
    shp_extent = lyr_extent(lyr)
    shp_extent_geom = bbox2geom(shp_extent)
    out_srs = r_srs
    if extent == 'raster':
        out_extent = r_extent
    elif extent == 'shp':
        out_extent = shp_extent
    elif extent == 'intersection':
        out_extent = geom_intersection([r_extent_geom, shp_extent_geom])
    elif extent == 'union':
        out_extent = geom_union([r_extent_geom, shp_extent_geom])
    else:
        print("Unexpected extent specification, reverting to input raster extent")
        # NOTE(review): falls back to the string 'raster' rather than r_extent —
        # downstream pad_extent/memwarp must accept that token; verify
        out_extent = 'raster'
    if pad is not None:
        out_extent = pad_extent(out_extent, width=pad)
    print("Raster to clip: %s\nShapefile used to clip: %s" % (r_fn, shp_fn))
    if verbose:
        print(shp_extent)
        print(r_extent)
        print(out_extent)
    r_ds = warplib.memwarp(r_ds, extent=out_extent, t_srs=out_srs, r='cubic')
    r = iolib.ds_getma(r_ds)
    if not bbox:
        # Rasterize the polygons and mask the warped array
        mask = shp2array(shp_fn, r_ds)
        if invert:
            mask = ~(mask)
        r = np.ma.array(r, mask=mask)
    return r, r_ds
Clip an input raster by input polygon shapefile for given extent
24,208
def geom2shp(geom, out_fn, fields=False):
    """Write an input geometry out as a new ESRI Shapefile.

    When fields is True, also writes name/path/date/decyear attributes derived
    from the output filename.
    """
    from pygeotools.lib import timelib
    drv = ogr.GetDriverByName("ESRI Shapefile")
    if os.path.exists(out_fn):
        drv.DeleteDataSource(out_fn)
    out_ds = drv.CreateDataSource(out_fn)
    out_lyrname = os.path.splitext(os.path.split(out_fn)[1])[0]
    geom_srs = geom.GetSpatialReference()
    geom_type = geom.GetGeometryType()
    out_lyr = out_ds.CreateLayer(out_lyrname, geom_srs, geom_type)
    if fields:
        # Attribute schema: name, path, date (YYYYMMDD int), decyear (float)
        field_defn = ogr.FieldDefn("name", ogr.OFTString)
        field_defn.SetWidth(128)
        out_lyr.CreateField(field_defn)
        field_defn = ogr.FieldDefn("path", ogr.OFTString)
        field_defn.SetWidth(254)
        out_lyr.CreateField(field_defn)
        field_defn = ogr.FieldDefn("date", ogr.OFTInteger)
        field_defn.SetWidth(32)
        out_lyr.CreateField(field_defn)
        field_defn = ogr.FieldDefn("decyear", ogr.OFTReal)
        field_defn.SetPrecision(8)
        field_defn.SetWidth(64)
        out_lyr.CreateField(field_defn)
    out_feat = ogr.Feature(out_lyr.GetLayerDefn())
    out_feat.SetGeometry(geom)
    if fields:
        out_path = os.path.splitext(out_fn)[0] + '.tif'
        out_feat.SetField("name", os.path.split(out_path)[-1])
        out_feat.SetField("path", out_path)
        # Date fields are populated only if a datetime can be parsed from the filename
        out_feat_date = timelib.fn_getdatetime(out_fn)
        if out_feat_date is not None:
            datestamp = int(out_feat_date.strftime('%Y%m%d'))
            out_feat.SetField("date", datestamp)
            decyear = timelib.dt2decyear(out_feat_date)
            out_feat.SetField("decyear", decyear)
    out_lyr.CreateFeature(out_feat)
    # Close the datasource to flush to disk
    out_ds = None
Write out a new shapefile for input geometry
24,209
def get_outline(ds, t_srs=None, scale=1.0, simplify=False, convex=False):
    """Generate a polygon outline of unmasked values in the input raster.

    Returns an empty polygon if the raster is fully masked.
    """
    gt = np.array(ds.GetGeoTransform())
    from pygeotools.lib import iolib
    a = iolib.ds_getma_sub(ds, scale=scale)
    geom = ogr.Geometry(ogr.wkbPolygon)
    if a.count() != 0:
        if (scale != 1.0):
            # Adjust pixel size for the subsampled array
            gt[1] *= scale
            gt[5] *= scale
        ds_srs = get_ds_srs(ds)
        if t_srs is None:
            t_srs = ds_srs
        # Trace the first/last unmasked pixel along each column, then close the ring
        px = np.ma.notmasked_edges(a, axis=0)
        x = np.concatenate((px[0][1][::1], px[1][1][::-1], [px[0][1][0]]))
        y = np.concatenate((px[0][0][::1], px[1][0][::-1], [px[0][0][0]]))
        mx, my = pixelToMap(x, y, gt)
        geom_wkt = 'POLYGON(({0}))'.format(', '.join(['{0} {1}'.format(*a) for a in zip(mx, my)]))
        geom = ogr.CreateGeometryFromWkt(geom_wkt)
        if not ds_srs.IsSame(t_srs):
            ct = osr.CoordinateTransformation(ds_srs, t_srs)
            geom.Transform(ct)
        geom.AssignSpatialReference(t_srs)
        if not geom.IsValid():
            # Light simplification usually repairs self-intersections
            tol = gt[1] * 0.1
            geom = geom.Simplify(tol)
        if simplify:
            tol = gt[1] * 2
            geom = geom.Simplify(tol)
        if convex:
            geom = geom.ConvexHull()
    else:
        print("No unmasked values found")
    return geom
Generate outline of unmasked values in input raster
24,210
def ds_cT(ds, x, y, xy_srs=wgs_srs):
    """Convert point coordinates (in xy_srs) to map coordinates matching the dataset srs."""
    ds_srs = get_ds_srs(ds)
    mX, mY = x, y
    if xy_srs is not None and not ds_srs.IsSame(xy_srs):
        mX, mY, _ = cT_helper(x, y, 0, xy_srs, ds_srs)
    return mX, mY
Convert input point coordinates to map coordinates that match input dataset
24,211
def line2pts(geom, dl=None):
    """Generate points at fixed interval dl along an input line geometry.

    Useful for extracting profile data from a raster.
    Returns (l, mX, mY): cumulative distances and x/y coordinate lists.
    """
    nodes = geom.GetPoints()
    if dl is None:
        # Default: ~1000 evenly spaced steps along the line
        dl = geom.Length() / 1000
    l = [0]
    mX = [nodes[0][0]]
    mY = [nodes[0][1]]
    rem_l = 0          # leftover distance carried across segment boundaries
    last_l = l[-1]
    for i in range(len(nodes) - 1):
        x1, y1 = nodes[i]
        x2, y2 = nodes[i + 1]
        tl = np.sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2)
        steps = int((tl + rem_l) / dl)
        if steps > 0:
            dx = ((x2 - x1) / tl) * dl
            dy = ((y2 - y1) / tl) * dl
            # Offset so spacing stays uniform across the carried remainder
            rem_x = rem_l * (dx / dl)
            rem_y = rem_l * (dy / dl)
            for n in range(1, steps + 1):
                l.append(last_l + dl * n)
                mX.append(x1 + dx * n - rem_x)
                mY.append(y1 + dy * n - rem_y)
            rem_l += tl - steps * dl
            last_l = l[-1]
        else:
            rem_l += tl
    return l, mX, mY
Given an input line geom generate points at fixed interval Useful for extracting profile data from raster
24,212
def get_res_stats(ds_list, t_srs=None):
    """Return (min, max, mean, median) resolution stats for a dataset list."""
    if t_srs is None:
        t_srs = get_ds_srs(ds_list[0])
    res = np.array([get_res(ds, t_srs=t_srs) for ds in ds_list])
    # Avoid shadowing the min/max builtins; return stats directly
    return (np.min(res), np.max(res), np.mean(res), np.median(res))
Return resolution stats for an input dataset list
24,213
def get_res(ds, t_srs=None, square=False):
    """Get raster resolution [xres, yres] of a GDAL Dataset, optionally in t_srs."""
    gt = ds.GetGeoTransform()
    ds_srs = get_ds_srs(ds)
    res = [gt[1], np.abs(gt[5])]
    if square:
        res = [np.mean(res), np.mean(res)]
    if t_srs is not None and not ds_srs.IsSame(t_srs):
        # Estimate an equivalent resolution from the diagonal of the
        # reprojected extent (the original also carried a dead alternative
        # that projected the center pixel)
        extent = ds_extent(ds, t_srs)
        diag = np.sqrt((extent[2] - extent[0]) ** 2 + (extent[3] - extent[1]) ** 2)
        r = diag / np.sqrt(ds.RasterXSize ** 2 + ds.RasterYSize ** 2)
        res = [r, r]
    return res
Get GDAL Dataset raster resolution
24,214
def get_center(ds, t_srs=None):
    """Get center coordinates [x, y] of a GDAL Dataset, optionally in t_srs."""
    gt = ds.GetGeoTransform()
    ds_srs = get_ds_srs(ds)
    center = [gt[0] + gt[1] * ds.RasterXSize / 2.0,
              gt[3] + gt[5] * ds.RasterYSize / 2.0]
    if t_srs is not None and not ds_srs.IsSame(t_srs):
        ct = osr.CoordinateTransformation(ds_srs, t_srs)
        center = list(ct.TransformPoint(*center)[0:2])
    return center
Get center coordinates of GDAL Dataset
24,215
def get_ds_srs(ds):
    """Return an osr.SpatialReference built from the dataset's projection WKT."""
    srs = osr.SpatialReference()
    srs.ImportFromWkt(ds.GetProjectionRef())
    return srs
Get srs object for GDAL Dataset
24,216
def srs_check(ds):
    """Check dataset srs validity: non-identity geotransform AND nonempty projection."""
    gt = np.array(ds.GetGeoTransform())
    default_gt = np.array((0.0, 1.0, 0.0, 0.0, 0.0, 1.0))
    gt_check = not np.all(gt == default_gt)
    proj_check = ds.GetProjection() != ''
    return bool(gt_check and proj_check)
Check validity of Dataset srs
24,217
def ds_IsEmpty(ds):
    """Check whether a dataset is empty (e.g. after a warp): constant band equal to nodata."""
    out = False
    b = ds.GetRasterBand(1)
    try:
        mm = b.ComputeRasterMinMax()
        if mm[0] == mm[1]:
            # Constant band: empty if no nodata is defined, or if it equals nodata
            ndv = b.GetNoDataValue()
            if ndv is None:
                out = True
            elif mm[0] == ndv:
                out = True
    except Exception:
        # ComputeRasterMinMax raises when the band has no valid pixels
        out = True
    return out
Check to see if dataset is empty after warp
24,218
def gt_corners(gt, nx, ny):
    """Return (ul, ll, ur, lr) corner coordinates for a geotransform and raster size."""
    xmin, ymax = gt[0], gt[3]
    xmax = xmin + gt[1] * nx
    ymin = ymax + gt[5] * ny
    ul = [xmin, ymax]
    ll = [xmin, ymin]
    ur = [xmax, ymax]
    lr = [xmax, ymin]
    return ul, ll, ur, lr
Get corner coordinates based on input geotransform and raster dimensions
24,219
def ds_geom(ds, t_srs=None):
    """Return the dataset's bounding-box envelope as an OGR polygon geometry."""
    gt = ds.GetGeoTransform()
    ds_srs = get_ds_srs(ds)
    if t_srs is None:
        t_srs = ds_srs
    ns = ds.RasterXSize
    nl = ds.RasterYSize
    # Closed ring of corner pixel coordinates; -0.5 undoes the pixel-center
    # shift applied by pixelToMap so corners land on the outer pixel edges
    x = np.array([0, ns, ns, 0, 0], dtype=float) - 0.5
    y = np.array([0, 0, nl, nl, 0], dtype=float) - 0.5
    mx, my = pixelToMap(x, y, gt)
    geom_wkt = 'POLYGON(({0}))'.format(', '.join(['{0} {1}'.format(*a) for a in zip(mx, my)]))
    geom = ogr.CreateGeometryFromWkt(geom_wkt)
    geom.AssignSpatialReference(ds_srs)
    if not ds_srs.IsSame(t_srs):
        geom_transform(geom, t_srs)
    return geom
Return dataset bbox envelope as geom
24,220
def geom_wh(geom):
    """Compute (width, height) of a geometry's envelope in projected units.

    OGR GetEnvelope() returns (xmin, xmax, ymin, ymax). The original
    implementation had width and height swapped: it returned the y extent
    as the width and the x extent as the height.
    """
    e = geom.GetEnvelope()
    w = e[1] - e[0]  # x extent
    h = e[3] - e[2]  # y extent
    return w, h
Compute width and height of geometry in projected units
24,221
def gdaldem_mem_ma(ma, ds=None, res=None, extent=None, srs=None, processing='hillshade', returnma=False, computeEdges=False):
    """Run a gdaldem calculation (hillshade etc.) on a NumPy masked array.

    Wrapper to allow gdaldem calculations for arbitrary masked-array input.
    When ds is None, a new in-memory dataset is built from res/extent/srs.
    """
    if ds is None:
        # Bug fix: pass the caller's srs through — the original hard-coded
        # srs=None here, silently ignoring the srs parameter.
        ds = mem_ds(res, extent, srs=srs, dtype=gdal.GDT_Float32)
    else:
        ds = mem_ds_copy(ds)
    b = ds.GetRasterBand(1)
    b.WriteArray(ma)
    out = gdaldem_mem_ds(ds, processing=processing, returnma=returnma)
    return out
Wrapper to allow gdaldem calculations for arbitrary NumPy masked array input. Untested, work-in-progress placeholder. Should only need to specify res; can calculate a local gt and cartesian srs
24,222
def get_xy_ma(bma, gt, stride=1, origmask=True, newmask=None):
    """Return masked arrays of x and y map coordinates for array bma and geotransform gt."""
    pX = np.arange(0, bma.shape[1], stride)
    pY = np.arange(0, bma.shape[0], stride)
    psamp = np.meshgrid(pX, pY)
    mX, mY = pixelToMap(psamp[0], psamp[1], gt)
    mask = None
    if origmask:
        # Bug fix: stride BOTH axes — the original strided rows only
        # ([::stride]), producing a mask whose shape did not match the
        # strided coordinate grids whenever stride > 1.
        mask = np.ma.getmaskarray(bma)[::stride, ::stride]
    if newmask is not None:
        mask = newmask[::stride, ::stride]
    mX = np.ma.array(mX, mask=mask, fill_value=0)
    mY = np.ma.array(mY, mask=mask, fill_value=0)
    return mX, mY
Return arrays of x and y map coordinates for input array and geotransform
24,223
def get_xy_1D(ds, stride=1, getval=False):
    """Return 1D arrays of x and y map coordinates for an input GDAL Dataset."""
    gt = ds.GetGeoTransform()
    pX = np.arange(0, ds.RasterXSize, stride)
    pY = np.arange(0, ds.RasterYSize, stride)
    # Project each axis independently; the cross terms are discarded
    mX, _ = pixelToMap(pX, pY[0], gt)
    _, mY = pixelToMap(pX[0], pY, gt)
    return mX, mY
Return 1D arrays of x and y map coordinates for input GDAL Dataset
24,224
def get_xy_grids(ds, stride=1, getval=False):
    """Return 2D arrays of x and y map coordinates for an input GDAL Dataset."""
    gt = ds.GetGeoTransform()
    pX = np.arange(0, ds.RasterXSize, stride)
    pY = np.arange(0, ds.RasterYSize, stride)
    psamp = np.meshgrid(pX, pY)
    return pixelToMap(psamp[0], psamp[1], gt)
Return 2D arrays of x and y map coordinates for input GDAL Dataset
24,225
def fitPlaneSVD(XYZ):
    """Fit a plane z = a*x + b*y + c to Nx3 point data via SVD; returns [a, b, c]."""
    nrows = XYZ.shape[0]
    # Augment with a ones column so the null vector encodes the plane offset
    A = np.hstack([XYZ, np.ones((nrows, 1))])
    u, d, v = np.linalg.svd(A, 0)
    # Right singular vector for the smallest singular value spans the null space
    B = np.array(v[3, :])
    return -B[[0, 1, 3]] / B[2]
Fit a plane to input point data using SVD
24,226
def fitPlaneLSQ(XYZ):
    """Fit a plane z = a*x + b*y + c to Nx3 point data via least squares; returns [a, b, c]."""
    nrows = XYZ.shape[0]
    # Design matrix columns: x, y, 1
    G = np.ones((nrows, 3))
    G[:, 0] = XYZ[:, 0]
    G[:, 1] = XYZ[:, 1]
    coeff, _, _, _ = np.linalg.lstsq(G, XYZ[:, 2], rcond=None)
    return coeff
Fit a plane to input point data using LSQ
24,227
def ds_fitplane(ds):
    """Fit a plane to the valid values in a GDAL Dataset."""
    from pygeotools.lib import iolib
    bma = iolib.ds_getma(ds)
    return ma_fitplane(bma, ds.GetGeoTransform())
Fit a plane to values in GDAL Dataset
24,228
def getUTMzone(geom):
    """Determine the UTM zone (e.g. '10N') for an input geometry's centroid."""
    lon, lat = geom.Centroid().GetPoint_2D()
    # Normalize longitude to [-180, 180)
    lon180 = (lon + 180) - np.floor((lon + 180) / 360) * 360 - 180
    zonenum = int(np.floor((lon180 + 180) / 6) + 1)
    zonehem = 'N' if lat >= 0 else 'S'
    # Special case: southwest Norway (zone 32V extension)
    if 56.0 <= lat < 64.0 and 3.0 <= lon180 < 12.0:
        zonenum = 32
    # Special cases: Svalbard
    if 72.0 <= lat < 84.0:
        if 0.0 <= lon180 < 9.0:
            zonenum = 31
        elif 9.0 <= lon180 < 21.0:
            zonenum = 33
        elif 21.0 <= lon180 < 33.0:
            zonenum = 35
        elif 33.0 <= lon180 < 42.0:
            zonenum = 37
    return str(zonenum) + zonehem
Determine UTM Zone for input geometry
24,229
def get_proj(geom, proj_list=None):
    """Determine the best projection for an input geometry.

    Falls back to the local UTM srs when no predefined region intersects.
    """
    if proj_list is None:
        proj_list = gen_proj_list()
    out_srs = None
    for projbox in proj_list:
        if projbox.geom.Intersects(geom):
            out_srs = projbox.srs
            break
    if out_srs is None:
        out_srs = getUTMsrs(geom)
    return out_srs
Determine best projection for input geometry
24,230
def gen_proj_list():
    """Create an ordered (cascading-preference) list of regional projections."""
    # (west, east, south, north) bounds -> EPSG code; earlier entries win
    regions = [
        ([-180, -130, 51.35, 71.35], 3338),   # Alaska Albers
        ([150, 175, -80, -70], 3294),         # Victoria Land, Antarctica
        ([-180, 180, 58, 82], 3413),          # NSIDC polar stereographic north
        ([-180, 180, -90, -58], 3031),        # Antarctic polar stereographic
        ([-180, 180, 60, 90], 3413),          # Arctic fallback
    ]
    return [ProjBox(bounds, epsg) for bounds, epsg in regions]
Create list of projections with cascading preference
24,231
def xy2geom(x, y, t_srs=None):
    """Convert x and y point coordinates to an OGR point geometry.

    NOTE(review): the transform runs FROM t_srs TO wgs_srs yet the result is
    tagged with t_srs — looks inverted, but preserved as-is; verify intent
    against callers.
    """
    geom = ogr.CreateGeometryFromWkt('POINT({0} {1})'.format(x, y))
    if t_srs is not None and not wgs_srs.IsSame(t_srs):
        ct = osr.CoordinateTransformation(t_srs, wgs_srs)
        geom.Transform(ct)
        geom.AssignSpatialReference(t_srs)
    return geom
Convert x and y point coordinates to geom
24,232
def get_dem_mosaic_cmd(fn_list, o, fn_list_txt=None, tr=None, t_srs=None, t_projwin=None, georef_tile_size=None, threads=None, tile=None, stat=None):
    """Build an ASP dem_mosaic command as a list of string arguments.

    Useful for spawning many single-threaded mosaicking processes.
    """
    cmd = ['dem_mosaic', ]
    if o is None:
        o = 'mos'
    cmd.extend(['-o', o])
    if threads is None:
        from pygeotools.lib import iolib
        threads = iolib.cpu_count()
    cmd.extend(['--threads', threads])
    if tr is not None:
        cmd.extend(['--tr', tr])
    if t_srs is not None:
        # Quote the proj4 string so it survives shell expansion
        cmd.extend(['--t_srs', '"%s"' % t_srs.ExportToProj4()])
    if t_projwin is not None:
        cmd.append('--t_projwin')
        cmd.extend(t_projwin)
        cmd.append('--force-projwin')
    if tile is not None:
        cmd.append('--tile-index')
        cmd.append(tile)
    if georef_tile_size is not None:
        cmd.extend(['--georef-tile-size', georef_tile_size])
    if stat is not None:
        if stat == 'wmean':
            # Weighted mean is the dem_mosaic default; no flag needed
            stat = None
        else:
            cmd.append('--%s' % stat.replace('index', ''))
        if stat in ['lastindex', 'firstindex', 'medianindex']:
            cmd.append('--save-index-map')
    cmd.extend(['--output-nodata-value', '-9999'])
    if fn_list_txt is not None:
        if os.path.exists(fn_list_txt):
            cmd.append('-l')
            cmd.append(fn_list_txt)
        else:
            print("Could not find input text file containing list of inputs")
    else:
        cmd.extend(fn_list)
    # dem_mosaic expects plain strings for every argument
    cmd = [str(i) for i in cmd]
    return cmd
Create ASP dem_mosaic command Useful for spawning many single - threaded mosaicing processes
24,233
def parse_rs_alg(r):
    """Map a resampling-method name to the corresponding GDAL constant.

    Exits via sys.exit for unrecognized names.
    """
    algs = {
        'near': gdal.GRA_NearestNeighbour,
        'bilinear': gdal.GRA_Bilinear,
        'cubic': gdal.GRA_Cubic,
        'cubicspline': gdal.GRA_CubicSpline,
        'average': gdal.GRA_Average,
        'lanczos': gdal.GRA_Lanczos,
        'mode': gdal.GRA_Mode,
    }
    if r not in algs:
        sys.exit("Invalid resampling method")
    return algs[r]
Parse resampling algorithm
24,234
def parse_srs(t_srs, src_ds_list=None):
    """Parse arbitrary t_srs input into an osr.SpatialReference.

    Accepts None, 'first'/'last' (relative to src_ds_list), an existing
    SpatialReference, a GDAL Dataset, a raster filename, or an
    EPSG/proj4/WKT string.
    """
    if t_srs is None and src_ds_list is None:
        print("Input t_srs and src_ds_list are both None")
    else:
        if t_srs is None:
            t_srs = 'first'
        if t_srs == 'first' and src_ds_list is not None:
            t_srs = geolib.get_ds_srs(src_ds_list[0])
        elif t_srs == 'last' and src_ds_list is not None:
            t_srs = geolib.get_ds_srs(src_ds_list[-1])
        elif isinstance(t_srs, osr.SpatialReference):
            pass
        elif isinstance(t_srs, gdal.Dataset):
            t_srs = geolib.get_ds_srs(t_srs)
        elif isinstance(t_srs, str) and os.path.exists(t_srs):
            t_srs = geolib.get_ds_srs(gdal.Open(t_srs))
        elif isinstance(t_srs, str):
            temp = osr.SpatialReference()
            if 'EPSG' in t_srs.upper():
                temp.ImportFromEPSG(int(t_srs.split(':')[-1]))
            elif 'proj' in t_srs:
                temp.ImportFromProj4(t_srs)
            else:
                # Assume WKT as the last resort
                temp.ImportFromWkt(t_srs)
            t_srs = temp
        else:
            t_srs = None
    return t_srs
Parse arbitrary input t_srs
24,235
def parse_res(res, src_ds_list=None, t_srs=None):
    """Parse arbitrary res input into a numeric resolution.

    Accepts 'first'/'last'/'min'/'max'/'mean'/'med'/'common_scale_factor'
    (relative to src_ds_list), 'source' (None), a GDAL Dataset, a raster
    filename, or a number.
    """
    t_srs = parse_srs(t_srs, src_ds_list)
    res_str_list = ['first', 'last', 'min', 'max', 'mean', 'med', 'common_scale_factor']
    if res in res_str_list and src_ds_list is not None:
        res_stats = geolib.get_res_stats(src_ds_list, t_srs=t_srs)
        if res == 'first':
            res = geolib.get_res(src_ds_list[0], t_srs=t_srs, square=True)[0]
        elif res == 'last':
            res = geolib.get_res(src_ds_list[-1], t_srs=t_srs, square=True)[0]
        elif res == 'min':
            res = res_stats[0]
        elif res == 'max':
            res = res_stats[1]
        elif res == 'mean':
            res = res_stats[2]
        elif res == 'med':
            res = res_stats[3]
        elif res == 'common_scale_factor':
            # Geometric-mean-style compromise between min and max resolution
            res = np.sqrt(res_stats[1] / res_stats[0]) * res_stats[0]
    elif res == 'source':
        res = None
    elif isinstance(res, gdal.Dataset):
        res = geolib.get_res(res, t_srs=t_srs, square=True)[0]
    elif isinstance(res, str) and os.path.exists(res):
        res = geolib.get_res(gdal.Open(res), t_srs=t_srs, square=True)[0]
    else:
        res = float(res)
    return res
Parse arbitrary input res
24,236
def parse_extent(extent, src_ds_list=None, t_srs=None):
    """Parse arbitrary extent input into [xmin, ymin, xmax, ymax].

    Accepts 'first'/'last'/'intersection'/'union' (relative to src_ds_list),
    'source' (None), a GDAL Dataset, a raster filename, a sequence, or a
    space-delimited string.
    """
    if t_srs is not None:
        t_srs = parse_srs(t_srs, src_ds_list)
    extent_str_list = ['first', 'last', 'intersection', 'union']
    if extent in extent_str_list and src_ds_list is not None:
        if len(src_ds_list) == 1 and (extent == 'intersection' or extent == 'union'):
            # Degenerate case: a single input defines its own extent
            extent = None
        elif extent == 'first':
            extent = geolib.ds_geom_extent(src_ds_list[0], t_srs=t_srs)
        elif extent == 'last':
            extent = geolib.ds_geom_extent(src_ds_list[-1], t_srs=t_srs)
        elif extent == 'intersection':
            extent = geolib.ds_geom_intersection_extent(src_ds_list, t_srs=t_srs)
            if len(src_ds_list) > 1 and extent is None:
                sys.exit("Input images do not intersect")
        elif extent == 'union':
            extent = geolib.ds_geom_union_extent(src_ds_list, t_srs=t_srs)
    elif extent == 'source':
        extent = None
    elif isinstance(extent, gdal.Dataset):
        extent = geolib.ds_geom_extent(extent, t_srs=t_srs)
    elif isinstance(extent, str) and os.path.exists(extent):
        extent = geolib.ds_geom_extent(gdal.Open(extent), t_srs=t_srs)
    elif isinstance(extent, (list, tuple, np.ndarray)):
        extent = list(extent)
    else:
        extent = [float(i) for i in extent.split(' ')]
    return extent
Parse arbitrary input extent
24,237
def memwarp_multi(src_ds_list, res='first', extent='intersection', t_srs='first', r='cubic', verbose=True, dst_ndv=0):
    """Helper for in-memory warping of multiple input GDAL Datasets."""
    return warp_multi(src_ds_list, res, extent, t_srs, r,
                      warptype=memwarp, verbose=verbose, dst_ndv=dst_ndv)
Helper function for memwarp of multiple input GDAL Datasets
24,238
def memwarp_multi_fn(src_fn_list, res='first', extent='intersection', t_srs='first', r='cubic', verbose=True, dst_ndv=0):
    """Helper for in-memory warping of multiple input filenames."""
    if not iolib.fn_list_check(src_fn_list):
        sys.exit('Missing input file(s)')
    src_ds_list = [gdal.Open(fn, gdal.GA_ReadOnly) for fn in src_fn_list]
    return memwarp_multi(src_ds_list, res, extent, t_srs, r,
                         verbose=verbose, dst_ndv=dst_ndv)
Helper function for memwarp of multiple input filenames
24,239
def diskwarp_multi(src_ds_list, res='first', extent='intersection', t_srs='first', r='cubic', verbose=True, outdir=None, dst_ndv=None):
    """Helper for on-disk warping of multiple input GDAL Datasets."""
    return warp_multi(src_ds_list, res, extent, t_srs, r,
                      verbose=verbose, warptype=diskwarp, outdir=outdir, dst_ndv=dst_ndv)
Helper function for diskwarp of multiple input GDAL Datasets
24,240
def diskwarp_multi_fn(src_fn_list, res='first', extent='intersection', t_srs='first', r='cubic', verbose=True, outdir=None, dst_ndv=None):
    """Helper for on-disk warping of multiple input filenames."""
    if not iolib.fn_list_check(src_fn_list):
        sys.exit('Missing input file(s)')
    src_ds_list = [gdal.Open(fn, gdal.GA_ReadOnly) for fn in src_fn_list]
    return diskwarp_multi(src_ds_list, res, extent, t_srs, r,
                          verbose=verbose, outdir=outdir, dst_ndv=dst_ndv)
Helper function for diskwarp of multiple input filenames
24,241
def writeout(ds, outfn):
    """Write a GDAL Dataset to disk as GeoTiff using the project's creation options."""
    print("Writing out %s" % outfn)
    out_ds = iolib.gtif_drv.CreateCopy(outfn, ds, 0, options=iolib.gdal_opt)
    # Release the handle so GDAL flushes the file
    out_ds = None
Write ds to disk
24,242
def getLocalTime(utc_dt, tz):
    """Convert a naive UTC datetime to local time in the named timezone."""
    import pytz
    local_tz = pytz.timezone(tz)
    return utc_dt.replace(tzinfo=pytz.utc).astimezone(local_tz)
Return local timezone time
24,243
def strptime_fuzzy(s):
    """Fuzzy-parse a date string into a datetime via dateutil."""
    import dateutil.parser
    return dateutil.parser.parse(str(s), fuzzy=True)
Fuzzy date string parsing
24,244
def fn_getdatetime_list(fn):
    """Extract all datetime strings from an input filename.

    Tries patterns from most to least specific: YYYYMMDD_HHMM / YYYYMMDDHHMM /
    YYYYMMDD / decimal year / YYYY / DDmonYY.
    NOTE(review): nesting reconstructed from a flattened source — verify
    against upstream pygeotools timelib.
    """
    fn = os.path.split(os.path.splitext(fn)[0])[-1]
    import re
    out = None
    # YYYYMMDD followed by _ or T and HHMM
    dstr = re.findall(r'(?:^|_|-)(?:19|20)[0-9][0-9](?:0[1-9]|1[012])(?:0[1-9]|[12][0-9]|3[01])[_T](?:0[0-9]|1[0-9]|2[0-3])[0-5][0-9]', fn)
    if not dstr:
        # YYYYMMDDHHMM with no separator
        dstr = re.findall(r'(?:^|_|-)(?:19|20)[0-9][0-9](?:0[1-9]|1[012])(?:0[1-9]|[12][0-9]|3[01])(?:0[0-9]|1[0-9]|2[0-3])[0-5][0-9]', fn)
    if not dstr:
        # YYYYMMDD only
        dstr = re.findall(r'(?:^|_|-)(?:19|20)[0-9][0-9](?:0[1-9]|1[012])(?:0[1-9]|[12][0-9]|3[01])(?:$|_|-)', fn)
    if not dstr:
        # Decimal year (e.g. 2015.75) — converted directly, not fuzzy-parsed
        dstr = re.findall(r'(?:^|_|-)(?:19|20)[0-9][0-9]\.[0-9][0-9][0-9]*(?:$|_|-)', fn)
        dstr = [d.lstrip('_').rstrip('_') for d in dstr]
        dstr = [d.lstrip('-').rstrip('-') for d in dstr]
        out = [decyear2dt(float(s)) for s in dstr]
        dstr = None
    if not dstr:
        # Bare YYYY
        dstr = re.findall(r'(?:^|_|-)(?:19|20)[0-9][0-9](?:$|_|-)', fn)
    if not dstr:
        # ASP-style DDmonYY (e.g. 15jun12)
        dstr = re.findall(r'[0-3][0-9][a-z][a-z][a-z][0-9][0-9]', fn)
        if dstr:
            out = [datetime.strptime(s, '%d%b%y') for s in dstr][0]
        dstr = None
    if dstr:
        # Strip delimiters captured by the (^|_|-) groups, then fuzzy parse
        dstr = [d.lstrip('_').rstrip('_') for d in dstr]
        dstr = [d.lstrip('-').rstrip('-') for d in dstr]
        out = [strptime_fuzzy(s) for s in dstr]
    return out
Extract all datetime strings from input filename
24,245
def get_t_factor(t1, t2):
    """Return the separation between two datetimes in decimal years (None if undefined)."""
    if t1 is None or t2 is None or t1 == t2:
        return None
    year = timedelta(days=365.25)
    return abs((t2 - t1).total_seconds() / year.total_seconds())
Time difference between two datetimes expressed as decimal year
24,246
def sort_fn_list(fn_list):
    """Sort an input filename list by the datetime embedded in each filename."""
    dt_list = get_dt_list(fn_list)
    return [fn for _, fn in sorted(zip(dt_list, fn_list))]
Sort input filename list by datetime
24,247
def fix_repeat_dt(dt_list, offset_s=0.001):
    """Bump duplicate consecutive datetimes by offset_s seconds until unique.

    Needed for xarray interp, which expects monotonically increasing times.
    dt_list must be a NumPy array of datetimes (mutated in place).
    """
    dup = np.diff(dt_list) == timedelta(0)
    while np.any(dup):
        dt_list[dup.nonzero()[0] + 1] += timedelta(seconds=offset_s)
        dup = np.diff(dt_list) == timedelta(0)
    return dt_list
Add some small offset to remove duplicate times Needed for xarray interp which expects monotonically increasing times
24,248
def get_dt_list(fn_list):
    """Return an array of datetime objects extracted from each filename."""
    return np.array([fn_getdatetime(fn) for fn in fn_list])
Get list of datetime objects extracted from a filename
24,249
def get_closest_dt_idx(dt, dt_list):
    """Return the index of the entry in dt_list closest to dt."""
    from pygeotools.lib import malib
    dt_list = malib.checkma(dt_list, fix=False)
    return np.abs(dt - dt_list).argmin()
Get indices of dt_list that is closest to input dt
24,250
def mean_date(dt_list):
    """Calculate the mean datetime of a datetime list."""
    dts = sorted(dt_list)
    # Average the offsets from the earliest time, then add back
    rel = [dt - dts[0] for dt in dts]
    return dts[0] + sum(rel, timedelta()) / len(rel)
Calculate mean datetime from datetime list
24,251
def median_date(dt_list):
    """Calculate the median datetime from a datetime list.

    Bug fixes: the original used float division for the index
    (len(dt_list) / 2), a Python 2 leftover that raises TypeError as a list
    index on Python 3, and it indexed the UNSORTED input, which is not a
    median. The list is now sorted and indexed with floor division.
    """
    dt_list_sort = sorted(dt_list)
    idx = len(dt_list_sort) // 2
    if len(dt_list_sort) % 2 == 0:
        # Even count: mean of the two central values
        md = mean_date([dt_list_sort[idx - 1], dt_list_sort[idx]])
    else:
        md = dt_list_sort[idx]
    return md
Calculate median datetime from datetime list
24,252
def dt_cluster(dt_list, dt_thresh=16.0):
    """Find clusters of similar datetimes within a datetime list.

    Breaks occur where consecutive (sorted) ordinal times differ by more than
    dt_thresh days. Returns a list of per-cluster dicts.
    """
    # Work in ordinal (float day) space
    if not isinstance(dt_list[0], float):
        o_list = dt2o(dt_list)
    else:
        o_list = dt_list
    o_sort = np.sort(o_list)
    sort_idx = np.argsort(o_list)
    gaps = np.diff(o_sort)
    breaks = np.nonzero(gaps > dt_thresh)[0] + 1
    breaks = np.hstack((0, breaks, gaps.shape[0] + 1))
    f_list = []
    for i in range(len(breaks) - 1):
        b_idx = [breaks[i], breaks[i + 1] - 1]
        b_dt = o_sort[b_idx]
        # NOTE(review): arange(start, stop) excludes the cluster's last
        # element from 'all_*' — preserved as-is; verify against callers
        all_idx = np.arange(b_idx[0], b_idx[1])
        cluster = {}
        cluster['break_indices'] = sort_idx[b_idx]
        cluster['break_ts_o'] = b_dt
        cluster['break_ts_dt'] = o2dt(b_dt)
        cluster['all_indices'] = sort_idx[all_idx]
        cluster['all_ts_o'] = o_sort[all_idx]
        cluster['all_ts_dt'] = o2dt(o_sort[all_idx])
        f_list.append(cluster)
    return f_list
Find clusters of similar datetimes within datetime list
24,253
def dt2decyear(dt):
    """Convert a datetime to a decimal year (year + elapsed fraction)."""
    year = dt.year
    year_start = datetime(year=year, month=1, day=1)
    next_year_start = datetime(year=year + 1, month=1, day=1)
    elapsed = sinceEpoch(dt) - sinceEpoch(year_start)
    duration = sinceEpoch(next_year_start) - sinceEpoch(year_start)
    return year + elapsed / duration
Convert datetime to decimal year
24,254
def decyear2dt(t):
    """Convert a decimal year (e.g. 1999.5) to a datetime."""
    year = int(t)
    frac = t - year
    base = datetime(year, 1, 1)
    # Scale the fraction by the actual length of this year (handles leap years)
    year_sec = (base.replace(year=year + 1) - base).total_seconds()
    return base + timedelta(seconds=year_sec * frac)
Convert decimal year to datetime
24,255
def dt2jd(dt):
    """Convert a datetime to its (integer) Julian Day Number, Gregorian calendar."""
    # Fliegel & Van Flandern integer algorithm: shift the year to start in March
    a = (14 - dt.month) // 12
    y = dt.year + 4800 - a
    m = dt.month + 12 * a - 3
    jdn = dt.day + (153 * m + 2) // 5 + 365 * y + y // 4 - y // 100 + y // 400 - 32045
    return jdn
Convert datetime to julian date
24,256
def jd2dt(jd):
    """Convert a Julian date (float; .5 = midnight) to a datetime.

    Bug fix: the original computed 'year' with true division (m / 10), a
    Python 2 leftover that produces a float year and raises TypeError in
    datetime() on Python 3. Floor division is used here.
    """
    n = int(round(float(jd)))
    # Integer Gregorian-calendar inversion (Richards/Fliegel-Van Flandern style)
    a = n + 32044
    b = (4 * a + 3) // 146097
    c = a - (146097 * b) // 4
    d = (4 * c + 3) // 1461
    e = c - (1461 * d) // 4
    m = (5 * e + 2) // 153
    day = e + 1 - (153 * m + 2) // 5
    month = m + 3 - 12 * (m // 10)
    year = 100 * b + d - 4800 + m // 10
    # Fractional day -> time of day
    tfrac = 0.5 + float(jd) - n
    tfrac_s = 86400.0 * tfrac
    minfrac, hours = np.modf(tfrac_s / 3600.)
    secfrac, minutes = np.modf(minfrac * 60.)
    microsec, seconds = np.modf(secfrac * 60.)
    return datetime(year, month, day, int(hours), int(minutes), int(seconds), int(microsec * 1E6))
Convert julian date to datetime
24,257
def gps2dt(gps_week, gps_ms):
    """Convert GPS week number and milliseconds-of-week to a datetime."""
    # GPS epoch: 1980-01-06 00:00:00
    gps_epoch = datetime(1980, 1, 6, 0, 0, 0)
    return gps_epoch + timedelta(weeks=gps_week) + timedelta(milliseconds=gps_ms)
Convert GPS week and ms to a datetime
24,258
def disco_loop(opc, version, queue, real_out, dup_lines=False, show_bytes=False):
    """Disassemble a queue (deque) of code objects, breadth-first.

    Any code object found in ``co_consts`` is appended to the queue, so
    nested code is emitted in discovery order (not definition order,
    which a reassembler would prefer).  Output is written to *real_out*.
    Being iterative rather than recursive, this uses less memory than
    disco_loop_asm_format.
    """
    while len(queue) > 0:
        co = queue.popleft()
        # Skip the header for the top-level module body
        if co.co_name not in ('<module>', '?'):
            real_out.write("\n" + format_code_info(co, version) + "\n")
        bytecode = Bytecode(co, opc, dup_lines=dup_lines)
        real_out.write(bytecode.dis(show_bytes=show_bytes) + "\n")
        # Queue nested code objects for later disassembly
        for c in co.co_consts:
            if iscode(c):
                queue.append(c)
            pass
        pass
Disassembles a queue of code objects . If we discover another code object which will be found in co_consts we add the new code to the list . Note that the order of code discovery is in the order of first encountered which is not amenable for the format used by a disassembler where code objects should be defined before using them in other functions . However this is not recursive and will overall lead to less memory consumption at run time .
24,259
def disco_loop_asm_format(opc, version, co, real_out, fn_name_map, all_fns):
    """Recursively disassemble *co* in an assembler-friendly format.

    Inner code objects are emitted before the code that references them,
    and code-object names are uniquified (via code_uniquify) so the
    output can be reassembled.  *fn_name_map* maps uniquified names back
    to their base names; *all_fns* is the set of names already emitted.
    Being recursive, this uses more stack space at runtime than
    disco_loop.
    """
    # Normalize to a cross-version, mutable code object wrapper
    if version < 3.0:
        co = code2compat(co)
    else:
        co = code3compat(co)
    co_name = co.co_name
    mapped_name = fn_name_map.get(co_name, co_name)
    new_consts = []
    for c in co.co_consts:
        if iscode(c):
            if version < 3.0:
                c_compat = code2compat(c)
            else:
                c_compat = code3compat(c)
            # Emit the nested code object first (definition-before-use)
            disco_loop_asm_format(opc, version, c_compat, real_out,
                                  fn_name_map, all_fns)
            # Derive a unique name from the code object's repr,
            # e.g. "<code object foo at 0x...>" -> "foo"
            m = re.match(".* object <(.+)> at", str(c))
            if m:
                basename = m.group(1)
                if basename != 'module':
                    mapped_name = code_uniquify(basename, c.co_code)
                    c_compat.co_name = mapped_name
                c_compat.freeze()
            new_consts.append(c_compat)
        else:
            new_consts.append(c)
        pass
    co.co_consts = new_consts
    # Uniquify this code object's own name if it is synthetic ("<...>")
    # or already seen among emitted functions
    m = re.match("^<(.+)>$", co.co_name)
    if m or co_name in all_fns:
        if co_name in all_fns:
            basename = co_name
        else:
            basename = m.group(1)
        if basename != 'module':
            mapped_name = code_uniquify(basename, co.co_code)
            co_name = mapped_name
            assert mapped_name not in fn_name_map
            fn_name_map[mapped_name] = basename
            co.co_name = mapped_name
            pass
    elif co_name in fn_name_map:
        # Name collision with a previously emitted function
        mapped_name = code_uniquify(co_name, co.co_code)
        fn_name_map[mapped_name] = co_name
        co.co_name = mapped_name
        pass
    co = co.freeze()
    all_fns.add(co_name)
    if co.co_name != '<module>' or co.co_filename:
        real_out.write("\n" + format_code_info(co, version, mapped_name) + "\n")
    bytecode = Bytecode(co, opc, dup_lines=True)
    real_out.write(bytecode.dis(asm_format=True) + "\n")
Produces disassembly in a format more conducive to automatic assembly by producing inner modules before they are used by outer ones. Since this is recursive, we'll use more stack space at runtime.
24,260
def wr_long(f, x):
    """Internal; write a 32-bit int to file *f* in little-endian order."""
    # Emit the four bytes least-significant first
    for shift in (0, 8, 16, 24):
        byte = (x >> shift) & 0xff
        if PYTHON3:
            f.write(bytes([byte]))
        else:
            f.write(chr(byte))
Internal ; write a 32 - bit int to a file in little - endian order .
24,261
def dump_compile(codeobject, filename, timestamp, magic):
    """Write *codeobject* as a byte-compiled (.pyc-style) file.

    The file is first written to a temporary path with a zeroed magic
    header; the real magic is patched in only after the body has been
    fully written, then the file is renamed into place.  This way a
    half-written file never carries a valid magic number.  On OSError
    the temporary file is removed and the exception re-raised.
    """
    # Temp name unique to this call (id of the filename object)
    path_tmp = '%s.%s' % (filename, id(filename))
    fc = None
    try:
        fc = open(path_tmp, 'wb')
        # Placeholder magic; overwritten below once the body is written
        if PYTHON3:
            fc.write(bytes([0, 0, 0, 0]))
        else:
            fc.write('\0\0\0\0')
        wr_long(fc, timestamp)
        marshal.dump(codeobject, fc)
        fc.flush()
        # Seek back and install the real magic number
        fc.seek(0, 0)
        fc.write(magic)
        fc.close()
        os.rename(path_tmp, filename)
    except OSError:
        # Clean up the partial temp file before re-raising
        try:
            os.unlink(path_tmp)
        except OSError:
            pass
        raise
    finally:
        if fc:
            fc.close()
Write code object as a byte - compiled file
24,262
def int2magic(magic_int):
    """Pack a magic integer (e.g. 62211) into its 4-byte magic string
    (e.g. b'\\x03\\xf3\\r\\n'): little-endian unsigned short plus CR LF."""
    if sys.version_info >= (3, 0):
        cr, lf = bytes('\r', 'utf-8'), bytes('\n', 'utf-8')
    else:
        cr, lf = '\r', '\n'
    return struct.pack('<Hcc', magic_int, cr, lf)
Given a magic int like 62211 compute the corresponding magic byte string b \ x03 \ xf3 \ r \ n using the conversion method that does this .
24,263
def sysinfo2float(version_info=sys.version_info):
    """Convert a sys.version_info-compatible tuple into the canonic
    floating-point number used to look up magic numbers.

    Only works for released CPython versions, not interim development
    versions, since those cannot be represented as a float.  PyPy,
    Jython and Pyston get an implementation suffix appended.
    """
    vers_str = '.'.join([str(v) for v in version_info[0:3]])
    if version_info[3] != 'final':
        vers_str += '.' + ''.join([str(i) for i in version_info[3:]])
    if IS_PYPY:
        vers_str += 'pypy'
    else:
        try:
            import platform
            # Renamed local (was 'platform', shadowing the module)
            impl = platform.python_implementation()
            if impl in ('Jython', 'Pyston'):
                vers_str += impl
                pass
        except ImportError:
            # Python may lack the platform module
            pass
        except AttributeError:
            # Or a platform module without python_implementation()
            pass
    return py_str2float(vers_str)
Convert a sys.version_info-compatible list into a canonic floating-point number that can then be used to look up a magic number. Note that this can only be used for released versions of CPython, not interim development versions, since we can't represent those as a floating-point number.
24,264
def sysinfo2magic(version_info=sys.version_info):
    """Convert a sys.version_info-compatible tuple into the magic bytes
    for that version.

    Raises KeyError when the version is not in the magics table.
    """
    vers_str = '.'.join([str(v) for v in version_info[0:3]])
    if version_info[3] != 'final':
        vers_str += ''.join([str(v) for v in version_info[3:]])
    if IS_PYPY:
        vers_str += 'pypy'
    else:
        try:
            import platform
            # Renamed local (was 'platform', shadowing the module)
            impl = platform.python_implementation()
            if impl in ('Jython', 'Pyston'):
                vers_str += impl
                pass
        except ImportError:
            pass
        except AttributeError:
            # Consistent with sysinfo2float: tolerate platform modules
            # without python_implementation()
            pass
    return magics[vers_str]
Convert a sys.version_info-compatible list into the magic bytes for that version. Note that this can raise an exception for unknown versions.
24,265
def init_opdata(l, from_mod, version=None, is_pypy=False):
    """Populate dict *l* (a module's local namespace) with the opcode
    structures found in Python's ``opcode.py``.

    Copies the opcode tables from *from_mod* and installs the line/label
    helper functions appropriate for *version*: bytecode helpers for
    <= 3.5, wordcode helpers for newer versions.
    """
    if version:
        l['python_version'] = version
    l['is_pypy'] = is_pypy
    l['cmp_op'] = cmp_op
    l['HAVE_ARGUMENT'] = HAVE_ARGUMENT
    # 3.6+ switched from bytecode to wordcode instruction encoding
    if version <= 3.5:
        l['findlinestarts'] = findlinestarts
        l['findlabels'] = findlabels
        l['get_jump_targets'] = get_jump_targets
        l['get_jump_target_maps'] = get_jump_target_maps
    else:
        l['findlinestarts'] = wordcode.findlinestarts
        l['findlabels'] = wordcode.findlabels
        l['get_jump_targets'] = wordcode.get_jump_targets
        l['get_jump_target_maps'] = wordcode.get_jump_target_maps
    # Deep-copy so later add/remove of ops doesn't mutate the source module
    l['opmap'] = deepcopy(from_mod.opmap)
    l['opname'] = deepcopy(from_mod.opname)
    for field in fields2copy:
        l[field] = list(getattr(from_mod, field))
Sets up a number of the structures found in Python s opcode . py . Python opcode . py routines assign attributes to modules . In order to do this in a modular way here the local dictionary for the module is passed .
24,266
def rm_op(l, name, op):
    """Aggressively remove opcode *op* (named *name*) from the opcode
    tables in dict *l*.

    Used when basing a new Python release's tables on an older one and
    the opcode no longer exists: the opname slot becomes a placeholder,
    the op is purged from every category list, and the name is deleted
    from opmap.
    """
    l['opname'][op] = '<%s>' % op
    # Purge from every opcode-category list.  (The original repeated the
    # 'hasname' check twice; one pass per field is sufficient.)
    for field in ('hasconst', 'hascompare', 'hascondition', 'hasfree',
                  'hasjabs', 'hasname', 'hasjrel', 'haslocal',
                  'hasnargs', 'hasvargs', 'nofollow'):
        if op in l[field]:
            l[field].remove(op)
    assert l['opmap'][name] == op
    del l['opmap'][name]
Remove an opcode . This is used when basing a new Python release off of another one and there is an opcode that is in the old release that was removed in the new release . We are pretty aggressive about removing traces of the op .
24,267
def opcode_check(l):
    """Sanity check: when the running interpreter's version and PyPy-ness
    match the opcode tables in *l*, verify that l['opmap'] agrees exactly
    with the interpreter's own ``dis.opmap``.

    Does nothing when the versions differ.
    """
    if (abs(PYTHON_VERSION - l['python_version']) <= 0.01
            and IS_PYPY == l['is_pypy']):
        try:
            import dis
            opmap = fix_opcode_names(dis.opmap)
            # Mutual inclusion => the two maps are identical
            assert all(item in opmap.items() for item in l['opmap'].items())
            assert all(item in l['opmap'].items() for item in opmap.items())
        except:
            # NOTE(review): failures are deliberately swallowed and the
            # 'import sys' looks vestigial -- confirm against upstream
            import sys
When the version of Python we are running happens to have the same opcode set as the opcode we are importing we perform checks to make sure our opcode set matches exactly .
24,268
def dump_opcodes ( opmap ) : op2name = { } for k in opmap . keys ( ) : op2name [ opmap [ k ] ] = k for i in sorted ( op2name . keys ( ) ) : print ( "%-3s %s" % ( str ( i ) , op2name [ i ] ) )
Utility for dumping opcodes
24,269
def pretty_flags(flags):
    """Return a human-readable representation of code-object flags."""
    hex_repr = "0x%08x" % flags
    names = []
    remaining = flags
    for bit in range(32):
        mask = 1 << bit
        if remaining & mask:
            names.append(COMPILER_FLAG_NAMES.get(mask, hex(mask)))
            remaining ^= mask
            if not remaining:
                break
    else:
        # Loop exhausted without clearing all bits (or flags was 0)
        names.append(hex(remaining))
    names.reverse()
    return "%s (%s)" % (hex_repr, " | ".join(names))
Return pretty representation of code flags .
24,270
def _try_compile ( source , name ) : try : c = compile ( source , name , 'eval' ) except SyntaxError : c = compile ( source , name , 'exec' ) return c
Attempts to compile the given source first as an expression and then as a statement if the first approach fails .
24,271
def get_code_object(x):
    """Extract the code object from methods, functions, generators,
    strings (compiled on the fly) or raw code objects."""
    # Unwrap in order: bound method -> function -> generator
    for attr in ('__func__', '__code__', 'gi_code'):
        if hasattr(x, attr):
            x = getattr(x, attr)
    if isinstance(x, str):
        x = _try_compile(x, "<disassembly>")
    if hasattr(x, 'co_code'):
        return x
    raise TypeError("don't know how to disassemble %s objects"
                    % type(x).__name__)
Helper to handle methods functions generators strings and raw code objects
24,272
def get_jump_target_maps(code, opc):
    """Return a dict mapping each reachable instruction offset to the
    list of instruction offsets that can execute immediately before it.

    Predecessors come both from fall-through (suppressed when the
    previous instruction is in opc.NOFOLLOW) and from relative/absolute
    jumps targeting the offset.  Useful for control-flow analysis.
    """
    offset2prev = {}
    # -1 means "previous instruction does not fall through here"
    prev_offset = -1
    for offset, op, arg in unpack_opargs_bytecode(code, opc):
        if prev_offset >= 0:
            prev_list = offset2prev.get(offset, [])
            prev_list.append(prev_offset)
            offset2prev[offset] = prev_list
        if op in opc.NOFOLLOW:
            prev_offset = -1
        else:
            prev_offset = offset
        if arg is not None:
            jump_offset = -1
            if op in opc.JREL_OPS:
                # Relative jumps are measured from the end of this insn
                op_len = op_size(op, opc)
                jump_offset = offset + op_len + arg
            elif op in opc.JABS_OPS:
                jump_offset = arg
            if jump_offset >= 0:
                prev_list = offset2prev.get(jump_offset, [])
                prev_list.append(offset)
                offset2prev[jump_offset] = prev_list
    return offset2prev
Returns a dictionary where the key is an offset and the values are a list of instruction offsets which can get run before that instruction . This includes jump instructions as well as non - jump instructions . Therefore the keys of the dictionary are reachable instructions . The values of the dictionary may be useful in control - flow analysis .
24,273
def _get_const_info ( const_index , const_list ) : argval = const_index if const_list is not None : argval = const_list [ const_index ] if isinstance ( argval , float ) and str ( argval ) in frozenset ( [ 'nan' , '-nan' , 'inf' , '-inf' ] ) : return argval , "float('%s')" % argval return argval , repr ( argval )
Helper to get optional details about const references
24,274
def _get_name_info ( name_index , name_list ) : argval = name_index if ( name_list is not None and name_index < len ( name_list ) ) : argval = name_list [ name_index ] argrepr = argval else : argrepr = repr ( argval ) return argval , argrepr
Helper to get optional details about named references
24,275
def instruction_size(op, opc):
    """Return the size in bytes of instruction *op* under opcode module
    *opc*: wordcode (3.6+) is always 2; bytecode is 1 without an
    argument, 3 with one."""
    if opc.version >= 3.6:
        return 2
    return 1 if op < opc.HAVE_ARGUMENT else 3
For a given opcode op in opcode module opc return the size in bytes of an op instruction .
24,276
def disassemble(self, lineno_width=3, mark_as_current=False, asm_format=False,
                show_bytes=False):
    """Format instruction details for inclusion in disassembly output.

    *lineno_width* sets the width of the line-number column (0 omits it);
    *mark_as_current* inserts the current-instruction marker;
    *asm_format* emits labels ("L<offset>:") suitable for reassembly;
    *show_bytes* adds the raw opcode/operand bytes.

    NOTE(review): the source of this block arrived garbled (an
    unterminated string literal around the current-instruction marker
    and the blank-padding fields).  The '-->' marker and the padding
    widths below are reconstructed from stdlib `dis` conventions --
    confirm against the upstream project.
    """
    fields = []
    if asm_format:
        indexed_operand = set(['name', 'local', 'compare', 'free'])
    # Column: source line number (only when it changes)
    if lineno_width:
        if self.starts_line is not None:
            if asm_format:
                lineno_fmt = "%%%dd:\n" % lineno_width
                fields.append(lineno_fmt % self.starts_line)
                fields.append(' ' * (lineno_width))
                if self.is_jump_target:
                    fields.append(' ' * (lineno_width - 1))
            else:
                lineno_fmt = "%%%dd:" % lineno_width
                fields.append(lineno_fmt % self.starts_line)
        else:
            fields.append(' ' * (lineno_width + 1))
    # Column: current-instruction indicator (reconstructed marker)
    if mark_as_current and not asm_format:
        fields.append('-->')
    else:
        fields.append('   ')
    # Column: jump-target indicator / asm label
    if self.is_jump_target:
        if not asm_format:
            fields.append('>>')
        else:
            fields = ["L%d:\n" % self.offset] + fields
            if not self.starts_line:
                fields.append(' ')
    else:
        fields.append('  ')
    # Column: instruction offset from start of code
    if not asm_format:
        fields.append(repr(self.offset).rjust(4))
    # Optional raw bytes
    if show_bytes:
        hex_bytecode = "|%02x" % self.opcode
        if self.inst_size == 1:
            # Single-byte instruction: pad the operand columns
            hex_bytecode += ' ' * (2 * 3)
        if self.inst_size == 2:
            # Wordcode: one operand byte (or a zero placeholder)
            if self.has_arg:
                hex_bytecode += " %02x" % (self.arg % 256)
            else:
                hex_bytecode += ' 00'
        elif self.inst_size == 3:
            # Bytecode with argument: two operand bytes
            hex_bytecode += " %02x %02x" % ((self.arg >> 8, self.arg % 256))
        fields.append(hex_bytecode + '|')
    # Column: opcode name
    fields.append(self.opname.ljust(20))
    # Column: operand
    if self.arg is not None:
        argrepr = self.argrepr
        if asm_format:
            if self.optype == 'jabs':
                fields.append('L' + str(self.arg))
            elif self.optype == 'jrel':
                argval = self.offset + self.arg + self.inst_size
                fields.append('L' + str(argval))
            elif self.optype in indexed_operand:
                fields.append('(%s)' % argrepr)
                argrepr = None
            elif (self.optype == 'const'
                  and not re.search(r'\s', argrepr)):
                fields.append('(%s)' % argrepr)
                argrepr = None
            else:
                fields.append(repr(self.arg))
        elif not (show_bytes and argrepr):
            fields.append(repr(self.arg).rjust(6))
        if argrepr:
            fields.append('(%s)' % argrepr)
        pass
    pass
    return ' '.join(fields).rstrip()
Format instruction details for inclusion in disassembly output
24,277
def from_traceback(cls, tb):
    """Construct a Bytecode from the innermost frame of *tb*, with
    current_offset set to the instruction that was executing."""
    innermost = tb
    while innermost.tb_next is not None:
        innermost = innermost.tb_next
    return cls(innermost.tb_frame.f_code, current_offset=innermost.tb_lasti)
Construct a Bytecode from the given traceback
24,278
def dis(self, asm_format=False, show_bytes=False):
    """Return a formatted, multi-line view of the bytecode operations.

    Delegates to disassemble_bytes(), passing the code object's tables
    and, when this Bytecode was built from a traceback, the current
    offset (lasti) so the active instruction can be marked.
    """
    co = self.codeobj
    if self.current_offset is not None:
        offset = self.current_offset
    else:
        # -1 means "no current instruction to highlight"
        offset = -1
    output = StringIO()
    self.disassemble_bytes(co.co_code, varnames=co.co_varnames,
                           names=co.co_names, constants=co.co_consts,
                           cells=self._cell_names,
                           linestarts=self._linestarts,
                           line_offset=self._line_offset,
                           file=output, lasti=offset,
                           asm_format=asm_format, show_bytes=show_bytes)
    return output.getvalue()
Return a formatted view of the bytecode operations .
24,279
def get_jump_target_maps(code, opc):
    """Wordcode variant: return a dict mapping each reachable instruction
    offset to the list of offsets that can execute immediately before it.

    Predecessors come both from fall-through (suppressed for
    opc.NOFOLLOW instructions) and from relative/absolute jumps that
    target the offset.  Wordcode instructions are a fixed 2 bytes,
    hence "offset + 2 + arg" for relative jumps.  Useful for
    control-flow analysis.
    """
    offset2prev = {}
    # -1 means "previous instruction does not fall through here"
    prev_offset = -1
    for offset, op, arg in unpack_opargs_wordcode(code, opc):
        if prev_offset >= 0:
            prev_list = offset2prev.get(offset, [])
            prev_list.append(prev_offset)
            offset2prev[offset] = prev_list
        prev_offset = offset
        if op in opc.NOFOLLOW:
            prev_offset = -1
        if arg is not None:
            jump_offset = -1
            if op in opc.JREL_OPS:
                # Relative to the end of this fixed 2-byte instruction
                jump_offset = offset + 2 + arg
            elif op in opc.JABS_OPS:
                jump_offset = arg
            if jump_offset >= 0:
                prev_list = offset2prev.get(jump_offset, [])
                prev_list.append(offset)
                offset2prev[jump_offset] = prev_list
    return offset2prev
Returns a dictionary where the key is an offset and the values are a list of instruction offsets which can get run before that instruction . This includes jump instructions as well as non - jump instructions . Therefore the keys of the dictionary are reachible instructions . The values of the dictionary may be useful in control - flow analysis .
24,280
def dis(self, x=None, file=None):
    """Disassemble classes, methods, functions, generators or code,
    writing the result via _print()."""
    listing = self.Bytecode(x).dis()
    self._print(listing, file)
Disassemble classes methods functions generators or code .
24,281
def get_protocol_from_name(name):
    """Return the protocol class registered under *name*.

    Raises ValueError when no protocol with that name is registered.
    """
    protocol_cls = protocol_map.get(name)
    if protocol_cls:
        return protocol_cls
    raise ValueError('Unsupported protocol "%s".' % name)
Returns the protocol class for the protocol with the given name .
24,282
def create_protocol(name, **kwargs):
    """Instantiate the protocol registered under *name*, forwarding
    *kwargs* to its constructor.

    Raises ValueError when no protocol with that name is registered.
    """
    protocol_cls = protocol_map.get(name)
    if protocol_cls:
        return protocol_cls(**kwargs)
    raise ValueError('Unsupported protocol "%s".' % name)
Returns an instance of the protocol with the given name .
24,283
def _long_from_raw ( thehash ) : hashnum = 0 for h in thehash : hashnum <<= 8 hashnum |= ord ( bytes ( [ h ] ) ) return hashnum
Fold to a long a digest supplied as a string .
24,284
def aborted(self, exc_info):
    """Record *exc_info* as this log's abort cause, mark the log as
    ended, and write the formatted traceback."""
    self.exc_info, self.did_end = exc_info, True
    self.write(format_exception(*exc_info))
Called by a logger to log an exception .
24,285
def from_file(filename, password='', keytype=None):
    """Return a new PrivateKey for the key stored in *filename*.

    When *keytype* is None the type is auto-detected by attempting to
    parse the file first as RSA, then as DSS; a ValueError is raised if
    neither succeeds.  The parsed key object is only used for detection;
    the returned PrivateKey records the type, filename and password.
    """
    if keytype is None:
        # Unused 'as e' bindings removed (the exceptions were never read)
        try:
            RSAKey.from_private_key_file(filename)
            keytype = 'rsa'
        except SSHException:
            try:
                DSSKey.from_private_key_file(filename)
                keytype = 'dss'
            except SSHException:
                msg = 'not a recognized private key: ' + repr(filename)
                raise ValueError(msg)
    key = PrivateKey(keytype)
    key.filename = filename
    key.password = password
    return key
Returns a new PrivateKey instance with the given attributes . If keytype is None we attempt to automatically detect the type .
24,286
def get_uri(self):
    """Return a URI-formatted representation of this host.

    The URL is built from the host's address (not its name), protocol
    and TCP port, plus all string/list variables as query vars.  When an
    account is attached, its name and passwords are included.
    """
    url = Url()
    url.protocol = self.get_protocol()
    url.hostname = self.get_address()
    url.port = self.get_tcp_port()
    # Only plain-string or list variables survive into the query string
    url.vars = dict((k, to_list(v))
                    for (k, v) in list(self.get_all().items())
                    if isinstance(v, str) or isinstance(v, list))
    if self.account:
        url.username = self.account.get_name()
        url.password1 = self.account.get_password()
        url.password2 = self.account.authorization_password
    return str(url)
Returns a URI formatted representation of the host including all of it s attributes except for the name . Uses the address not the name of the host to build the URI .
24,287
def set_address(self, address):
    """Set the address used to open the connection, normalizing IP
    addresses via clean_ip(); hostname, credentials, protocol and TCP
    port are left untouched."""
    self.address = clean_ip(address) if is_ip(address) else address
Set the address of the remote host the is contacted without changing hostname username password protocol and TCP port number . This is the actual address that is used to open the connection .
24,288
def get_option(self, name, default=None):
    """Return option *name*, or *default* when options are unset or the
    name is missing."""
    options = self.options
    return default if options is None else options.get(name, default)
Returns the value of the given option if it is defined returns the given default value otherwise .
24,289
def set_tcp_port(self, tcp_port):
    """Set the TCP port number (stored as int; None clears it)."""
    self.tcp_port = None if tcp_port is None else int(tcp_port)
Defines the TCP port number .
24,290
def append(self, name, value):
    """Append *value* to the list variable *name*, creating the vars
    mapping and the list as needed."""
    if self.vars is None:
        self.vars = {}
    self.vars.setdefault(name, []).append(value)
Appends the given value to the list variable with the given name .
24,291
def get(self, name, default=None):
    """Return variable *name*, or *default* when vars are unset or the
    name is missing."""
    variables = self.vars
    return default if variables is None else variables.get(name, default)
Returns the value of the given variable or the given default value if the variable is not defined .
24,292
def copy_labels(src, dst):
    """Copy all labels of *src* to *dst* (shallow copy of the private
    _labels mapping); a no-op when *src* has no labels."""
    src_labels = src.__dict__.get('_labels')
    if src_labels is not None:
        dst.__dict__['_labels'] = src_labels.copy()
Copies all labels of one object to another object .
24,293
def serializeable_exc_info(thetype, ex, tb):
    """Replace the unpicklable traceback object in an exc_info tuple
    with its formatted string so the tuple can cross process
    boundaries."""
    formatted = ''.join(traceback.format_exception(thetype, ex, tb))
    return thetype, ex, formatted
Since traceback objects can not be pickled this function manipulates exception info tuples before they are passed accross process boundaries .
24,294
def deprecated(func):
    """Decorator marking *func* as deprecated: calling it emits a
    DeprecationWarning pointing at the caller.

    Uses functools.wraps (consistent with synchronized()) instead of
    manually copying __name__/__doc__/__dict__; wraps copies those and
    more (__module__, __qualname__, __wrapped__).
    """
    @wraps(func)
    def decorated(*args, **kwargs):
        warnings.warn('Call to deprecated function %s.' % func.__name__,
                      category=DeprecationWarning,
                      stacklevel=2)
        return func(*args, **kwargs)
    return decorated
A decorator for marking functions as deprecated . Results in a printed warning message when the function is used .
24,295
def synchronized(func):
    """Decorator for synchronizing method access with a per-instance
    multiprocessing RLock, created lazily on first call."""
    @wraps(func)
    def wrapped(self, *args, **kwargs):
        try:
            lock = self._sync_lock
        except AttributeError:
            # First call on this instance: create and store the lock.
            # setdefault guards against a concurrent first call.
            from multiprocessing import RLock
            lock = self.__dict__.setdefault('_sync_lock', RLock())
        with lock:
            return func(self, *args, **kwargs)
    return wrapped
Decorator for synchronizing method access .
24,296
def debug(func):
    """Decorator that writes a message to stdout whenever *func* is
    entered or left, including any caught traceback before re-raising."""
    @wraps(func)
    def wrapped(*args, **kwargs):
        call_desc = repr(args) + ' ' + repr(kwargs)
        sys.stdout.write('Entering ' + func.__name__ + call_desc + '\n')
        try:
            result = func(*args, **kwargs)
        except:
            sys.stdout.write('Traceback caught:\n')
            sys.stdout.write(format_exception(*sys.exc_info()))
            raise
        sys.stdout.write('Leaving ' + func.__name__ + '(): '
                         + repr(result) + '\n')
        return result
    return wrapped
Decorator that prints a message whenever a function is entered or left .
24,297
def eval(conn, string, strip_command=True, **kwargs):
    """Compile the given template and execute it on *conn*; raises an
    exception if compilation fails."""
    return _run(conn, None, string,
                {'strip_command': strip_command}, **kwargs)
Compiles the given template and executes it on the given connection . Raises an exception if the compilation fails .
24,298
def _urlparse_qs(url):
    """Parse the query string of *url* into an ordered mapping of name
    to list of values.

    Both '&' and ';' separate pairs; '+' decodes to space; pairs with a
    missing or empty value are skipped.
    """
    query = urlparse(url)[4]
    result = OrderedDefaultDict(list)
    for chunk in query.split('&'):
        for name_value in chunk.split(';'):
            pair = name_value.split('=', 1)
            # Skip malformed pairs and empty values
            if len(pair) != 2 or len(pair[1]) == 0:
                continue
            key = _unquote(pair[0].replace('+', ' '))
            value = _unquote(pair[1].replace('+', ' '))
            result[key].append(value)
    return result
Parse a URL query string and return the components as a dictionary .
24,299
def set_debug(self, debug=1):
    """Set the debug level on this object and its main loop, after
    checking readiness via _check_if_ready()."""
    self._check_if_ready()
    self.main_loop.debug = debug
    self.debug = debug
Set the debug level .