idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
20,100 | def raster_get_stats ( src_path , indexes = None , nodata = None , overview_level = None , max_size = 1024 , percentiles = ( 2 , 98 ) , dst_crs = CRS ( { "init" : "EPSG:4326" } ) , histogram_bins = 10 , histogram_range = None , ) : if isinstance ( indexes , int ) : indexes = [ indexes ] elif isinstance ( indexes , tuple ) : indexes = list ( indexes ) with rasterio . open ( src_path ) as src_dst : levels = src_dst . overviews ( 1 ) width = src_dst . width height = src_dst . height indexes = indexes if indexes else src_dst . indexes nodata = nodata if nodata is not None else src_dst . nodata bounds = transform_bounds ( * [ src_dst . crs , dst_crs ] + list ( src_dst . bounds ) , densify_pts = 21 ) minzoom , maxzoom = get_zooms ( src_dst ) def _get_descr ( ix ) : name = src_dst . descriptions [ ix - 1 ] if not name : name = "band{}" . format ( ix ) return name band_descriptions = [ ( ix , _get_descr ( ix ) ) for ix in indexes ] if len ( levels ) : if overview_level : decim = levels [ overview_level ] else : for ii , decim in enumerate ( levels ) : if max ( width // decim , height // decim ) < max_size : break else : decim = 1 warnings . warn ( "Dataset has no overviews, reading the full dataset" , NoOverviewWarning ) out_shape = ( len ( indexes ) , height // decim , width // decim ) vrt_params = dict ( add_alpha = True , resampling = Resampling . bilinear ) if has_alpha_band ( src_dst ) : vrt_params . update ( dict ( add_alpha = False ) ) if nodata is not None : vrt_params . update ( dict ( nodata = nodata , add_alpha = False , src_nodata = nodata ) ) with WarpedVRT ( src_dst , ** vrt_params ) as vrt : arr = vrt . read ( out_shape = out_shape , indexes = indexes , masked = True ) params = { } if histogram_bins : params . update ( dict ( bins = histogram_bins ) ) if histogram_range : params . update ( dict ( range = histogram_range ) ) stats = { indexes [ b ] : _stats ( arr [ b ] , percentiles = percentiles , ** params ) for b in range ( arr . 
shape [ 0 ] ) if vrt . colorinterp [ b ] != ColorInterp . alpha } return { "bounds" : { "value" : bounds , "crs" : dst_crs . to_string ( ) if isinstance ( dst_crs , CRS ) else dst_crs , } , "minzoom" : minzoom , "maxzoom" : maxzoom , "band_descriptions" : band_descriptions , "statistics" : stats , } | Retrieve dataset statistics . |
20,101 | def get_vrt_transform ( src_dst , bounds , bounds_crs = "epsg:3857" ) : dst_transform , _ , _ = calculate_default_transform ( src_dst . crs , bounds_crs , src_dst . width , src_dst . height , * src_dst . bounds ) w , s , e , n = bounds vrt_width = math . ceil ( ( e - w ) / dst_transform . a ) vrt_height = math . ceil ( ( s - n ) / dst_transform . e ) vrt_transform = transform . from_bounds ( w , s , e , n , vrt_width , vrt_height ) return vrt_transform , vrt_width , vrt_height | Calculate VRT transform . |
20,102 | def has_alpha_band ( src_dst ) : if ( any ( [ MaskFlags . alpha in flags for flags in src_dst . mask_flag_enums ] ) or ColorInterp . alpha in src_dst . colorinterp ) : return True return False | Check for alpha band or mask in source . |
20,103 | def linear_rescale ( image , in_range = ( 0 , 1 ) , out_range = ( 1 , 255 ) ) : imin , imax = in_range omin , omax = out_range image = np . clip ( image , imin , imax ) - imin image = image / np . float ( imax - imin ) return image * ( omax - omin ) + omin | Linear rescaling . |
20,104 | def tile_exists ( bounds , tile_z , tile_x , tile_y ) : mintile = mercantile . tile ( bounds [ 0 ] , bounds [ 3 ] , tile_z ) maxtile = mercantile . tile ( bounds [ 2 ] , bounds [ 1 ] , tile_z ) return ( ( tile_x <= maxtile . x + 1 ) and ( tile_x >= mintile . x ) and ( tile_y <= maxtile . y + 1 ) and ( tile_y >= mintile . y ) ) | Check if a mercatile tile is inside a given bounds . |
20,105 | def _apply_discrete_colormap ( arr , cmap ) : res = np . zeros ( ( arr . shape [ 1 ] , arr . shape [ 2 ] , 3 ) , dtype = np . uint8 ) for k , v in cmap . items ( ) : res [ arr [ 0 ] == k ] = v return np . transpose ( res , [ 2 , 0 , 1 ] ) | Apply discrete colormap . |
20,106 | def array_to_image ( arr , mask = None , img_format = "png" , color_map = None , ** creation_options ) : img_format = img_format . lower ( ) if len ( arr . shape ) < 3 : arr = np . expand_dims ( arr , axis = 0 ) if color_map is not None and isinstance ( color_map , dict ) : arr = _apply_discrete_colormap ( arr , color_map ) elif color_map is not None : arr = np . transpose ( color_map [ arr ] [ 0 ] , [ 2 , 0 , 1 ] ) . astype ( np . uint8 ) if img_format == "webp" and arr . shape [ 0 ] == 1 : arr = np . repeat ( arr , 3 , axis = 0 ) if mask is not None and img_format != "jpeg" : nbands = arr . shape [ 0 ] + 1 else : nbands = arr . shape [ 0 ] output_profile = dict ( driver = img_format , dtype = arr . dtype , count = nbands , height = arr . shape [ 1 ] , width = arr . shape [ 2 ] , ) output_profile . update ( creation_options ) with MemoryFile ( ) as memfile : with memfile . open ( ** output_profile ) as dst : dst . write ( arr , indexes = list ( range ( 1 , arr . shape [ 0 ] + 1 ) ) ) if mask is not None and img_format != "jpeg" : dst . write ( mask . astype ( arr . dtype ) , indexes = nbands ) return memfile . read ( ) | Translate numpy ndarray to image buffer using GDAL . |
20,107 | def get_colormap ( name = "cfastie" , format = "pil" ) : cmap_file = os . path . join ( os . path . dirname ( __file__ ) , "cmap" , "{0}.txt" . format ( name ) ) with open ( cmap_file ) as cmap : lines = cmap . read ( ) . splitlines ( ) colormap = [ list ( map ( int , line . split ( ) ) ) for line in lines if not line . startswith ( "#" ) ] [ 1 : ] cmap = list ( np . array ( colormap ) . flatten ( ) ) if format . lower ( ) == "pil" : return cmap elif format . lower ( ) == "gdal" : return np . array ( list ( _chunks ( cmap , 3 ) ) ) else : raise Exception ( "Unsupported {} colormap format" . format ( format ) ) | Return Pillow or GDAL compatible colormap array . |
20,108 | def mapzen_elevation_rgb ( arr ) : arr = np . clip ( arr + 32768.0 , 0.0 , 65535.0 ) r = arr / 256 g = arr % 256 b = ( arr * 256 ) % 256 return np . stack ( [ r , g , b ] ) . astype ( np . uint8 ) | Encode elevation value to RGB values compatible with Mapzen tangram . |
20,109 | def expression ( sceneid , tile_x , tile_y , tile_z , expr = None , ** kwargs ) : if not expr : raise Exception ( "Missing expression" ) bands_names = tuple ( set ( re . findall ( r"b(?P<bands>[0-9A]{1,2})" , expr ) ) ) rgb = expr . split ( "," ) if sceneid . startswith ( "L" ) : from rio_tiler . landsat8 import tile as l8_tile arr , mask = l8_tile ( sceneid , tile_x , tile_y , tile_z , bands = bands_names , ** kwargs ) elif sceneid . startswith ( "S2" ) : from rio_tiler . sentinel2 import tile as s2_tile arr , mask = s2_tile ( sceneid , tile_x , tile_y , tile_z , bands = bands_names , ** kwargs ) elif sceneid . startswith ( "CBERS" ) : from rio_tiler . cbers import tile as cbers_tile arr , mask = cbers_tile ( sceneid , tile_x , tile_y , tile_z , bands = bands_names , ** kwargs ) else : from rio_tiler . main import tile as main_tile bands = tuple ( map ( int , bands_names ) ) arr , mask = main_tile ( sceneid , tile_x , tile_y , tile_z , indexes = bands , ** kwargs ) ctx = { } for bdx , b in enumerate ( bands_names ) : ctx [ "b{}" . format ( b ) ] = arr [ bdx ] return ( np . array ( [ np . nan_to_num ( ne . evaluate ( bloc . strip ( ) , local_dict = ctx ) ) for bloc in rgb ] ) , mask , ) | Apply expression on data . |
20,110 | def _sentinel_parse_scene_id ( sceneid ) : if not re . match ( "^S2[AB]_tile_[0-9]{8}_[0-9]{2}[A-Z]{3}_[0-9]$" , sceneid ) : raise InvalidSentinelSceneId ( "Could not match {}" . format ( sceneid ) ) sentinel_pattern = ( r"^S" r"(?P<sensor>\w{1})" r"(?P<satellite>[AB]{1})" r"_tile_" r"(?P<acquisitionYear>[0-9]{4})" r"(?P<acquisitionMonth>[0-9]{2})" r"(?P<acquisitionDay>[0-9]{2})" r"_" r"(?P<utm>[0-9]{2})" r"(?P<lat>\w{1})" r"(?P<sq>\w{2})" r"_" r"(?P<num>[0-9]{1})$" ) meta = None match = re . match ( sentinel_pattern , sceneid , re . IGNORECASE ) if match : meta = match . groupdict ( ) utm_zone = meta [ "utm" ] . lstrip ( "0" ) grid_square = meta [ "sq" ] latitude_band = meta [ "lat" ] year = meta [ "acquisitionYear" ] month = meta [ "acquisitionMonth" ] . lstrip ( "0" ) day = meta [ "acquisitionDay" ] . lstrip ( "0" ) img_num = meta [ "num" ] meta [ "key" ] = "tiles/{}/{}/{}/{}/{}/{}/{}" . format ( utm_zone , latitude_band , grid_square , year , month , day , img_num ) meta [ "scene" ] = sceneid return meta | Parse Sentinel - 2 scene id . |
20,111 | def tile ( sceneid , tile_x , tile_y , tile_z , bands = ( "04" , "03" , "02" ) , tilesize = 256 ) : if not isinstance ( bands , tuple ) : bands = tuple ( ( bands , ) ) for band in bands : if band not in SENTINEL_BANDS : raise InvalidBandName ( "{} is not a valid Sentinel band name" . format ( band ) ) scene_params = _sentinel_parse_scene_id ( sceneid ) sentinel_address = "{}/{}" . format ( SENTINEL_BUCKET , scene_params [ "key" ] ) sentinel_preview = "{}/preview.jp2" . format ( sentinel_address ) with rasterio . open ( sentinel_preview ) as src : wgs_bounds = transform_bounds ( * [ src . crs , "epsg:4326" ] + list ( src . bounds ) , densify_pts = 21 ) if not utils . tile_exists ( wgs_bounds , tile_z , tile_x , tile_y ) : raise TileOutsideBounds ( "Tile {}/{}/{} is outside image bounds" . format ( tile_z , tile_x , tile_y ) ) mercator_tile = mercantile . Tile ( x = tile_x , y = tile_y , z = tile_z ) tile_bounds = mercantile . xy_bounds ( mercator_tile ) addresses = [ "{}/B{}.jp2" . format ( sentinel_address , band ) for band in bands ] _tiler = partial ( utils . tile_read , bounds = tile_bounds , tilesize = tilesize , nodata = 0 ) with futures . ThreadPoolExecutor ( max_workers = MAX_THREADS ) as executor : data , masks = zip ( * list ( executor . map ( _tiler , addresses ) ) ) mask = np . all ( masks , axis = 0 ) . astype ( np . uint8 ) * 255 return np . concatenate ( data ) , mask | Create mercator tile from Sentinel - 2 data . |
20,112 | def _landsat_get_mtl ( sceneid ) : scene_params = _landsat_parse_scene_id ( sceneid ) meta_file = "http://landsat-pds.s3.amazonaws.com/{}_MTL.txt" . format ( scene_params [ "key" ] ) metadata = str ( urlopen ( meta_file ) . read ( ) . decode ( ) ) return toa_utils . _parse_mtl_txt ( metadata ) | Get Landsat - 8 MTL metadata . |
20,113 | def _landsat_parse_scene_id ( sceneid ) : pre_collection = r"(L[COTEM]8\d{6}\d{7}[A-Z]{3}\d{2})" collection_1 = r"(L[COTEM]08_L\d{1}[A-Z]{2}_\d{6}_\d{8}_\d{8}_\d{2}_(T1|T2|RT))" if not re . match ( "^{}|{}$" . format ( pre_collection , collection_1 ) , sceneid ) : raise InvalidLandsatSceneId ( "Could not match {}" . format ( sceneid ) ) precollection_pattern = ( r"^L" r"(?P<sensor>\w{1})" r"(?P<satellite>\w{1})" r"(?P<path>[0-9]{3})" r"(?P<row>[0-9]{3})" r"(?P<acquisitionYear>[0-9]{4})" r"(?P<acquisitionJulianDay>[0-9]{3})" r"(?P<groundStationIdentifier>\w{3})" r"(?P<archiveVersion>[0-9]{2})$" ) collection_pattern = ( r"^L" r"(?P<sensor>\w{1})" r"(?P<satellite>\w{2})" r"_" r"(?P<processingCorrectionLevel>\w{4})" r"_" r"(?P<path>[0-9]{3})" r"(?P<row>[0-9]{3})" r"_" r"(?P<acquisitionYear>[0-9]{4})" r"(?P<acquisitionMonth>[0-9]{2})" r"(?P<acquisitionDay>[0-9]{2})" r"_" r"(?P<processingYear>[0-9]{4})" r"(?P<processingMonth>[0-9]{2})" r"(?P<processingDay>[0-9]{2})" r"_" r"(?P<collectionNumber>\w{2})" r"_" r"(?P<collectionCategory>\w{2})$" ) meta = None for pattern in [ collection_pattern , precollection_pattern ] : match = re . match ( pattern , sceneid , re . IGNORECASE ) if match : meta = match . groupdict ( ) break if meta . get ( "acquisitionJulianDay" ) : date = datetime . datetime ( int ( meta [ "acquisitionYear" ] ) , 1 , 1 ) + datetime . timedelta ( int ( meta [ "acquisitionJulianDay" ] ) - 1 ) meta [ "date" ] = date . strftime ( "%Y-%m-%d" ) else : meta [ "date" ] = "{}-{}-{}" . format ( meta [ "acquisitionYear" ] , meta [ "acquisitionMonth" ] , meta [ "acquisitionDay" ] ) collection = meta . get ( "collectionNumber" , "" ) if collection != "" : collection = "c{}" . format ( int ( collection ) ) meta [ "key" ] = os . path . join ( collection , "L8" , meta [ "path" ] , meta [ "row" ] , sceneid , sceneid ) meta [ "scene" ] = sceneid return meta | Parse Landsat - 8 scene id . |
20,114 | def _landsat_stats ( band , address_prefix , metadata , overview_level = None , max_size = 1024 , percentiles = ( 2 , 98 ) , dst_crs = CRS ( { "init" : "EPSG:4326" } ) , histogram_bins = 10 , histogram_range = None , ) : src_path = "{}_B{}.TIF" . format ( address_prefix , band ) with rasterio . open ( src_path ) as src : levels = src . overviews ( 1 ) width = src . width height = src . height bounds = transform_bounds ( * [ src . crs , dst_crs ] + list ( src . bounds ) , densify_pts = 21 ) if len ( levels ) : if overview_level : decim = levels [ overview_level ] else : for ii , decim in enumerate ( levels ) : if max ( width // decim , height // decim ) < max_size : break else : decim = 1 warnings . warn ( "Dataset has no overviews, reading the full dataset" , NoOverviewWarning ) out_shape = ( height // decim , width // decim ) vrt_params = dict ( nodata = 0 , add_alpha = False , src_nodata = 0 , init_dest_nodata = False ) with WarpedVRT ( src , ** vrt_params ) as vrt : arr = vrt . read ( out_shape = out_shape , indexes = [ 1 ] , masked = True ) if band in [ "10" , "11" ] : multi_rad = metadata [ "RADIOMETRIC_RESCALING" ] . get ( "RADIANCE_MULT_BAND_{}" . format ( band ) ) add_rad = metadata [ "RADIOMETRIC_RESCALING" ] . get ( "RADIANCE_ADD_BAND_{}" . format ( band ) ) k1 = metadata [ "TIRS_THERMAL_CONSTANTS" ] . get ( "K1_CONSTANT_BAND_{}" . format ( band ) ) k2 = metadata [ "TIRS_THERMAL_CONSTANTS" ] . get ( "K2_CONSTANT_BAND_{}" . format ( band ) ) arr = brightness_temp . brightness_temp ( arr , multi_rad , add_rad , k1 , k2 ) else : multi_reflect = metadata [ "RADIOMETRIC_RESCALING" ] . get ( "REFLECTANCE_MULT_BAND_{}" . format ( band ) ) add_reflect = metadata [ "RADIOMETRIC_RESCALING" ] . get ( "REFLECTANCE_ADD_BAND_{}" . format ( band ) ) sun_elev = metadata [ "IMAGE_ATTRIBUTES" ] [ "SUN_ELEVATION" ] arr = 10000 * reflectance . reflectance ( arr , multi_reflect , add_reflect , sun_elev , src_nodata = 0 ) params = { } if histogram_bins : params . 
update ( dict ( bins = histogram_bins ) ) if histogram_range : params . update ( dict ( range = histogram_range ) ) stats = { band : utils . _stats ( arr , percentiles = percentiles , ** params ) } return { "bounds" : { "value" : bounds , "crs" : dst_crs . to_string ( ) if isinstance ( dst_crs , CRS ) else dst_crs , } , "statistics" : stats , } | Retrieve landsat dataset statistics . |
20,115 | def tile ( sceneid , tile_x , tile_y , tile_z , bands = ( "4" , "3" , "2" ) , tilesize = 256 , pan = False ) : if not isinstance ( bands , tuple ) : bands = tuple ( ( bands , ) ) for band in bands : if band not in LANDSAT_BANDS : raise InvalidBandName ( "{} is not a valid Landsat band name" . format ( band ) ) scene_params = _landsat_parse_scene_id ( sceneid ) meta_data = _landsat_get_mtl ( sceneid ) . get ( "L1_METADATA_FILE" ) landsat_address = "{}/{}" . format ( LANDSAT_BUCKET , scene_params [ "key" ] ) wgs_bounds = toa_utils . _get_bounds_from_metadata ( meta_data [ "PRODUCT_METADATA" ] ) if not utils . tile_exists ( wgs_bounds , tile_z , tile_x , tile_y ) : raise TileOutsideBounds ( "Tile {}/{}/{} is outside image bounds" . format ( tile_z , tile_x , tile_y ) ) mercator_tile = mercantile . Tile ( x = tile_x , y = tile_y , z = tile_z ) tile_bounds = mercantile . xy_bounds ( mercator_tile ) addresses = [ "{}_B{}.TIF" . format ( landsat_address , band ) for band in bands ] _tiler = partial ( utils . tile_read , bounds = tile_bounds , tilesize = tilesize , nodata = 0 ) with futures . ThreadPoolExecutor ( max_workers = MAX_THREADS ) as executor : data , masks = zip ( * list ( executor . map ( _tiler , addresses ) ) ) data = np . concatenate ( data ) mask = np . all ( masks , axis = 0 ) . astype ( np . uint8 ) * 255 if pan : pan_address = "{}_B8.TIF" . format ( landsat_address ) matrix_pan , mask = utils . tile_read ( pan_address , tile_bounds , tilesize , nodata = 0 ) data = utils . pansharpening_brovey ( data , matrix_pan , 0.2 , matrix_pan . dtype ) sun_elev = meta_data [ "IMAGE_ATTRIBUTES" ] [ "SUN_ELEVATION" ] for bdx , band in enumerate ( bands ) : if int ( band ) > 9 : multi_rad = meta_data [ "RADIOMETRIC_RESCALING" ] . get ( "RADIANCE_MULT_BAND_{}" . format ( band ) ) add_rad = meta_data [ "RADIOMETRIC_RESCALING" ] . get ( "RADIANCE_ADD_BAND_{}" . format ( band ) ) k1 = meta_data [ "TIRS_THERMAL_CONSTANTS" ] . get ( "K1_CONSTANT_BAND_{}" . 
format ( band ) ) k2 = meta_data [ "TIRS_THERMAL_CONSTANTS" ] . get ( "K2_CONSTANT_BAND_{}" . format ( band ) ) data [ bdx ] = brightness_temp . brightness_temp ( data [ bdx ] , multi_rad , add_rad , k1 , k2 ) else : multi_reflect = meta_data [ "RADIOMETRIC_RESCALING" ] . get ( "REFLECTANCE_MULT_BAND_{}" . format ( band ) ) add_reflect = meta_data [ "RADIOMETRIC_RESCALING" ] . get ( "REFLECTANCE_ADD_BAND_{}" . format ( band ) ) data [ bdx ] = 10000 * reflectance . reflectance ( data [ bdx ] , multi_reflect , add_reflect , sun_elev ) return data , mask | Create mercator tile from Landsat - 8 data . |
20,116 | def _cbers_parse_scene_id ( sceneid ) : if not re . match ( r"^CBERS_4_\w+_[0-9]{8}_[0-9]{3}_[0-9]{3}_L[0-9]$" , sceneid ) : raise InvalidCBERSSceneId ( "Could not match {}" . format ( sceneid ) ) cbers_pattern = ( r"(?P<satellite>\w+)_" r"(?P<mission>[0-9]{1})" r"_" r"(?P<instrument>\w+)" r"_" r"(?P<acquisitionYear>[0-9]{4})" r"(?P<acquisitionMonth>[0-9]{2})" r"(?P<acquisitionDay>[0-9]{2})" r"_" r"(?P<path>[0-9]{3})" r"_" r"(?P<row>[0-9]{3})" r"_" r"(?P<processingCorrectionLevel>L[0-9]{1})$" ) meta = None match = re . match ( cbers_pattern , sceneid , re . IGNORECASE ) if match : meta = match . groupdict ( ) path = meta [ "path" ] row = meta [ "row" ] instrument = meta [ "instrument" ] meta [ "key" ] = "CBERS4/{}/{}/{}/{}" . format ( instrument , path , row , sceneid ) meta [ "scene" ] = sceneid instrument_params = { "MUX" : { "reference_band" : "6" , "bands" : [ "5" , "6" , "7" , "8" ] , "rgb" : ( "7" , "6" , "5" ) , } , "AWFI" : { "reference_band" : "14" , "bands" : [ "13" , "14" , "15" , "16" ] , "rgb" : ( "15" , "14" , "13" ) , } , "PAN10M" : { "reference_band" : "4" , "bands" : [ "2" , "3" , "4" ] , "rgb" : ( "3" , "4" , "2" ) , } , "PAN5M" : { "reference_band" : "1" , "bands" : [ "1" ] , "rgb" : ( "1" , "1" , "1" ) } , } meta [ "reference_band" ] = instrument_params [ instrument ] [ "reference_band" ] meta [ "bands" ] = instrument_params [ instrument ] [ "bands" ] meta [ "rgb" ] = instrument_params [ instrument ] [ "rgb" ] return meta | Parse CBERS scene id . |
20,117 | def metadata ( sceneid , pmin = 2 , pmax = 98 , ** kwargs ) : scene_params = _cbers_parse_scene_id ( sceneid ) cbers_address = "{}/{}" . format ( CBERS_BUCKET , scene_params [ "key" ] ) bands = scene_params [ "bands" ] ref_band = scene_params [ "reference_band" ] info = { "sceneid" : sceneid } addresses = [ "{}/{}_BAND{}.tif" . format ( cbers_address , sceneid , band ) for band in bands ] _stats_worker = partial ( utils . raster_get_stats , indexes = [ 1 ] , nodata = 0 , overview_level = 2 , percentiles = ( pmin , pmax ) , ** kwargs ) with futures . ThreadPoolExecutor ( max_workers = MAX_THREADS ) as executor : responses = list ( executor . map ( _stats_worker , addresses ) ) info [ "bounds" ] = [ r [ "bounds" ] for b , r in zip ( bands , responses ) if b == ref_band ] [ 0 ] info [ "statistics" ] = { b : v for b , d in zip ( bands , responses ) for k , v in d [ "statistics" ] . items ( ) } return info | Return band bounds and statistics . |
20,118 | def __datasets_desc ( ) : datasets = __get_data_folder_path ( ) + 'datasets.csv' df = pd . read_csv ( datasets ) df = df [ [ 'Item' , 'Title' ] ] df . columns = [ 'dataset_id' , 'title' ] return df | return a df of the available datasets with description |
20,119 | def dumb_property_dict ( style ) : return dict ( [ ( x . strip ( ) , y . strip ( ) ) for x , y in [ z . split ( ':' , 1 ) for z in style . split ( ';' ) if ':' in z ] ] ) | returns a hash of css attributes |
20,120 | def dumb_css_parser ( data ) : data += ';' importIndex = data . find ( '@import' ) while importIndex != - 1 : data = data [ 0 : importIndex ] + data [ data . find ( ';' , importIndex ) + 1 : ] importIndex = data . find ( '@import' ) elements = [ x . split ( '{' ) for x in data . split ( '}' ) if '{' in x . strip ( ) ] try : elements = dict ( [ ( a . strip ( ) , dumb_property_dict ( b ) ) for a , b in elements ] ) except ValueError : elements = { } return elements | returns a hash of css selectors each of which contains a hash of css attributes |
20,121 | def element_style ( attrs , style_def , parent_style ) : style = parent_style . copy ( ) if 'class' in attrs : for css_class in attrs [ 'class' ] . split ( ) : css_style = style_def [ '.' + css_class ] style . update ( css_style ) if 'style' in attrs : immediate_style = dumb_property_dict ( attrs [ 'style' ] ) style . update ( immediate_style ) return style | returns a hash of the final style attributes of the element |
20,122 | def google_text_emphasis ( style ) : emphasis = [ ] if 'text-decoration' in style : emphasis . append ( style [ 'text-decoration' ] ) if 'font-style' in style : emphasis . append ( style [ 'font-style' ] ) if 'font-weight' in style : emphasis . append ( style [ 'font-weight' ] ) return emphasis | return a list of all emphasis modifiers of the element |
20,123 | def google_fixed_width_font ( style ) : font_family = '' if 'font-family' in style : font_family = style [ 'font-family' ] if 'Courier New' == font_family or 'Consolas' == font_family : return True return False | check if the css of the current element defines a fixed width font |
20,124 | def escape_md_section ( text , snob = False ) : text = md_backslash_matcher . sub ( r"\\\1" , text ) if snob : text = md_chars_matcher_all . sub ( r"\\\1" , text ) text = md_dot_matcher . sub ( r"\1\\\2" , text ) text = md_plus_matcher . sub ( r"\1\\\2" , text ) text = md_dash_matcher . sub ( r"\1\\\2" , text ) return text | Escapes markdown - sensitive characters across whole document sections . |
20,125 | def handle_emphasis ( self , start , tag_style , parent_style ) : tag_emphasis = google_text_emphasis ( tag_style ) parent_emphasis = google_text_emphasis ( parent_style ) strikethrough = 'line-through' in tag_emphasis and self . hide_strikethrough bold = 'bold' in tag_emphasis and not 'bold' in parent_emphasis italic = 'italic' in tag_emphasis and not 'italic' in parent_emphasis fixed = google_fixed_width_font ( tag_style ) and not google_fixed_width_font ( parent_style ) and not self . pre if start : if bold or italic or fixed : self . emphasis += 1 if strikethrough : self . quiet += 1 if italic : self . o ( self . emphasis_mark ) self . drop_white_space += 1 if bold : self . o ( self . strong_mark ) self . drop_white_space += 1 if fixed : self . o ( '`' ) self . drop_white_space += 1 self . code = True else : if bold or italic or fixed : self . emphasis -= 1 self . space = 0 self . outtext = self . outtext . rstrip ( ) if fixed : if self . drop_white_space : self . drop_last ( 1 ) self . drop_white_space -= 1 else : self . o ( '`' ) self . code = False if bold : if self . drop_white_space : self . drop_last ( 2 ) self . drop_white_space -= 1 else : self . o ( self . strong_mark ) if italic : if self . drop_white_space : self . drop_last ( 1 ) self . drop_white_space -= 1 else : self . o ( self . emphasis_mark ) if ( bold or italic ) and not self . emphasis : self . o ( " " ) if strikethrough : self . quiet -= 1 | handles various text emphases |
20,126 | def google_nest_count ( self , style ) : nest_count = 0 if 'margin-left' in style : nest_count = int ( style [ 'margin-left' ] [ : - 2 ] ) / self . google_list_indent return nest_count | calculate the nesting count of google doc lists |
20,127 | def optwrap ( self , text ) : if not self . body_width : return text assert wrap , "Requires Python 2.3." result = '' newlines = 0 for para in text . split ( "\n" ) : if len ( para ) > 0 : if not skipwrap ( para ) : result += "\n" . join ( wrap ( para , self . body_width ) ) if para . endswith ( ' ' ) : result += " \n" newlines = 1 else : result += "\n\n" newlines = 2 else : if not onlywhite ( para ) : result += para + "\n" newlines = 1 else : if newlines < 2 : result += "\n" newlines += 1 return result | Wrap all paragraphs in the provided text . |
20,128 | def similarity ( w1 , w2 , threshold = 0.5 ) : ratio = SM ( None , str ( w1 ) . lower ( ) , str ( w2 ) . lower ( ) ) . ratio ( ) return ratio if ratio > threshold else 0 | compare two strings words and return ratio of smiliarity be it larger than the threshold or 0 otherwise . |
20,129 | def triangle_area ( p0 , p1 , p2 ) : if p2 . ndim < 2 : p2 = p2 [ np . newaxis , : ] area = 0.5 * np . abs ( p0 [ 0 ] * p1 [ 1 ] - p0 [ 0 ] * p2 [ : , 1 ] + p1 [ 0 ] * p2 [ : , 1 ] - p1 [ 0 ] * p0 [ 1 ] + p2 [ : , 0 ] * p0 [ 1 ] - p2 [ : , 0 ] * p1 [ 1 ] ) return area | p2 can be a vector |
20,130 | def inROI ( self , Y ) : if Y . ndim > 1 : area = np . zeros ( ( Y . shape [ 0 ] , 4 ) ) else : area = np . zeros ( ( 1 , 4 ) ) pts = np . zeros ( ( 0 , ) , int ) pdist = np . zeros ( ( 0 , ) , int ) dist0 = 0 for k in range ( len ( self . prect ) ) : self . square_area = ( triangle_area ( self . prect [ k ] [ 0 , : ] , self . prect [ k ] [ 1 , : ] , self . prect [ k ] [ 2 , : ] ) + triangle_area ( self . prect [ k ] [ 2 , : ] , self . prect [ k ] [ 3 , : ] , self . prect [ k ] [ 4 , : ] ) ) for n in range ( 4 ) : area [ : , n ] = triangle_area ( self . prect [ k ] [ 0 + n , : ] , self . prect [ k ] [ 1 + n , : ] , Y ) newpts = np . array ( ( area . sum ( axis = 1 ) <= self . square_area + 1e-5 ) . nonzero ( ) ) . flatten ( ) . astype ( int ) if newpts . size > 0 : pts = np . concatenate ( ( pts , newpts ) ) newdists = self . orthproj ( Y [ newpts , : ] , k ) + dist0 pdist = np . concatenate ( ( pdist , newdists ) ) dist0 += ( np . diff ( self . pos [ k ] , axis = 0 ) [ 0 , : ] ** 2 ) . sum ( ) if k < len ( self . prect ) - 1 : pcent = self . pos [ k ] [ 1 , : ] dist = ( ( Y - pcent [ np . newaxis , : ] ) ** 2 ) . sum ( axis = 1 ) ** 0.5 newpts = np . array ( ( dist <= self . d ) . nonzero ( ) [ 0 ] . astype ( int ) ) if newpts . size > 0 : pts = np . concatenate ( ( pts , newpts ) ) newdists = dist0 * np . ones ( newpts . shape ) pdist = np . concatenate ( ( pdist , newdists ) ) pts , inds = np . unique ( pts , return_index = True ) pdist = pdist [ inds ] return pts , pdist | which points are inside ROI |
20,131 | def dwrap ( kx , nc ) : q1 = np . mod ( kx , nc ) q2 = np . minimum ( q1 , nc - q1 ) return q2 | compute a wrapped distance |
20,132 | def transform ( self , X ) : iclustup = [ ] dims = self . n_components if hasattr ( self , 'isort1' ) : if X . shape [ 1 ] == self . v . shape [ 0 ] : X = X @ self . v nclust = self . n_X AtS = self . A . T @ self . S vnorm = np . sum ( self . S * ( self . A @ AtS ) , axis = 0 ) [ np . newaxis , : ] cv = X @ AtS cmap = np . maximum ( 0. , cv ) ** 2 / vnorm iclustup , cmax = upsample ( np . sqrt ( cmap ) , dims , nclust , 10 ) else : print ( 'ERROR: new points do not have as many features as original data' ) else : print ( 'ERROR: need to fit model first before you can embed new points' ) if iclustup . ndim > 1 : iclustup = iclustup . T else : iclustup = iclustup . flatten ( ) return iclustup | if already fit can add new points and see where they fall |
20,133 | def goingLive ( self , ctx , client ) : self . ctx = ctx self . client = client | Overrides nevow method ; not really safe to just save ctx client in self for multiple clients but nice and simple . |
20,134 | def _router_address ( self , data ) : args = data . split ( ) [ 1 : ] try : self . _relay_attrs [ 'ip_v6' ] . extend ( args ) except KeyError : self . _relay_attrs [ 'ip_v6' ] = list ( args ) | only for IPv6 addresses |
20,135 | def _notify ( self , func , * args , ** kw ) : for x in self . listeners : try : getattr ( x , func ) ( * args , ** kw ) except Exception : log . err ( ) | Internal helper . Calls the IStreamListener function func with the given args guarding around errors . |
20,136 | def build_timeout_circuit ( tor_state , reactor , path , timeout , using_guards = False ) : timed_circuit = [ ] d = tor_state . build_circuit ( routers = path , using_guards = using_guards ) def get_circuit ( c ) : timed_circuit . append ( c ) return c def trap_cancel ( f ) : f . trap ( defer . CancelledError ) if timed_circuit : d2 = timed_circuit [ 0 ] . close ( ) else : d2 = defer . succeed ( None ) d2 . addCallback ( lambda _ : Failure ( CircuitBuildTimedOutError ( "circuit build timed out" ) ) ) return d2 d . addCallback ( get_circuit ) d . addCallback ( lambda circ : circ . when_built ( ) ) d . addErrback ( trap_cancel ) reactor . callLater ( timeout , d . cancel ) return d | Build a new circuit within a timeout . |
20,137 | def age ( self , now = None ) : if not self . time_created : return None if now is None : now = datetime . utcnow ( ) return ( now - self . time_created ) . seconds | Returns an integer which is the difference in seconds from now to when this circuit was created . |
20,138 | def _parse_version_reply ( self ) : "waiting for a version reply" if len ( self . _data ) >= 2 : reply = self . _data [ : 2 ] self . _data = self . _data [ 2 : ] ( version , method ) = struct . unpack ( 'BB' , reply ) if version == 5 and method in [ 0x00 , 0x02 ] : self . version_reply ( method ) else : if version != 5 : self . version_error ( SocksError ( "Expected version 5, got {}" . format ( version ) ) ) else : self . version_error ( SocksError ( "Wanted method 0 or 2, got {}" . format ( method ) ) ) | waiting for a version reply |
20,139 | def _parse_request_reply ( self ) : "waiting for a reply to our request" if len ( self . _data ) < 8 : return msg = self . _data [ : 4 ] ( version , reply , _ , typ ) = struct . unpack ( 'BBBB' , msg ) if version != 5 : self . reply_error ( SocksError ( "Expected version 5, got {}" . format ( version ) ) ) return if reply != self . SUCCEEDED : self . reply_error ( _create_socks_error ( reply ) ) return reply_dispatcher = { self . REPLY_IPV4 : self . _parse_ipv4_reply , self . REPLY_HOST : self . _parse_domain_name_reply , self . REPLY_IPV6 : self . _parse_ipv6_reply , } try : method = reply_dispatcher [ typ ] except KeyError : self . reply_error ( SocksError ( "Unexpected response type {}" . format ( typ ) ) ) return method ( ) | waiting for a reply to our request |
20,140 | def _make_connection ( self , addr , port ) : "make our proxy connection" sender = self . _create_connection ( addr , port ) self . _sender = sender self . _when_done . fire ( sender ) | make our proxy connection |
20,141 | def _relay_data ( self ) : "relay any data we have" if self . _data : d = self . _data self . _data = b'' self . _sender . dataReceived ( d ) | relay any data we have |
20,142 | def _send_connect_request ( self ) : "sends CONNECT request" host = self . _addr . host port = self . _addr . port if isinstance ( self . _addr , ( IPv4Address , IPv6Address ) ) : is_v6 = isinstance ( self . _addr , IPv6Address ) self . _data_to_send ( struct . pack ( '!BBBB4sH' , 5 , 0x01 , 0x00 , 0x04 if is_v6 else 0x01 , inet_pton ( AF_INET6 if is_v6 else AF_INET , host ) , port , ) ) else : host = host . encode ( 'ascii' ) self . _data_to_send ( struct . pack ( '!BBBBB{}sH' . format ( len ( host ) ) , 5 , 0x01 , 0x00 , 0x03 , len ( host ) , host , port , ) ) | sends CONNECT request |
20,143 | def get_location ( self ) : if self . _location : return succeed ( self . _location ) if self . ip != 'unknown' : self . _location = NetLocation ( self . ip ) else : self . _location = NetLocation ( None ) if not self . _location . countrycode and self . ip != 'unknown' : d = self . controller . get_info_raw ( 'ip-to-country/' + self . ip ) d . addCallback ( self . _set_country ) d . addCallback ( lambda _ : self . _location ) return d return succeed ( self . _location ) | Returns a Deferred that fires with a NetLocation object for this router . |
20,144 | def policy ( self , args ) : word = args [ 0 ] if word == 'reject' : self . accepted_ports = None self . rejected_ports = [ ] target = self . rejected_ports elif word == 'accept' : self . accepted_ports = [ ] self . rejected_ports = None target = self . accepted_ports else : raise RuntimeError ( "Don't understand policy word \"%s\"" % word ) for port in args [ 1 ] . split ( ',' ) : if '-' in port : ( a , b ) = port . split ( '-' ) target . append ( PortRange ( int ( a ) , int ( b ) ) ) else : target . append ( int ( port ) ) | setter for the policy descriptor |
20,145 | def accepts_port ( self , port ) : if self . rejected_ports is None and self . accepted_ports is None : raise RuntimeError ( "policy hasn't been set yet" ) if self . rejected_ports : for x in self . rejected_ports : if port == x : return False return True for x in self . accepted_ports : if port == x : return True return False | Query whether this Router will accept the given port . |
20,146 | def _set_country ( self , c ) : self . location . countrycode = c . split ( ) [ 0 ] . split ( '=' ) [ 1 ] . strip ( ) . upper ( ) | callback if we used Tor s GETINFO ip - to - country |
20,147 | def _load_private_key_file ( fname ) : with open ( fname , "rb" ) as f : data = f . read ( ) if b"\x00\x00\x00" in data : blob = data [ data . find ( b"\x00\x00\x00" ) + 3 : ] return u"ED25519-V3:{}" . format ( b2a_base64 ( blob . strip ( ) ) . decode ( 'ascii' ) . strip ( ) ) if b"-----BEGIN RSA PRIVATE KEY-----" in data : blob = "" . join ( data . decode ( 'ascii' ) . split ( '\n' ) [ 1 : - 2 ] ) return u"RSA1024:{}" . format ( blob ) blob = data . decode ( 'ascii' ) . strip ( ) if ':' in blob : kind , key = blob . split ( ':' , 1 ) if kind in [ 'ED25519-V3' , 'RSA1024' ] : return blob raise ValueError ( "'{}' does not appear to contain v2 or v3 private key data" . format ( fname , ) ) | Loads an onion - service private - key from the given file . This can be either a key blob as returned from a previous ADD_ONION call or a v3 or v2 file as created by Tor when using the HiddenServiceDir directive .
20,148 | def config_attributes ( self ) : rtn = [ ( 'HiddenServiceDir' , str ( self . dir ) ) ] if self . conf . _supports [ 'HiddenServiceDirGroupReadable' ] and self . group_readable : rtn . append ( ( 'HiddenServiceDirGroupReadable' , str ( 1 ) ) ) for port in self . ports : rtn . append ( ( 'HiddenServicePort' , str ( port ) ) ) if self . version : rtn . append ( ( 'HiddenServiceVersion' , str ( self . version ) ) ) for authline in self . authorize_client : rtn . append ( ( 'HiddenServiceAuthorizeClient' , str ( authline ) ) ) return rtn | Helper method used by TorConfig when generating a torrc file . |
20,149 | def add_to_tor ( self , protocol ) : upload_d = _await_descriptor_upload ( protocol , self , progress = None , await_all_uploads = False ) ports = ' ' . join ( map ( lambda x : 'Port=' + x . strip ( ) , self . _ports ) ) cmd = 'ADD_ONION %s %s' % ( self . _key_blob , ports ) ans = yield protocol . queue_command ( cmd ) ans = find_keywords ( ans . split ( '\n' ) ) self . hostname = ans [ 'ServiceID' ] + '.onion' if self . _key_blob . startswith ( 'NEW:' ) : self . private_key = ans [ 'PrivateKey' ] else : self . private_key = self . _key_blob log . msg ( 'Created hidden-service at' , self . hostname ) log . msg ( "Created '{}', waiting for descriptor uploads." . format ( self . hostname ) ) yield upload_d | Returns a Deferred which fires with self after at least one descriptor has been uploaded . Errback if no descriptor upload succeeds . |
20,150 | def remove_from_tor ( self , protocol ) : r = yield protocol . queue_command ( 'DEL_ONION %s' % self . hostname [ : - 6 ] ) if r . strip ( ) != 'OK' : raise RuntimeError ( 'Failed to remove hidden service: "%s".' % r ) | Returns a Deferred which fires with None |
20,151 | def from_protocol ( proto ) : cfg = TorConfig ( control = proto ) yield cfg . post_bootstrap defer . returnValue ( cfg ) | This creates and returns a ready - to - go TorConfig instance from the given protocol which should be an instance of TorControlProtocol . |
20,152 | def socks_endpoint ( self , reactor , port = None ) : if len ( self . SocksPort ) == 0 : raise RuntimeError ( "No SOCKS ports configured" ) socks_config = None if port is None : socks_config = self . SocksPort [ 0 ] else : port = str ( port ) if ' ' in port : raise ValueError ( "Can't specify options; use create_socks_endpoint instead" ) for idx , port_config in enumerate ( self . SocksPort ) : if port_config . split ( ) [ 0 ] == port : socks_config = port_config break if socks_config is None : raise RuntimeError ( "No SOCKSPort configured for port {}" . format ( port ) ) return _endpoint_from_socksport_line ( reactor , socks_config ) | Returns a TorSocksEndpoint configured to use an already - configured SOCKSPort from the Tor we re connected to . By default this will be the very first SOCKSPort . |
20,153 | def attach_protocol ( self , proto ) : if self . _protocol is not None : raise RuntimeError ( "Already have a protocol." ) self . save ( ) self . __dict__ [ '_protocol' ] = proto del self . __dict__ [ '_accept_all_' ] self . __dict__ [ 'post_bootstrap' ] = defer . Deferred ( ) if proto . post_bootstrap : proto . post_bootstrap . addCallback ( self . bootstrap ) return self . __dict__ [ 'post_bootstrap' ] | returns a Deferred that fires once we ve set this object up to track the protocol . Fails if we already have a protocol . |
20,154 | def get_type ( self , name ) : if name . lower ( ) == 'hiddenservices' : return FilesystemOnionService return type ( self . parsers [ name ] ) | return the type of a config key . |
20,155 | def bootstrap ( self , arg = None ) : try : d = self . protocol . add_event_listener ( 'CONF_CHANGED' , self . _conf_changed ) except RuntimeError : log . msg ( "Can't listen for CONF_CHANGED event; won't stay up-to-date " "with other clients." ) d = defer . succeed ( None ) d . addCallback ( lambda _ : self . protocol . get_info_raw ( "config/names" ) ) d . addCallback ( self . _do_setup ) d . addCallback ( self . do_post_bootstrap ) d . addErrback ( self . do_post_errback ) | This only takes args so it can be used as a callback . Don t pass an arg it is ignored . |
20,156 | def save ( self ) : if not self . needs_save ( ) : return defer . succeed ( self ) args = [ ] directories = [ ] for ( key , value ) in self . unsaved . items ( ) : if key == 'HiddenServices' : self . config [ 'HiddenServices' ] = value services = list ( ) for hs in value : if IOnionClient . providedBy ( hs ) : parent = IOnionClient ( hs ) . parent if parent not in services : services . append ( parent ) elif isinstance ( hs , ( EphemeralOnionService , EphemeralHiddenService ) ) : raise ValueError ( "Only filesystem based Onion services may be added" " via TorConfig.hiddenservices; ephemeral services" " must be created with 'create_onion_service'." ) else : if hs not in services : services . append ( hs ) for hs in services : for ( k , v ) in hs . config_attributes ( ) : if k == 'HiddenServiceDir' : if v not in directories : directories . append ( v ) args . append ( k ) args . append ( v ) else : raise RuntimeError ( "Trying to add hidden service with same HiddenServiceDir: %s" % v ) else : args . append ( k ) args . append ( v ) continue if isinstance ( value , list ) : for x in value : if x is not DEFAULT_VALUE : args . append ( key ) args . append ( str ( x ) ) else : args . append ( key ) args . append ( value ) real_name = self . _find_real_name ( key ) if not isinstance ( value , list ) and real_name in self . parsers : value = self . parsers [ real_name ] . parse ( value ) self . config [ real_name ] = value if self . protocol : d = self . protocol . set_conf ( * args ) d . addCallback ( self . _save_completed ) return d else : self . _save_completed ( ) return defer . succeed ( self ) | Save any outstanding items . This returns a Deferred which will errback if Tor was unhappy with anything or callback with this TorConfig object on success . |
20,157 | def _timeout_expired ( self ) : self . _did_timeout = True try : self . transport . signalProcess ( 'TERM' ) except error . ProcessExitedAlready : self . transport . loseConnection ( ) fail = Failure ( RuntimeError ( "timeout while launching Tor" ) ) self . _maybe_notify_connected ( fail ) | A timeout was supplied during setup and the time has run out . |
20,158 | def cleanup ( self ) : all ( [ delete_file_or_tree ( f ) for f in self . to_delete ] ) self . to_delete = [ ] | Clean up my temporary files . |
20,159 | def progress ( self , percent , tag , summary ) : if self . progress_updates : self . progress_updates ( percent , tag , summary ) | Can be overridden or monkey - patched if you want to get progress updates yourself . |
20,160 | def main ( reactor ) : control_ep = UNIXClientEndpoint ( reactor , '/var/run/tor/control' ) tor = yield txtorcon . connect ( reactor , control_ep ) state = yield tor . create_state ( ) print ( "Closing all circuits:" ) for circuit in list ( state . circuits . values ( ) ) : path = '->' . join ( map ( lambda r : r . id_hex , circuit . path ) ) print ( "Circuit {} through {}" . format ( circuit . id , path ) ) for stream in circuit . streams : print ( " Stream {} to {}" . format ( stream . id , stream . target_host ) ) yield stream . close ( ) print ( " closed" ) yield circuit . close ( ) print ( "closed" ) yield tor . quit ( ) | Close all open streams and circuits in the Tor we connect to |
20,161 | def update ( self , * args ) : gmtexpires = None ( name , ip , expires ) = args [ : 3 ] for arg in args : if arg . lower ( ) . startswith ( 'expires=' ) : gmtexpires = arg [ 8 : ] if gmtexpires is None : if len ( args ) == 3 : gmtexpires = expires else : if args [ 2 ] == 'NEVER' : gmtexpires = args [ 2 ] else : gmtexpires = args [ 3 ] self . name = name self . ip = maybe_ip_addr ( ip ) if self . ip == '<error>' : self . _expire ( ) return fmt = "%Y-%m-%d %H:%M:%S" oldexpires = self . expires if gmtexpires . upper ( ) == 'NEVER' : self . expires = None else : self . expires = datetime . datetime . strptime ( gmtexpires , fmt ) self . created = datetime . datetime . utcnow ( ) if self . expires is not None : if oldexpires is None : if self . expires <= self . created : diff = datetime . timedelta ( seconds = 0 ) else : diff = self . expires - self . created self . expiry = self . map . scheduler . callLater ( diff . seconds , self . _expire ) else : diff = self . expires - oldexpires self . expiry . delay ( diff . seconds ) | deals with an update from Tor ; see parsing logic in torcontroller |
20,162 | def _expire ( self ) : del self . map . addr [ self . name ] self . map . notify ( "addrmap_expired" , * [ self . name ] , ** { } ) | callback done via callLater |
20,163 | def create_tbb_web_headers ( ) : return Headers ( { b"User-Agent" : [ b"Mozilla/5.0 (Windows NT 6.1; rv:45.0) Gecko/20100101 Firefox/45.0" ] , b"Accept" : [ b"text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" ] , b"Accept-Language" : [ b"en-US,en;q=0.5" ] , b"Accept-Encoding" : [ b"gzip, deflate" ] , } ) | Returns a new twisted . web . http_headers . Headers instance populated with tags to mimic Tor Browser . These include values for User - Agent Accept Accept - Language and Accept - Encoding . |
20,164 | def version_at_least ( version_string , major , minor , micro , patch ) : parts = re . match ( r'^([0-9]+)\.([0-9]+)\.([0-9]+)\.([0-9]+).*$' , version_string , ) for ver , gold in zip ( parts . group ( 1 , 2 , 3 , 4 ) , ( major , minor , micro , patch ) ) : if int ( ver ) < int ( gold ) : return False elif int ( ver ) > int ( gold ) : return True return True | This returns True if the version_string represents a Tor version of at least major . minor . micro . patch version ignoring any trailing specifiers . |
20,165 | def find_tor_binary ( globs = ( '/usr/sbin/' , '/usr/bin/' , '/Applications/TorBrowser_*.app/Contents/MacOS/' ) , system_tor = True ) : if system_tor : try : proc = subprocess . Popen ( ( 'which tor' ) , stdout = subprocess . PIPE , stderr = subprocess . PIPE , shell = True ) except OSError : pass else : stdout , _ = proc . communicate ( ) if proc . poll ( ) == 0 and stdout != '' : return stdout . strip ( ) for pattern in globs : for path in glob . glob ( pattern ) : torbin = os . path . join ( path , 'tor' ) if is_executable ( torbin ) : return torbin return None | Tries to find the tor executable using the shell first or in the paths whose glob - patterns are in the given globs - tuple .
20,166 | def maybe_ip_addr ( addr ) : if six . PY2 and isinstance ( addr , str ) : addr = unicode ( addr ) try : return ipaddress . ip_address ( addr ) except ValueError : pass return str ( addr ) | Tries to return an IPAddress otherwise returns a string . |
20,167 | def delete_file_or_tree ( * args ) : for f in args : try : os . unlink ( f ) except OSError : shutil . rmtree ( f , ignore_errors = True ) | For every path in args try to delete it as a file or a directory tree . Ignores deletion errors . |
20,168 | def available_tcp_port ( reactor ) : endpoint = serverFromString ( reactor , 'tcp:0:interface=127.0.0.1' ) port = yield endpoint . listen ( NoOpProtocolFactory ( ) ) address = port . getHost ( ) yield port . stopListening ( ) defer . returnValue ( address . port ) | Returns a Deferred firing an available TCP port on localhost . It does so by listening on port 0 ; then stopListening and fires the assigned port number . |
20,169 | def maybe_coroutine ( obj ) : if six . PY3 and asyncio . iscoroutine ( obj ) : return defer . ensureDeferred ( obj ) return obj | If obj is a coroutine and we re using Python3 wrap it in ensureDeferred . Otherwise return the original object . |
20,170 | def _is_non_public_numeric_address ( host ) : try : a = ipaddress . ip_address ( six . text_type ( host ) ) except ValueError : return False if a . is_loopback or a . is_multicast or a . is_private or a . is_reserved or a . is_unspecified : return True return False | returns True if host is not public |
20,171 | def _compute_permanent_id ( private_key ) : pub = private_key . public_key ( ) p = pub . public_bytes ( encoding = serialization . Encoding . PEM , format = serialization . PublicFormat . PKCS1 ) z = '' . join ( p . decode ( 'ascii' ) . strip ( ) . split ( '\n' ) [ 1 : - 1 ] ) b = base64 . b64decode ( z ) h1 = hashlib . new ( 'sha1' ) h1 . update ( b ) permanent_id = h1 . digest ( ) [ : 10 ] return base64 . b32encode ( permanent_id ) . lower ( ) . decode ( 'ascii' ) | Internal helper . Return an authenticated service s permanent ID given an RSA private key object . |
20,172 | def _validate_ports ( reactor , ports ) : if not isinstance ( ports , ( list , tuple ) ) : raise ValueError ( "'ports' must be a list of strings, ints or 2-tuples" ) processed_ports = [ ] for port in ports : if isinstance ( port , ( set , list , tuple ) ) : if len ( port ) != 2 : raise ValueError ( "'ports' must contain a single int or a 2-tuple of ints" ) remote , local = port try : remote = int ( remote ) except ValueError : raise ValueError ( "'ports' has a tuple with a non-integer " "component: {}" . format ( port ) ) try : local = int ( local ) except ValueError : if local . startswith ( 'unix:/' ) : pass else : if ':' not in local : raise ValueError ( "local port must be either an integer" " or start with unix:/ or be an IP:port" ) ip , port = local . split ( ':' ) if not _is_non_public_numeric_address ( ip ) : log . msg ( "'{}' used as onion port doesn't appear to be a " "local, numeric address" . format ( ip ) ) processed_ports . append ( "{} {}" . format ( remote , local ) ) else : processed_ports . append ( "{} 127.0.0.1:{}" . format ( remote , local ) ) elif isinstance ( port , ( six . text_type , str ) ) : _validate_single_port_string ( port ) processed_ports . append ( port ) else : try : remote = int ( port ) except ( ValueError , TypeError ) : raise ValueError ( "'ports' has a non-integer entry: {}" . format ( port ) ) local = yield available_tcp_port ( reactor ) processed_ports . append ( "{} 127.0.0.1:{}" . format ( remote , local ) ) defer . returnValue ( processed_ports ) | Internal helper for Onion services . Validates an incoming list of port mappings and returns a list of strings suitable for passing to other onion - services functions . |
20,173 | def handle ( self , data ) : if self . handler : state = self . handler ( data ) if state is None : return self . next_state return state return self . next_state | return next state . May override in a subclass to change behavior or pass a handler method to ctor |
20,174 | def pldist ( point , start , end ) : if np . all ( np . equal ( start , end ) ) : return np . linalg . norm ( point - start ) return np . divide ( np . abs ( np . linalg . norm ( np . cross ( end - start , start - point ) ) ) , np . linalg . norm ( end - start ) ) | Calculates the distance from point to the line given by the points start and end . |
20,175 | def rdp ( M , epsilon = 0 , dist = pldist , algo = "iter" , return_mask = False ) : if algo == "iter" : algo = partial ( rdp_iter , return_mask = return_mask ) elif algo == "rec" : if return_mask : raise NotImplementedError ( "return_mask=True not supported with algo=\"rec\"" ) algo = rdp_rec if "numpy" in str ( type ( M ) ) : return algo ( M , epsilon , dist ) return algo ( np . array ( M ) , epsilon , dist ) . tolist ( ) | Simplifies a given array of points using the Ramer - Douglas - Peucker algorithm . |
20,176 | def on_start_scene ( self , event : StartScene , signal : Callable [ [ Any ] , None ] ) : self . pause_scene ( ) self . start_scene ( event . new_scene , event . kwargs ) | Start a new scene . The current scene pauses . |
20,177 | def on_stop_scene ( self , event : events . StopScene , signal : Callable [ [ Any ] , None ] ) : self . stop_scene ( ) if self . current_scene is not None : signal ( events . SceneContinued ( ) ) else : signal ( events . Quit ( ) ) | Stop a running scene . If there s a scene on the stack it resumes . |
20,178 | def on_replace_scene ( self , event : events . ReplaceScene , signal ) : self . stop_scene ( ) self . start_scene ( event . new_scene , event . kwargs ) | Replace the running scene with a new one . |
20,179 | def register ( self , event_type : Union [ Type , _ellipsis ] , callback : Callable [ [ ] , Any ] ) : if not isinstance ( event_type , type ) and event_type is not ... : raise TypeError ( f"{type(self)}.register requires event_type to be a type." ) if not callable ( callback ) : raise TypeError ( f"{type(self)}.register requires callback to be callable." ) self . event_extensions [ event_type ] . append ( callback ) | Register a callback to be applied to an event at time of publishing . |
20,180 | def _build_index ( ) : global _module_file_index _module_file_index = { mod . __file__ : mod . __name__ for mod in sys . modules . values ( ) if hasattr ( mod , '__file__' ) and hasattr ( mod , '__name__' ) } | Rebuild _module_file_index from sys . modules |
20,181 | def logger ( self ) : frame = sys . _getframe ( 1 ) file_name = frame . f_code . co_filename module_name = _get_module ( file_name ) return logging . getLogger ( module_name ) | The logger for this class . |
20,182 | def pause ( self ) : if not self . _pause_level : self . _paused_time = self . _clock ( ) + self . _offset self . _paused_frame = self . current_frame self . _pause_level += 1 | Pause the animation . |
20,183 | def unpause ( self ) : self . _pause_level -= 1 if not self . _pause_level : self . _offset = self . _paused_time - self . _clock ( ) | Unpause the animation . |
20,184 | def run ( setup : Callable [ [ BaseScene ] , None ] = None , * , log_level = logging . WARNING , starting_scene = BaseScene ) : logging . basicConfig ( level = log_level ) kwargs = { "resolution" : ( 800 , 600 ) , "scene_kwargs" : { "set_up" : setup , } } with GameEngine ( starting_scene , ** kwargs ) as eng : eng . run ( ) | Run a small game . |
20,185 | def add ( self , game_object : Hashable , tags : Iterable [ Hashable ] = ( ) ) -> None : if isinstance ( tags , ( str , bytes ) ) : raise TypeError ( "You passed a string instead of an iterable, this probably isn't what you intended.\n\nTry making it a tuple." ) self . all . add ( game_object ) for kind in type ( game_object ) . mro ( ) : self . kinds [ kind ] . add ( game_object ) for tag in tags : self . tags [ tag ] . add ( game_object ) | Add a game_object to the container . |
20,186 | def get ( self , * , kind : Type = None , tag : Hashable = None , ** _ ) -> Iterator : if kind is None and tag is None : raise TypeError ( "get() takes at least one keyword-only argument. 'kind' or 'tag'." ) kinds = self . all tags = self . all if kind is not None : kinds = self . kinds [ kind ] if tag is not None : tags = self . tags [ tag ] return ( x for x in kinds . intersection ( tags ) ) | Get an iterator of objects by kind or tag . |
20,187 | def remove ( self , game_object : Hashable ) -> None : self . all . remove ( game_object ) for kind in type ( game_object ) . mro ( ) : self . kinds [ kind ] . remove ( game_object ) for s in self . tags . values ( ) : s . discard ( game_object ) | Remove the given object from the container . |
20,188 | def change ( self ) -> Tuple [ bool , dict ] : next = self . next self . next = None if self . next or not self . running : message = "The Scene.change interface is deprecated. Use the events commands instead." warn ( message , DeprecationWarning ) return self . running , { "scene_class" : next } | Default case override in subclass as necessary . |
20,189 | def add ( self , game_object : Hashable , tags : Iterable = ( ) ) -> None : self . game_objects . add ( game_object , tags ) | Add a game_object to the scene . |
20,190 | def get ( self , * , kind : Type = None , tag : Hashable = None , ** kwargs ) -> Iterator : return self . game_objects . get ( kind = kind , tag = tag , ** kwargs ) | Get an iterator of GameObjects by kind or tag . |
20,191 | def requirements ( section = None ) : if section is None : filename = "requirements.txt" else : filename = f"requirements-{section}.txt" with open ( filename ) as file : return [ line . strip ( ) for line in file ] | Helper for loading dependencies from requirements files . |
20,192 | def set_hash_key ( self , file ) : filehasher = hashlib . md5 ( ) while True : data = file . read ( 8192 ) if not data : break filehasher . update ( data ) file . seek ( 0 ) self . hash_key = filehasher . hexdigest ( ) | Calculate and store hash key for file . |
20,193 | def _box_in_box ( el , child ) : return all ( [ float ( el . get ( 'x0' ) ) <= float ( child . get ( 'x0' ) ) , float ( el . get ( 'x1' ) ) >= float ( child . get ( 'x1' ) ) , float ( el . get ( 'y0' ) ) <= float ( child . get ( 'y0' ) ) , float ( el . get ( 'y1' ) ) >= float ( child . get ( 'y1' ) ) , ] ) | Return True if child is contained within el . |
20,194 | def _comp_bbox ( el , el2 ) : if _comp_bbox_keys_required <= set ( el . keys ( ) ) and _comp_bbox_keys_required <= set ( el2 . keys ( ) ) : if _box_in_box ( el2 , el ) : return 1 if _box_in_box ( el , el2 ) : return - 1 return 0 | Return 1 if el in el2 - 1 if el2 in el else 0 |
20,195 | def get_pyquery ( self , tree = None , page_numbers = None ) : if not page_numbers : page_numbers = [ ] if tree is None : if not page_numbers and self . tree is not None : tree = self . tree else : tree = self . get_tree ( page_numbers ) if hasattr ( tree , 'getroot' ) : tree = tree . getroot ( ) return PyQuery ( tree , css_translator = PDFQueryTranslator ( ) ) | Wrap given tree in pyquery and return . If no tree supplied will generate one from given page_numbers or all page numbers . |
20,196 | def get_tree ( self , * page_numbers ) : cache_key = "_" . join ( map ( str , _flatten ( page_numbers ) ) ) tree = self . _parse_tree_cacher . get ( cache_key ) if tree is None : root = parser . makeelement ( "pdfxml" ) if self . doc . info : for k , v in list ( self . doc . info [ 0 ] . items ( ) ) : k = obj_to_string ( k ) v = obj_to_string ( resolve1 ( v ) ) try : root . set ( k , v ) except ValueError as e : if "Invalid attribute name" in e . args [ 0 ] : k = re . sub ( '\W' , '_' , k ) root . set ( k , v ) if not ( len ( page_numbers ) == 1 and page_numbers [ 0 ] is None ) : if page_numbers : pages = [ [ n , self . get_layout ( self . get_page ( n ) ) ] for n in _flatten ( page_numbers ) ] else : pages = enumerate ( self . get_layouts ( ) ) for n , page in pages : page = self . _xmlize ( page ) page . set ( 'page_index' , obj_to_string ( n ) ) page . set ( 'page_label' , self . doc . get_page_number ( n ) ) root . append ( page ) self . _clean_text ( root ) tree = etree . ElementTree ( root ) self . _parse_tree_cacher . set ( cache_key , tree ) return tree | Return lxml . etree . ElementTree for entire document or page numbers given if any . |
20,197 | def _clean_text ( self , branch ) : if branch . text and self . input_text_formatter : branch . text = self . input_text_formatter ( branch . text ) try : for child in branch : self . _clean_text ( child ) if branch . text and branch . text . find ( child . text ) >= 0 : branch . text = branch . text . replace ( child . text , '' , 1 ) except TypeError : pass | Remove text from node if same text exists in its children . Apply string formatter if set . |
20,198 | def get_layout ( self , page ) : if type ( page ) == int : page = self . get_page ( page ) self . interpreter . process_page ( page ) layout = self . device . get_result ( ) layout = self . _add_annots ( layout , page . annots ) return layout | Get PDFMiner Layout object for given page object or page number . |
20,199 | def _cached_pages ( self , target_page = - 1 ) : try : self . _pages_iter = self . _pages_iter or self . doc . get_pages ( ) except AttributeError : self . _pages_iter = self . _pages_iter or PDFPage . create_pages ( self . doc ) if target_page >= 0 : while len ( self . _pages ) <= target_page : next_page = next ( self . _pages_iter ) if not next_page : return None next_page . page_number = 0 self . _pages += [ next_page ] try : return self . _pages [ target_page ] except IndexError : return None self . _pages += list ( self . _pages_iter ) return self . _pages | Get a page or all pages from page generator caching results . This is necessary because PDFMiner searches recursively for pages so we won t know how many there are until we parse the whole document which we don t want to do until we need to . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.