idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
246,400
def bounds ( self ) : corners = [ self . image_corner ( corner ) for corner in self . corner_types ( ) ] return Polygon ( [ [ corner . x , corner . y ] for corner in corners ] )
Return image rectangle in pixels as shapely . Polygon .
49
12
246,401
def _calc_footprint ( self ) : corners = [ self . corner ( corner ) for corner in self . corner_types ( ) ] coords = [ ] for corner in corners : shape = corner . get_shape ( corner . crs ) coords . append ( [ shape . x , shape . y ] ) shp = Polygon ( coords ) # TODO use GeoVector.from_bounds self . _footprint = GeoVector ( shp , self . crs ) return self . _footprint
Return rectangle in world coordinates as GeoVector .
112
9
246,402
def to_raster ( self , vector ) : return transform ( vector . get_shape ( vector . crs ) , vector . crs , self . crs , dst_affine = ~ self . affine )
Return the vector in pixel coordinates as shapely . Geometry .
47
13
246,403
def reduce ( self , op ) : per_band = [ getattr ( np . ma , op ) ( self . image . data [ band , np . ma . getmaskarray ( self . image ) [ band , : , : ] == np . False_ ] ) for band in range ( self . num_bands ) ] return per_band
Reduce the raster to a score using op operation .
73
12
246,404
def mask ( self , vector , mask_shape_nodata = False ) : from telluric . collections import BaseCollection # crop raster to reduce memory footprint cropped = self . crop ( vector ) if isinstance ( vector , BaseCollection ) : shapes = [ cropped . to_raster ( feature ) for feature in vector ] else : shapes = [ cropped . to_raster ( vector ) ] mask = geometry_mask ( shapes , ( cropped . height , cropped . width ) , Affine . identity ( ) , invert = mask_shape_nodata ) masked = cropped . deepcopy_with ( ) masked . image . mask |= mask return masked
Set pixels outside vector as nodata .
141
8
246,405
def mask_by_value ( self , nodata ) : return self . copy_with ( image = np . ma . masked_array ( self . image . data , mask = self . image . data == nodata ) )
Return raster with a mask calculated based on provided value . Only pixels with value = nodata will be masked .
48
23
246,406
def save_cloud_optimized ( self , dest_url , resampling = Resampling . gauss , blocksize = 256 , overview_blocksize = 256 , creation_options = None ) : src = self # GeoRaster2.open(self._filename) with tempfile . NamedTemporaryFile ( suffix = '.tif' ) as tf : src . save ( tf . name , overviews = False ) convert_to_cog ( tf . name , dest_url , resampling , blocksize , overview_blocksize , creation_options ) geotiff = GeoRaster2 . open ( dest_url ) return geotiff
Save as Cloud Optimized GeoTiff object to a new file .
140
14
246,407
def _get_window_out_shape ( self , bands , window , xsize , ysize ) : if xsize and ysize is None : ratio = window . width / xsize ysize = math . ceil ( window . height / ratio ) elif ysize and xsize is None : ratio = window . height / ysize xsize = math . ceil ( window . width / ratio ) elif xsize is None and ysize is None : ysize = math . ceil ( window . height ) xsize = math . ceil ( window . width ) return ( len ( bands ) , ysize , xsize )
Get the outshape of a window .
135
8
246,408
def _read_with_mask ( raster , masked ) : if masked is None : mask_flags = raster . mask_flag_enums per_dataset_mask = all ( [ rasterio . enums . MaskFlags . per_dataset in flags for flags in mask_flags ] ) masked = per_dataset_mask return masked
returns if we should read from rasterio using the masked
79
13
246,409
def get_window ( self , window , bands = None , xsize = None , ysize = None , resampling = Resampling . cubic , masked = None , affine = None ) : bands = bands or list ( range ( 1 , self . num_bands + 1 ) ) # requested_out_shape and out_shape are different for out of bounds window out_shape = self . _get_window_out_shape ( bands , window , xsize , ysize ) try : read_params = { "window" : window , "resampling" : resampling , "boundless" : True , "out_shape" : out_shape , } # to handle get_window / get_tile of in memory rasters filename = self . _raster_backed_by_a_file ( ) . _filename with self . _raster_opener ( filename ) as raster : # type: rasterio.io.DatasetReader read_params [ "masked" ] = self . _read_with_mask ( raster , masked ) array = raster . read ( bands , * * read_params ) nodata = 0 if not np . ma . isMaskedArray ( array ) else None affine = affine or self . _calculate_new_affine ( window , out_shape [ 2 ] , out_shape [ 1 ] ) raster = self . copy_with ( image = array , affine = affine , nodata = nodata ) return raster except ( rasterio . errors . RasterioIOError , rasterio . _err . CPLE_HttpResponseError ) as e : raise GeoRaster2IOError ( e )
Get window from raster .
370
6
246,410
def _get_tile_when_web_mercator_crs ( self , x_tile , y_tile , zoom , bands = None , masked = None , resampling = Resampling . cubic ) : roi = GeoVector . from_xyz ( x_tile , y_tile , zoom ) coordinates = roi . get_bounds ( WEB_MERCATOR_CRS ) window = self . _window ( coordinates , to_round = False ) bands = bands or list ( range ( 1 , self . num_bands + 1 ) ) # we know the affine the result should produce becuase we know where # it is located by the xyz, therefore we calculate it here ratio = MERCATOR_RESOLUTION_MAPPING [ zoom ] / self . resolution ( ) # the affine should be calculated before rounding the window values affine = self . window_transform ( window ) affine = affine * Affine . scale ( ratio , ratio ) window = Window ( round ( window . col_off ) , round ( window . row_off ) , round ( window . width ) , round ( window . height ) ) return self . get_window ( window , bands = bands , xsize = 256 , ysize = 256 , masked = masked , affine = affine )
The reason we want to treat this case in a special way is that there are cases where the rater is aligned so you need to be precise on which raster you want
282
35
246,411
def get_tile ( self , x_tile , y_tile , zoom , bands = None , masked = None , resampling = Resampling . cubic ) : if self . crs == WEB_MERCATOR_CRS : return self . _get_tile_when_web_mercator_crs ( x_tile , y_tile , zoom , bands , masked , resampling ) roi = GeoVector . from_xyz ( x_tile , y_tile , zoom ) left , bottom , right , top = roi . get_bounds ( WEB_MERCATOR_CRS ) new_affine = rasterio . warp . calculate_default_transform ( WEB_MERCATOR_CRS , self . crs , 256 , 256 , left , bottom , right , top ) [ 0 ] new_resolution = resolution_from_affine ( new_affine ) buffer_ratio = int ( os . environ . get ( "TELLURIC_GET_TILE_BUFFER" , 10 ) ) roi_buffer = roi . buffer ( math . sqrt ( roi . area * buffer_ratio / 100 ) ) raster = self . crop ( roi_buffer , resolution = new_resolution , masked = masked , bands = bands , resampling = resampling ) raster = raster . reproject ( dst_crs = WEB_MERCATOR_CRS , resolution = MERCATOR_RESOLUTION_MAPPING [ zoom ] , dst_bounds = roi_buffer . get_bounds ( WEB_MERCATOR_CRS ) , resampling = Resampling . cubic_spline ) # raster = raster.get_tile(x_tile, y_tile, zoom, bands, masked, resampling) raster = raster . crop ( roi ) . resize ( dest_width = 256 , dest_height = 256 ) return raster
Convert mercator tile to raster window .
431
10
246,412
def colorize ( self , colormap , band_name = None , vmin = None , vmax = None ) : vmin = vmin if vmin is not None else min ( self . min ( ) ) vmax = vmax if vmax is not None else max ( self . max ( ) ) cmap = matplotlib . cm . get_cmap ( colormap ) # type: matplotlib.colors.Colormap band_index = 0 if band_name is None : if self . num_bands > 1 : warnings . warn ( "Using the first band to colorize the raster" , GeoRaster2Warning ) else : band_index = self . band_names . index ( band_name ) normalized = ( self . image [ band_index , : , : ] - vmin ) / ( vmax - vmin ) # Colormap instances are used to convert data values (floats) # to RGBA color that the respective Colormap # # https://matplotlib.org/_modules/matplotlib/colors.html#Colormap image_data = cmap ( normalized ) image_data = image_data [ : , : , 0 : 3 ] # convert floats [0,1] to uint8 [0,255] image_data = image_data * 255 image_data = image_data . astype ( np . uint8 ) image_data = np . rollaxis ( image_data , 2 ) # force nodata where it was in original raster: mask = _join_masks_from_masked_array ( self . image ) mask = np . stack ( [ mask [ 0 , : , : ] ] * 3 ) array = np . ma . array ( image_data . data , mask = mask ) . filled ( 0 ) # type: np.ndarray array = np . ma . array ( array , mask = mask ) return self . copy_with ( image = array , band_names = [ 'red' , 'green' , 'blue' ] )
Apply a colormap on a selected band .
441
10
246,413
def chunks ( self , shape = 256 , pad = False ) : _self = self . _raster_backed_by_a_file ( ) if isinstance ( shape , int ) : shape = ( shape , shape ) ( width , height ) = shape col_steps = int ( _self . width / width ) row_steps = int ( _self . height / height ) # when we the raster has an axis in which the shape is multipication # of the requested shape we don't need an extra step with window equal zero # in other cases we do need the extra step to get the reminder of the content col_extra_step = 1 if _self . width % width > 0 else 0 row_extra_step = 1 if _self . height % height > 0 else 0 for col_step in range ( 0 , col_steps + col_extra_step ) : col_off = col_step * width if not pad and col_step == col_steps : window_width = _self . width % width else : window_width = width for row_step in range ( 0 , row_steps + row_extra_step ) : row_off = row_step * height if not pad and row_step == row_steps : window_height = _self . height % height else : window_height = height window = Window ( col_off = col_off , row_off = row_off , width = window_width , height = window_height ) cur_raster = _self . get_window ( window ) yield RasterChunk ( raster = cur_raster , offsets = ( col_off , row_off ) )
This method returns GeoRaster chunks out of the original raster .
354
14
246,414
def dissolve ( collection , aggfunc = None ) : # type: (BaseCollection, Optional[Callable[[list], Any]]) -> GeoFeature new_properties = { } if aggfunc : temp_properties = defaultdict ( list ) # type: DefaultDict[Any, Any] for feature in collection : for key , value in feature . attributes . items ( ) : temp_properties [ key ] . append ( value ) for key , values in temp_properties . items ( ) : try : new_properties [ key ] = aggfunc ( values ) except Exception : # We just do not use these results pass return GeoFeature ( collection . cascaded_union , new_properties )
Dissolves features contained in a FeatureCollection and applies an aggregation function to its properties .
144
18
246,415
def filter ( self , intersects ) : try : crs = self . crs vector = intersects . geometry if isinstance ( intersects , GeoFeature ) else intersects prepared_shape = prep ( vector . get_shape ( crs ) ) hits = [ ] for feature in self : target_shape = feature . geometry . get_shape ( crs ) if prepared_shape . overlaps ( target_shape ) or prepared_shape . intersects ( target_shape ) : hits . append ( feature ) except IndexError : hits = [ ] return FeatureCollection ( hits )
Filter results that intersect a given GeoFeature or Vector .
122
11
246,416
def sort ( self , by , desc = False ) : if callable ( by ) : key = by else : def key ( feature ) : return feature [ by ] sorted_features = sorted ( list ( self ) , reverse = desc , key = key ) return self . __class__ ( sorted_features )
Sorts by given property or function ascending or descending order .
65
12
246,417
def groupby ( self , by ) : # type: (Union[str, Callable[[GeoFeature], str]]) -> _CollectionGroupBy results = OrderedDict ( ) # type: OrderedDict[str, list] for feature in self : if callable ( by ) : value = by ( feature ) else : value = feature [ by ] results . setdefault ( value , [ ] ) . append ( feature ) if hasattr ( self , "_schema" ) : # I am doing this to trick mypy, is there a better way? # calling self._schema generates a mypy problem schema = getattr ( self , "_schema" ) return _CollectionGroupBy ( results , schema = schema )
Groups collection using a value of a property .
156
10
246,418
def dissolve ( self , by = None , aggfunc = None ) : # type: (Optional[str], Optional[Callable]) -> FeatureCollection if by : agg = partial ( dissolve , aggfunc = aggfunc ) # type: Callable[[BaseCollection], GeoFeature] return self . groupby ( by ) . agg ( agg ) else : return FeatureCollection ( [ dissolve ( self , aggfunc ) ] )
Dissolve geometries and rasters within groupby .
87
13
246,419
def rasterize ( self , dest_resolution , * , polygonize_width = 0 , crs = WEB_MERCATOR_CRS , fill_value = None , bounds = None , dtype = None , * * polygonize_kwargs ) : # Avoid circular imports from telluric . georaster import merge_all , MergeStrategy from telluric . rasterization import rasterize , NODATA_DEPRECATION_WARNING # Compute the size in real units and polygonize the features if not isinstance ( polygonize_width , int ) : raise TypeError ( "The width in pixels must be an integer" ) if polygonize_kwargs . pop ( "nodata_value" , None ) : warnings . warn ( NODATA_DEPRECATION_WARNING , DeprecationWarning ) # If the pixels width is 1, render points as squares to avoid missing data if polygonize_width == 1 : polygonize_kwargs . update ( cap_style_point = CAP_STYLE . square ) # Reproject collection to target CRS if ( self . crs is not None and self . crs != crs ) : reprojected = self . reproject ( crs ) else : reprojected = self width = polygonize_width * dest_resolution polygonized = [ feature . polygonize ( width , * * polygonize_kwargs ) for feature in reprojected ] # Discard the empty features shapes = [ feature . geometry . get_shape ( crs ) for feature in polygonized if not feature . is_empty ] if bounds is None : bounds = self . envelope if bounds . area == 0.0 : raise ValueError ( "Specify non-empty ROI" ) if not len ( self ) : fill_value = None if callable ( fill_value ) : if dtype is None : raise ValueError ( "dtype must be specified for multivalue rasterization" ) rasters = [ ] for feature in self : rasters . append ( feature . geometry . rasterize ( dest_resolution , fill_value = fill_value ( feature ) , bounds = bounds , dtype = dtype , crs = crs ) ) return merge_all ( rasters , bounds . reproject ( crs ) , dest_resolution , merge_strategy = MergeStrategy . INTERSECTION ) else : return rasterize ( shapes , crs , bounds . get_shape ( crs ) , dest_resolution , fill_value = fill_value , dtype = dtype )
Binarize a FeatureCollection and produce a raster with the target resolution .
558
16
246,420
def save ( self , filename , driver = None , schema = None ) : if driver is None : driver = DRIVERS . get ( os . path . splitext ( filename ) [ - 1 ] ) if schema is None : schema = self . schema if driver == "GeoJSON" : # Workaround for https://github.com/Toblerity/Fiona/issues/438 # https://stackoverflow.com/a/27045091/554319 with contextlib . suppress ( FileNotFoundError ) : os . remove ( filename ) crs = WGS84_CRS else : crs = self . crs with fiona . open ( filename , 'w' , driver = driver , schema = schema , crs = crs ) as sink : for feature in self : new_feature = self . _adapt_feature_before_write ( feature ) sink . write ( new_feature . to_record ( crs ) )
Saves collection to file .
205
6
246,421
def apply ( self , * * kwargs ) : def _apply ( f ) : properties = copy . deepcopy ( f . properties ) for prop , value in kwargs . items ( ) : if callable ( value ) : properties [ prop ] = value ( f ) else : properties [ prop ] = value return f . copy_with ( properties = properties ) new_fc = self . map ( _apply ) new_schema = self . schema . copy ( ) property_names_set = kwargs . keys ( ) prop_types_map = FeatureCollection . guess_types_by_feature ( new_fc [ 0 ] , property_names_set ) for key , value_type in prop_types_map . items ( ) : # already defined attribute that we just override will have the same position as before # new attributes will be appened new_schema [ "properties" ] [ key ] = FIELD_TYPES_MAP_REV . get ( value_type , 'str' ) new_fc . _schema = new_schema return new_fc
Return a new FeatureCollection with the results of applying the statements in the arguments to each element .
234
19
246,422
def validate ( self ) : if self . _schema is not None : with MemoryFile ( ) as memfile : with memfile . open ( driver = "ESRI Shapefile" , schema = self . schema ) as target : for _item in self . _results : # getting rid of the assets that don't behave well becasue of in memroy rasters item = GeoFeature ( _item . geometry , _item . properties ) target . write ( item . to_record ( item . crs ) )
if schema exists we run shape file validation code of fiona by trying to save to in MemoryFile
109
20
246,423
def open ( cls , filename , crs = None ) : with fiona . Env ( ) : with fiona . open ( filename , 'r' ) as source : original_crs = CRS ( source . crs ) schema = source . schema length = len ( source ) crs = crs or original_crs ret_val = cls ( filename , crs , schema , length ) return ret_val
Creates a FileCollection from a file in disk .
92
11
246,424
def filter ( self , func ) : # type: (Callable[[BaseCollection], bool]) -> _CollectionGroupBy results = OrderedDict ( ) # type: OrderedDict for name , group in self : if func ( group ) : results [ name ] = group return self . __class__ ( results )
Filter out Groups based on filtering function .
68
8
246,425
def reset_context ( * * options ) : local_context . _options = { } local_context . _options . update ( options ) log . debug ( "New TelluricContext context %r created" , local_context . _options )
Reset context to default .
53
6
246,426
def get_context ( ) : if not local_context . _options : raise TelluricContextError ( "TelluricContext context not exists" ) else : log . debug ( "Got a copy of context %r options" , local_context . _options ) return local_context . _options . copy ( )
Get a mapping of current options .
68
7
246,427
def set_context ( * * options ) : if not local_context . _options : raise TelluricContextError ( "TelluricContext context not exists" ) else : local_context . _options . update ( options ) log . debug ( "Updated existing %r with options %r" , local_context . _options , options )
Set options in the existing context .
73
7
246,428
def transform_properties ( properties , schema ) : new_properties = properties . copy ( ) for prop_value , ( prop_name , prop_type ) in zip ( new_properties . values ( ) , schema [ "properties" ] . items ( ) ) : if prop_value is None : continue elif prop_type == "time" : new_properties [ prop_name ] = parse_date ( prop_value ) . time ( ) elif prop_type == "date" : new_properties [ prop_name ] = parse_date ( prop_value ) . date ( ) elif prop_type == "datetime" : new_properties [ prop_name ] = parse_date ( prop_value ) return new_properties
Transform properties types according to a schema .
158
8
246,429
def serialize_properties ( properties ) : new_properties = properties . copy ( ) for attr_name , attr_value in new_properties . items ( ) : if isinstance ( attr_value , datetime ) : new_properties [ attr_name ] = attr_value . isoformat ( ) elif not isinstance ( attr_value , ( dict , list , tuple , str , int , float , bool , type ( None ) ) ) : # Property is not JSON-serializable according to this table # https://docs.python.org/3.4/library/json.html#json.JSONEncoder # so we convert to string new_properties [ attr_name ] = str ( attr_value ) return new_properties
Serialize properties .
165
4
246,430
def from_record ( cls , record , crs , schema = None ) : properties = cls . _to_properties ( record , schema ) vector = GeoVector ( shape ( record [ 'geometry' ] ) , crs ) if record . get ( 'raster' ) : assets = { k : dict ( type = RASTER_TYPE , product = 'visual' , * * v ) for k , v in record . get ( 'raster' ) . items ( ) } else : assets = record . get ( 'assets' , { } ) return cls ( vector , properties , assets )
Create GeoFeature from a record .
131
7
246,431
def copy_with ( self , geometry = None , properties = None , assets = None ) : def copy_assets_object ( asset ) : obj = asset . get ( "__object" ) if hasattr ( "copy" , obj ) : new_obj = obj . copy ( ) if obj : asset [ "__object" ] = new_obj geometry = geometry or self . geometry . copy ( ) new_properties = copy . deepcopy ( self . properties ) if properties : new_properties . update ( properties ) if not assets : assets = copy . deepcopy ( self . assets ) map ( copy_assets_object , assets . values ( ) ) else : assets = { } return self . __class__ ( geometry , new_properties , assets )
Generate a new GeoFeature with different geometry or preperties .
160
13
246,432
def from_raster ( cls , raster , properties , product = 'visual' ) : footprint = raster . footprint ( ) assets = raster . to_assets ( product = product ) return cls ( footprint , properties , assets )
Initialize a GeoFeature object with a GeoRaster
52
11
246,433
def has_raster ( self ) : return any ( asset . get ( 'type' ) == RASTER_TYPE for asset in self . assets . values ( ) )
True if any of the assets is type raster .
37
11
246,434
def transform ( shape , source_crs , destination_crs = None , src_affine = None , dst_affine = None ) : if destination_crs is None : destination_crs = WGS84_CRS if src_affine is not None : shape = ops . transform ( lambda r , q : ~ src_affine * ( r , q ) , shape ) shape = generate_transform ( source_crs , destination_crs ) ( shape ) if dst_affine is not None : shape = ops . transform ( lambda r , q : dst_affine * ( r , q ) , shape ) return shape
Transforms shape from one CRS to another .
139
10
246,435
def simple_plot ( feature , * , mp = None , * * map_kwargs ) : # This import is here to avoid cyclic references from telluric . collections import BaseCollection if mp is None : mp = folium . Map ( tiles = "Stamen Terrain" , * * map_kwargs ) if feature . is_empty : warnings . warn ( "The geometry is empty." ) else : if isinstance ( feature , BaseCollection ) : feature = feature [ : SIMPLE_PLOT_MAX_ROWS ] folium . GeoJson ( mapping ( feature ) , name = 'geojson' , overlay = True ) . add_to ( mp ) shape = feature . envelope . get_shape ( WGS84_CRS ) mp . fit_bounds ( [ shape . bounds [ : 1 : - 1 ] , shape . bounds [ 1 : : - 1 ] ] ) return mp
Plots a GeoVector in a simple Folium map .
195
12
246,436
def zoom_level_from_geometry ( geometry , splits = 4 ) : # This import is here to avoid cyclic references from telluric . vectors import generate_tile_coordinates # We split the geometry and compute the zoom level for each chunk levels = [ ] for chunk in generate_tile_coordinates ( geometry , ( splits , splits ) ) : levels . append ( mercantile . bounding_tile ( * chunk . get_shape ( WGS84_CRS ) . bounds ) . z ) # We now return the median value using the median_low function, which # always picks the result from the list return median_low ( levels )
Generate optimum zoom level for geometry .
139
8
246,437
def layer_from_element ( element , style_function = None ) : # This import is here to avoid cyclic references from telluric . collections import BaseCollection if isinstance ( element , BaseCollection ) : styled_element = element . map ( lambda feat : style_element ( feat , style_function ) ) else : styled_element = style_element ( element , style_function ) return GeoJSON ( data = mapping ( styled_element ) , name = 'GeoJSON' )
Return Leaflet layer from shape .
104
7
246,438
def plot ( feature , mp = None , style_function = None , * * map_kwargs ) : map_kwargs . setdefault ( 'basemap' , basemaps . Stamen . Terrain ) if feature . is_empty : warnings . warn ( "The geometry is empty." ) mp = Map ( * * map_kwargs ) if mp is None else mp else : if mp is None : center = feature . envelope . centroid . reproject ( WGS84_CRS ) zoom = zoom_level_from_geometry ( feature . envelope ) mp = Map ( center = ( center . y , center . x ) , zoom = zoom , * * map_kwargs ) mp . add_layer ( layer_from_element ( feature , style_function ) ) return mp
Plots a GeoVector in an ipyleaflet map .
171
13
246,439
def tileserver_optimized_raster ( src , dest ) : src_raster = tl . GeoRaster2 . open ( src ) bounding_box = src_raster . footprint ( ) . get_shape ( tl . constants . WGS84_CRS ) . bounds tile = mercantile . bounding_tile ( * bounding_box ) dest_resolution = mercator_upper_zoom_level ( src_raster ) bounds = tl . GeoVector . from_xyz ( tile . x , tile . y , tile . z ) . get_bounds ( tl . constants . WEB_MERCATOR_CRS ) create_options = { "tiled" : "YES" , "blocksize" : 256 , "compress" : "DEFLATE" , "photometric" : "MINISBLACK" } with TemporaryDirectory ( ) as temp_dir : temp_file = os . path . join ( temp_dir , 'temp.tif' ) warp ( src , temp_file , dst_crs = tl . constants . WEB_MERCATOR_CRS , resolution = dest_resolution , dst_bounds = bounds , create_options = create_options ) with rasterio . Env ( GDAL_TIFF_INTERNAL_MASK = True , GDAL_TIFF_OVR_BLOCKSIZE = 256 ) : resampling = rasterio . enums . Resampling . gauss with rasterio . open ( temp_file , 'r+' ) as tmp_raster : factors = _calc_overviews_factors ( tmp_raster ) tmp_raster . build_overviews ( factors , resampling = resampling ) tmp_raster . update_tags ( ns = 'rio_overview' , resampling = resampling . name ) telluric_tags = _get_telluric_tags ( src ) if telluric_tags : tmp_raster . update_tags ( * * telluric_tags ) rasterio_sh . copy ( temp_file , dest , COPY_SRC_OVERVIEWS = True , tiled = True , compress = 'DEFLATE' , photometric = 'MINISBLACK' )
This method converts a raster to a tileserver optimized raster . The method will reproject the raster to align to the xyz system in resolution and projection It will also create overviews And finally it will arragne the raster in a cog way . You could take the dest file upload it to a web server that supports ranges and user GeoRaster . get_tile on it You are geranteed that you will get as minimal data as possible
502
93
246,440
def get_dimension ( geometry ) : coordinates = geometry [ "coordinates" ] type_ = geometry [ "type" ] if type_ in ( 'Point' , ) : return len ( coordinates ) elif type_ in ( 'LineString' , 'MultiPoint' ) : return len ( coordinates [ 0 ] ) elif type_ in ( 'Polygon' , 'MultiLineString' ) : return len ( coordinates [ 0 ] [ 0 ] ) elif type_ in ( 'MultiPolygon' , ) : return len ( coordinates [ 0 ] [ 0 ] [ 0 ] ) else : raise ValueError ( "Invalid type '{}'" . format ( type_ ) )
Gets the dimension of a Fiona - like geometry element .
144
12
246,441
def from_geojson ( cls , filename ) : with open ( filename ) as fd : geometry = json . load ( fd ) if 'type' not in geometry : raise TypeError ( "%s is not a valid geojson." % ( filename , ) ) return cls ( to_shape ( geometry ) , WGS84_CRS )
Load vector from geojson .
77
7
246,442
def to_geojson ( self , filename ) : with open ( filename , 'w' ) as fd : json . dump ( self . to_record ( WGS84_CRS ) , fd )
Save vector as geojson .
46
7
246,443
def from_bounds ( cls , xmin , ymin , xmax , ymax , crs = DEFAULT_CRS ) : return cls ( Polygon . from_bounds ( xmin , ymin , xmax , ymax ) , crs )
Creates GeoVector object from bounds .
59
8
246,444
def from_xyz ( cls , x , y , z ) : bb = xy_bounds ( x , y , z ) return cls . from_bounds ( xmin = bb . left , ymin = bb . bottom , xmax = bb . right , ymax = bb . top , crs = WEB_MERCATOR_CRS )
Creates GeoVector from Mercator slippy map values .
85
12
246,445
def cascaded_union ( cls , vectors , dst_crs , prevalidate = False ) : # type: (list, CRS, bool) -> GeoVector try : shapes = [ geometry . get_shape ( dst_crs ) for geometry in vectors ] if prevalidate : if not all ( [ sh . is_valid for sh in shapes ] ) : warnings . warn ( "Some invalid shapes found, discarding them." ) except IndexError : crs = DEFAULT_CRS shapes = [ ] return cls ( cascaded_union ( [ sh for sh in shapes if sh . is_valid ] ) . simplify ( 0 ) , crs = dst_crs )
Generate a GeoVector from the cascade union of the impute vectors .
146
15
246,446
def from_record ( cls , record , crs ) : if 'type' not in record : raise TypeError ( "The data isn't a valid record." ) return cls ( to_shape ( record ) , crs )
Load vector from record .
50
5
246,447
def get_bounding_box ( self , crs ) : return self . from_bounds ( * self . get_bounds ( crs ) , crs = crs )
Gets bounding box as GeoVector in a specified CRS .
40
14
246,448
def polygonize ( self , width , cap_style_line = CAP_STYLE . flat , cap_style_point = CAP_STYLE . round ) : shape = self . _shape if isinstance ( shape , ( LineString , MultiLineString ) ) : return self . __class__ ( shape . buffer ( width / 2 , cap_style = cap_style_line ) , self . crs ) elif isinstance ( shape , ( Point , MultiPoint ) ) : return self . __class__ ( shape . buffer ( width / 2 , cap_style = cap_style_point ) , self . crs ) else : return self
Turns line or point into a buffered polygon .
141
12
246,449
def tiles ( self , zooms , truncate = False ) : west , south , east , north = self . get_bounds ( WGS84_CRS ) return tiles ( west , south , east , north , zooms , truncate )
Iterator over the tiles intersecting the bounding box of the vector
53
13
246,450
def _join_masks_from_masked_array ( data ) : if not isinstance ( data . mask , np . ndarray ) : # workaround to handle mask compressed to single value mask = np . empty ( data . data . shape , dtype = np . bool ) mask . fill ( data . mask ) return mask mask = data . mask [ 0 ] . copy ( ) for i in range ( 1 , len ( data . mask ) ) : mask = np . logical_or ( mask , data . mask [ i ] ) return mask [ np . newaxis , : , : ]
Union of masks .
127
4
246,451
def _creation_options_for_cog ( creation_options , source_profile , blocksize ) : if not ( creation_options ) : creation_options = { } creation_options [ "blocksize" ] = blocksize creation_options [ "tiled" ] = True defaults = { "nodata" : None , "compress" : "lzw" } for key in [ "nodata" , "compress" ] : if key not in creation_options : creation_options [ key ] = source_profile . get ( key , defaults . get ( key ) ) return creation_options
it uses the profile of the source raster override anything using the creation_options and guarantees we will have tiled raster and blocksize
132
28
246,452
def convert_to_cog ( source_file , destination_file , resampling = rasterio . enums . Resampling . gauss , blocksize = 256 , overview_blocksize = 256 , creation_options = None ) : with rasterio . open ( source_file ) as src : # creation_options overrides proile source_profile = src . profile creation_options = _creation_options_for_cog ( creation_options , source_profile , blocksize ) with rasterio . Env ( GDAL_TIFF_INTERNAL_MASK = True , GDAL_TIFF_OVR_BLOCKSIZE = overview_blocksize ) : with TemporaryDirectory ( ) as temp_dir : temp_file = os . path . join ( temp_dir , 'temp.tif' ) rasterio_sh . copy ( source_file , temp_file , * * creation_options ) with rasterio . open ( temp_file , 'r+' ) as dest : factors = _calc_overviews_factors ( dest ) dest . build_overviews ( factors , resampling = resampling ) dest . update_tags ( ns = 'rio_overview' , resampling = resampling . name ) telluric_tags = _get_telluric_tags ( source_file ) if telluric_tags : dest . update_tags ( * * telluric_tags ) rasterio_sh . copy ( temp_file , destination_file , COPY_SRC_OVERVIEWS = True , * * creation_options )
Convert source file to a Cloud Optimized GeoTiff new file .
350
15
246,453
def warp(source_file, destination_file, dst_crs=None, resolution=None, dimensions=None,
         src_bounds=None, dst_bounds=None, src_nodata=None, dst_nodata=None,
         target_aligned_pixels=False, check_invert_proj=True, creation_options=None,
         resampling=Resampling.cubic, **kwargs):
    """Warp a raster dataset.

    :param source_file: path of the source raster.
    :param destination_file: path of the reprojected output raster.
    :param dst_crs: target coordinate reference system.
    :param resolution: target resolution.
    :param dimensions: target width/height.
    :param src_bounds: bounds of the source to use.
    :param dst_bounds: bounds of the destination.
    :param src_nodata: nodata value of the source.
    :param dst_nodata: nodata value written to the destination.
    :param target_aligned_pixels: align output pixel edges to the target grid.
    :param check_invert_proj: value for the CHECK_WITH_INVERT_PROJ GDAL option.
    :param creation_options: extra GDAL creation options for the output.
    :param resampling: resampling method, default cubic.
    """
    with rasterio.Env(CHECK_WITH_INVERT_PROJ=check_invert_proj):
        with rasterio.open(source_file) as src:
            out_kwargs = src.profile.copy()
            dst_crs, dst_transform, dst_width, dst_height = calc_transform(
                src, dst_crs, resolution, dimensions, src_bounds, dst_bounds, target_aligned_pixels)
            # If src_nodata is not None, update the dst metadata NODATA value
            # to src_nodata (will be overridden by dst_nodata if it is not None).
            if src_nodata is not None:
                # Update the destination NODATA value
                out_kwargs.update({'nodata': src_nodata})
            # Validate a manually set destination NODATA value.
            if dst_nodata is not None:
                if src_nodata is None and src.meta['nodata'] is None:
                    raise ValueError('src_nodata must be provided because dst_nodata is not None')
                else:
                    out_kwargs.update({'nodata': dst_nodata})
            out_kwargs.update({'crs': dst_crs, 'transform': dst_transform,
                               'width': dst_width, 'height': dst_height})
            # Adjust block size if necessary (GDAL requires blocks <= raster size).
            if ('blockxsize' in out_kwargs and dst_width < out_kwargs['blockxsize']):
                del out_kwargs['blockxsize']
            if ('blockysize' in out_kwargs and dst_height < out_kwargs['blockysize']):
                del out_kwargs['blockysize']
            if creation_options is not None:
                out_kwargs.update(**creation_options)
            with rasterio.open(destination_file, 'w', **out_kwargs) as dst:
                reproject(source=rasterio.band(src, src.indexes),
                          destination=rasterio.band(dst, dst.indexes),
                          src_transform=src.transform,
                          src_crs=src.crs,
                          src_nodata=src_nodata,
                          dst_transform=out_kwargs['transform'],
                          dst_crs=out_kwargs['crs'],
                          dst_nodata=dst_nodata,
                          resampling=resampling,
                          **kwargs)
Warp a raster dataset .
674
7
246,454
def build_overviews(source_file, factors=None, minsize=256, external=False,
                    blocksize=256, interleave='pixel', compress='lzw',
                    resampling=Resampling.gauss, **kwargs):
    """Build overviews at one or more decimation factors for all bands of the dataset.

    :param source_file: raster to update in place ('r+').
    :param factors: decimation factors; derived from the raster size when None.
    :param minsize: smallest overview dimension used when deriving factors.
    :param external: value for the TIFF_USE_OVR GDAL option (external .ovr file).
    :param blocksize: overview tile size (GDAL_TIFF_OVR_BLOCKSIZE).
    :param interleave: overview interleaving (INTERLEAVE_OVERVIEW).
    :param compress: overview compression (COMPRESS_OVERVIEW).
    :param resampling: resampling method used for the overviews.
    """
    with rasterio.open(source_file, 'r+') as dst:
        if factors is None:
            # derive halving factors from the raster size down to `minsize`
            factors = _calc_overviews_factors(
                SimpleNamespace(width=dst.width, height=dst.height), minsize)
        with rasterio.Env(GDAL_TIFF_OVR_BLOCKSIZE=blocksize,
                          INTERLEAVE_OVERVIEW=interleave,
                          COMPRESS_OVERVIEW=compress,
                          TIFF_USE_OVR=external,
                          **kwargs):
            dst.build_overviews(factors, resampling)
Build overviews at one or more decimation factors for all bands of the dataset .
190
17
246,455
def build_vrt(source_file, destination_file, **kwargs):
    """Make a VRT XML document and write it in file.

    :param source_file: path of the source raster.
    :param destination_file: path of the VRT file to write.
    :param kwargs: forwarded to ``boundless_vrt_doc``.
    :return: the destination file path.
    """
    with rasterio.open(source_file) as src:
        vrt_doc = boundless_vrt_doc(src, **kwargs).tostring()
    # the serialized document is bytes, hence the binary mode
    with open(destination_file, 'wb') as dst:
        dst.write(vrt_doc)
    return destination_file
Make a VRT XML document and write it in file .
88
12
246,456
def stretch_histogram(img, dark_clip_percentile=None, bright_clip_percentile=None,
                      dark_clip_value=None, bright_clip_value=None, ignore_zero=True):
    """Stretch img histogram.

    Linearly rescales the image so the chosen dark/bright clip points map to
    the full range of the image's integer dtype. The clip points are given
    either as percentiles or as raw values (one of the two per side).

    :param img: integer numpy array, 2D or (bands, height, width).
    :param dark_clip_percentile: fraction of dark pixels to clip (default 0.001).
    :param bright_clip_percentile: fraction of bright pixels to clip (default 0.001).
    :param dark_clip_value: explicit dark clip value (excludes the percentile).
    :param bright_clip_value: explicit bright clip value (excludes the percentile).
    :param ignore_zero: exclude zero pixels from the percentile computation.
    :raises KeyError: when both a percentile and a value are given for one side.
    :raises HistogramStretchingError: when the two clip points coincide.
    """
    # reject conflicting parameterizations for either side
    if (dark_clip_percentile is not None and dark_clip_value is not None) or \
            (bright_clip_percentile is not None and bright_clip_value is not None):
        raise KeyError('Provided parameters for both by-percentile and by-value stretch, need only one of those.')
    # default stretch: clip 0.1% on each side
    if dark_clip_percentile is None and dark_clip_value is None:
        dark_clip_percentile = 0.001
    if bright_clip_percentile is None and bright_clip_value is None:
        bright_clip_percentile = 0.001
    samples = img[img != 0] if ignore_zero else img
    if dark_clip_percentile is not None:
        dark_clip_value = np.percentile(samples, 100 * dark_clip_percentile)
    if bright_clip_percentile is not None:
        bright_clip_value = np.percentile(samples, 100 * (1 - bright_clip_percentile))
    info = np.iinfo(img.dtype)
    dst_min, dst_max = info.min, info.max
    if bright_clip_value == dark_clip_value:
        raise HistogramStretchingError
    # linear map: dark_clip_value -> dst_min, bright_clip_value -> dst_max
    gain = (dst_max - dst_min) / (bright_clip_value - dark_clip_value)
    offset = -gain * dark_clip_value + dst_min

    def _rescale(plane):
        # compute in float32, clip to the dtype range, cast back
        scaled = gain * plane.astype(np.float32) + offset
        return np.clip(scaled, dst_min, dst_max).astype(img.dtype)

    stretched = np.empty_like(img, dtype=img.dtype)
    if img.ndim == 2:
        stretched[:, :] = _rescale(img)
    else:
        for band in range(img.shape[0]):
            stretched[band] = _rescale(img[band])
    return stretched
Stretch img histogram .
513
5
246,457
def _distribution_info(self):
    """Create the distribution name and the expected extension for the
    CSPICE package and return it from the ``self._dists`` lookup table,
    keyed on (system, machine).
    """
    print('Gathering information...')
    system = platform.system()
    # Cygwin reports itself as CYGWIN-NT-xxx; normalize to 'cygwin'.
    if 'CYGWIN' in system:
        system = 'cygwin'
    processor = platform.processor()
    # sys.maxsize is the portable way to detect a 64-bit interpreter
    machine = '64bit' if sys.maxsize > 2 ** 32 else '32bit'
    print('SYSTEM: ', system)
    print('PROCESSOR:', processor)
    print('MACHINE: ', machine)
    return self._dists[(system, machine)]
Creates the distribution name and the expected extension for the CSPICE package and returns it .
130
19
246,458
def _download(self):
    """Download the CSPICE package into the ``self._local`` io.BytesIO stream.

    Uses urllib3 (with PyOpenSSL injected) when the system OpenSSL predates
    1.0.1g; otherwise uses the standard urllib with the system OpenSSL.
    Proxies are picked up from the *_proxy environment variables.
    """
    # Use urllib3 (based on PyOpenSSL).
    if ssl.OPENSSL_VERSION < 'OpenSSL 1.0.1g':
        # Force urllib3 to use pyOpenSSL
        import urllib3.contrib.pyopenssl
        urllib3.contrib.pyopenssl.inject_into_urllib3()
        import certifi
        import urllib3
        try:
            # Search proxy in ENV variables
            proxies = {}
            for key, value in os.environ.items():
                if '_proxy' in key.lower():
                    proxies[key.lower().replace('_proxy', '')] = value
            # Create a PoolManager (through a proxy when one is configured)
            if 'https' in proxies:
                https = urllib3.ProxyManager(proxies['https'], cert_reqs='CERT_REQUIRED', ca_certs=certifi.where())
            elif 'http' in proxies:
                https = urllib3.ProxyManager(proxies['http'], cert_reqs='CERT_REQUIRED', ca_certs=certifi.where())
            else:
                https = urllib3.PoolManager(cert_reqs='CERT_REQUIRED', ca_certs=certifi.where())
            # Send the request to get the CSPICE package.
            response = https.request('GET', self._rcspice, timeout=urllib3.Timeout(10))
        except urllib3.exceptions.HTTPError as err:
            # NOTE(review): urllib3 exceptions have no `.message` attribute on
            # Python 3, so this line would itself raise AttributeError -- confirm.
            raise RuntimeError(err.message)
        # Convert the response to io.BytesIO and store it in local memory.
        self._local = io.BytesIO(response.data)
    # Use the standard urllib (using system OpenSSL).
    else:
        try:
            # Send the request to get the CSPICE package (proxy auto detected).
            response = urllib.request.urlopen(self._rcspice, timeout=10)
        except urllib.error.URLError as err:
            raise RuntimeError(err.reason)
        # Convert the response to io.BytesIO and store it in local memory.
        self._local = io.BytesIO(response.read())
Support function that encapsulates the OpenSSL transfer of the CSPICE package to the self . _local io . BytesIO stream .
501
27
246,459
def _unpack(self):
    """Unpacks the CSPICE package on the given root directory.

    The archive is a zip file on Windows platforms and a gzipped tar
    archive elsewhere (extracted by piping through ``gunzip | tar``).
    """
    if self._ext == 'zip':
        with ZipFile(self._local, 'r') as archive:
            archive.extractall(self._root)
    else:
        cmd = 'gunzip | tar xC ' + self._root
        proc = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE)
        # NOTE(review): stdin is never closed and the process is not waited on,
        # so extraction may still be in flight when this returns -- confirm.
        proc.stdin.write(self._local.read())
    self._local.close()
Unpacks the CSPICE package on the given root directory . Note that Package could either be the zipfile . ZipFile class for Windows platforms or tarfile . TarFile for other platforms .
109
39
246,460
def spiceErrorCheck(f):
    """Decorator for spiceypy hooking into the SPICE error system.

    After each call of the wrapped function, ``checkForSpiceError`` is
    invoked so that any error flagged by SPICE is raised as an exception.

    :param f: function to wrap.
    :return: the wrapped function.
    """
    @functools.wraps(f)
    def with_errcheck(*args, **kwargs):
        # The original wrapped this in a bare `try: ... except: raise`,
        # which re-raises everything unchanged -- removed as a no-op.
        res = f(*args, **kwargs)
        checkForSpiceError(f)
        return res
    return with_errcheck
Decorator for spiceypy hooking into spice error system . If an error is detected an output similar to outmsg
66
25
246,461
def spiceFoundExceptionThrower(f):
    """Decorator for wrapping functions that use status codes.

    When ``config.catch_false_founds`` is enabled, the trailing "found"
    flag(s) are stripped from the wrapped function's result; a SpiceyError
    is raised if any flag is False. Otherwise the raw result is returned.
    """
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        res = f(*args, **kwargs)
        if config.catch_false_founds:
            found = res[-1]
            if isinstance(found, bool) and not found:
                raise stypes.SpiceyError("Spice returns not found for function: {}".format(f.__name__), found=found)
            elif hasattr(found, '__iter__') and not all(found):
                raise stypes.SpiceyError("Spice returns not found in a series of calls for function: {}".format(f.__name__), found=found)
            else:
                # drop the found flag(s); unwrap a single remaining value
                actualres = res[0:-1]
                if len(actualres) == 1:
                    return actualres[0]
                else:
                    return actualres
        else:
            return res
    return wrapper
Decorator for wrapping functions that use status codes
203
10
246,462
def appndc(item, cell):
    """Append an item to a character cell.

    :param item: string or list of strings to append.
    :param cell: the character SpiceCell to append to.
    """
    assert isinstance(cell, stypes.SpiceCell)
    # a plain string is itself iterable, so test for list explicitly here
    if isinstance(item, list):
        for c in item:
            libspice.appndc_c(stypes.stringToCharP(c), cell)
    else:
        item = stypes.stringToCharP(item)
        libspice.appndc_c(item, cell)
Append an item to a character cell .
87
9
246,463
def appndd(item, cell):
    """Append an item to a double precision cell.

    :param item: double or iterable of doubles to append.
    :param cell: the double precision SpiceCell to append to.
    """
    assert isinstance(cell, stypes.SpiceCell)
    if hasattr(item, "__iter__"):
        for d in item:
            libspice.appndd_c(ctypes.c_double(d), cell)
    else:
        item = ctypes.c_double(item)
        libspice.appndd_c(item, cell)
Append an item to a double precision cell .
89
10
246,464
def appndi(item, cell):
    """Append an item to an integer cell.

    :param item: int or iterable of ints to append.
    :param cell: the integer SpiceCell to append to.
    """
    assert isinstance(cell, stypes.SpiceCell)
    if hasattr(item, "__iter__"):
        for i in item:
            libspice.appndi_c(ctypes.c_int(i), cell)
    else:
        item = ctypes.c_int(item)
        libspice.appndi_c(item, cell)
Append an item to an integer cell .
89
9
246,465
def axisar(axis, angle):
    """Construct a rotation matrix that rotates vectors by a specified
    angle about a specified axis.

    :param axis: rotation axis (3-vector).
    :param angle: rotation angle in radians.
    :return: 3x3 rotation matrix as a numpy array.
    """
    axis = stypes.toDoubleVector(axis)
    angle = ctypes.c_double(angle)
    r = stypes.emptyDoubleMatrix()
    libspice.axisar_c(axis, angle, r)
    return stypes.cMatrixToNumpy(r)
Construct a rotation matrix that rotates vectors by a specified angle about a specified axis .
68
17
246,466
def badkpv(caller, name, comp, insize, divby, intype):
    """Determine if a kernel pool variable is present and if so that it
    has the correct size and type.

    :param caller: name of the routine calling this function.
    :param name: name of the kernel pool variable to check.
    :param comp: comparison operator (e.g. '=', '<', '>').
    :param insize: expected size of the variable.
    :param divby: divisor the size must be a multiple of.
    :param intype: expected type, a single character.
    :return: True if the variable is bad (absent or wrong size/type).
    """
    caller = stypes.stringToCharP(caller)
    name = stypes.stringToCharP(name)
    comp = stypes.stringToCharP(comp)
    insize = ctypes.c_int(insize)
    divby = ctypes.c_int(divby)
    # the C API takes a single char, so encode the one-character string
    intype = ctypes.c_char(intype.encode(encoding='UTF-8'))
    return bool(libspice.badkpv_c(caller, name, comp, insize, divby, intype))
Determine if a kernel pool variable is present and if so that it has the correct size and type .
138
22
246,467
def bltfrm(frmcls, outCell=None):
    """Return a SPICE set containing the frame IDs of all built-in frames
    of a specified class.

    :param frmcls: frame class code.
    :param outCell: optional output cell; a 1000-slot integer cell is
        allocated when omitted.
    :return: the integer SpiceCell of frame IDs.
    """
    frmcls = ctypes.c_int(frmcls)
    if not outCell:
        outCell = stypes.SPICEINT_CELL(1000)
    libspice.bltfrm_c(frmcls, outCell)
    return outCell
Return a SPICE set containing the frame IDs of all built - in frames of a specified class .
76
20
246,468
def bodc2n(code, lenout=_default_len_out):
    """Translate the SPICE integer code of a body into a common name for
    that body.

    :param code: integer ID code of the body.
    :param lenout: maximum length of the output name.
    :return: tuple of (name, found flag).
    """
    code = ctypes.c_int(code)
    # pre-allocate the output buffer before lenout is converted to c_int
    name = stypes.stringToCharP(" " * lenout)
    lenout = ctypes.c_int(lenout)
    found = ctypes.c_int()
    libspice.bodc2n_c(code, lenout, name, ctypes.byref(found))
    return stypes.toPythonString(name), bool(found.value)
Translate the SPICE integer code of a body into a common name for that body .
113
18
246,469
def bodc2s(code, lenout=_default_len_out):
    """Translate a body ID code to either the corresponding name or, if no
    name to ID code mapping exists, the string representation of the body
    ID value.

    :param code: integer ID code of the body.
    :param lenout: maximum length of the output string.
    :return: the body name or stringified ID.
    """
    code = ctypes.c_int(code)
    name = stypes.stringToCharP(" " * lenout)
    lenout = ctypes.c_int(lenout)
    libspice.bodc2s_c(code, lenout, name)
    return stypes.toPythonString(name)
Translate a body ID code to either the corresponding name or if no name to ID code mapping exists the string representation of the body ID value .
87
29
246,470
def bodfnd(body, item):
    """Determine whether values exist for some item for any body in the
    kernel pool.

    :param body: integer ID code of the body.
    :param item: item name to query (e.g. "RADII").
    :return: True when values exist for the item.
    """
    body = ctypes.c_int(body)
    item = stypes.stringToCharP(item)
    return bool(libspice.bodfnd_c(body, item))
Determine whether values exist for some item for any body in the kernel pool .
51
17
246,471
def bodn2c(name):
    """Translate the name of a body or object to the corresponding SPICE
    integer ID code.

    :param name: body name to translate.
    :return: tuple of (ID code, found flag).
    """
    name = stypes.stringToCharP(name)
    code = ctypes.c_int(0)
    found = ctypes.c_int(0)
    libspice.bodn2c_c(name, ctypes.byref(code), ctypes.byref(found))
    return code.value, bool(found.value)
Translate the name of a body or object to the corresponding SPICE integer ID code .
85
18
246,472
def bods2c(name):
    """Translate a string containing a body name or ID code to an integer
    code.

    :param name: body name or stringified ID to translate.
    :return: tuple of (ID code, found flag).
    """
    name = stypes.stringToCharP(name)
    code = ctypes.c_int(0)
    found = ctypes.c_int(0)
    libspice.bods2c_c(name, ctypes.byref(code), ctypes.byref(found))
    return code.value, bool(found.value)
Translate a string containing a body name or ID code to an integer code .
85
16
246,473
def bodvcd(bodyid, item, maxn):
    """Fetch from the kernel pool the double precision values of an item
    associated with a body, where the body is specified by an integer ID
    code.

    :param bodyid: integer ID code of the body.
    :param item: item name to fetch (e.g. "RADII").
    :param maxn: maximum number of values to return.
    :return: tuple of (number of values, values array).
    """
    bodyid = ctypes.c_int(bodyid)
    item = stypes.stringToCharP(item)
    dim = ctypes.c_int()
    # allocate the output buffer while maxn is still a Python int
    values = stypes.emptyDoubleVector(maxn)
    maxn = ctypes.c_int(maxn)
    libspice.bodvcd_c(bodyid, item, maxn, ctypes.byref(dim), values)
    return dim.value, stypes.cVectorToPython(values)
Fetch from the kernel pool the double precision values of an item associated with a body where the body is specified by an integer ID code .
118
28
246,474
def bodvrd(bodynm, item, maxn):
    """Fetch from the kernel pool the double precision values of an item
    associated with a body.

    :param bodynm: name of the body.
    :param item: item name to fetch (e.g. "RADII").
    :param maxn: maximum number of values to return.
    :return: tuple of (number of values, values array).
    """
    bodynm = stypes.stringToCharP(bodynm)
    item = stypes.stringToCharP(item)
    dim = ctypes.c_int()
    # allocate the output buffer while maxn is still a Python int
    values = stypes.emptyDoubleVector(maxn)
    maxn = ctypes.c_int(maxn)
    libspice.bodvrd_c(bodynm, item, maxn, ctypes.byref(dim), values)
    return dim.value, stypes.cVectorToPython(values)
Fetch from the kernel pool the double precision values of an item associated with a body .
119
18
246,475
def bschoc(value, ndim, lenvals, array, order):
    """Do a binary search for a given value within a character string array,
    accompanied by an order vector. Return the index of the matching array
    entry, or -1 if the key value is not found.

    :param value: key string to search for.
    :param ndim: number of elements in the array.
    :param lenvals: declared length of the array strings.
    :param array: list of strings to search.
    :param order: order vector for the array.
    :return: index of the match, or -1.
    """
    value = stypes.stringToCharP(value)
    ndim = ctypes.c_int(ndim)
    lenvals = ctypes.c_int(lenvals)
    array = stypes.listToCharArrayPtr(array, xLen=lenvals, yLen=ndim)
    order = stypes.toIntVector(order)
    return libspice.bschoc_c(value, ndim, lenvals, array, order)
Do a binary search for a given value within a character string array accompanied by an order vector . Return the index of the matching array entry or - 1 if the key value is not found .
115
38
246,476
def bschoi(value, ndim, array, order):
    """Do a binary search for a given value within an integer array,
    accompanied by an order vector. Return the index of the matching array
    entry, or -1 if the key value is not found.

    :param value: key integer to search for.
    :param ndim: number of elements in the array.
    :param array: integer array to search.
    :param order: order vector for the array.
    :return: index of the match, or -1.
    """
    value = ctypes.c_int(value)
    ndim = ctypes.c_int(ndim)
    array = stypes.toIntVector(array)
    order = stypes.toIntVector(order)
    return libspice.bschoi_c(value, ndim, array, order)
Do a binary search for a given value within an integer array accompanied by an order vector . Return the index of the matching array entry or - 1 if the key value is not found .
83
37
246,477
def bsrchc(value, ndim, lenvals, array):
    """Do a binary search for a given value within a character string array.
    Return the index of the first matching array entry, or -1 if the key
    value was not found.

    :param value: key string to search for.
    :param ndim: number of elements in the array.
    :param lenvals: declared length of the array strings.
    :param array: list of strings to search (assumed sorted).
    :return: index of the first match, or -1.
    """
    value = stypes.stringToCharP(value)
    ndim = ctypes.c_int(ndim)
    lenvals = ctypes.c_int(lenvals)
    array = stypes.listToCharArrayPtr(array, xLen=lenvals, yLen=ndim)
    return libspice.bsrchc_c(value, ndim, lenvals, array)
Do a binary search for a given value within a character string array . Return the index of the first matching array entry or - 1 if the key value was not found .
102
35
246,478
def bsrchd(value, ndim, array):
    """Do a binary search for a key value within a double precision array,
    assumed to be in increasing order. Return the index of the matching
    array entry, or -1 if the key value is not found.

    :param value: key double to search for.
    :param ndim: number of elements in the array.
    :param array: sorted double array to search.
    :return: index of the match, or -1.
    """
    value = ctypes.c_double(value)
    ndim = ctypes.c_int(ndim)
    array = stypes.toDoubleVector(array)
    return libspice.bsrchd_c(value, ndim, array)
Do a binary search for a key value within a double precision array assumed to be in increasing order . Return the index of the matching array entry or - 1 if the key value is not found .
68
39
246,479
def bsrchi(value, ndim, array):
    """Do a binary search for a key value within an integer array, assumed
    to be in increasing order. Return the index of the matching array
    entry, or -1 if the key value is not found.

    :param value: key integer to search for.
    :param ndim: number of elements in the array.
    :param array: sorted integer array to search.
    :return: index of the match, or -1.
    """
    value = ctypes.c_int(value)
    ndim = ctypes.c_int(ndim)
    array = stypes.toIntVector(array)
    return libspice.bsrchi_c(value, ndim, array)
Do a binary search for a key value within an integer array assumed to be in increasing order . Return the index of the matching array entry or - 1 if the key value is not found .
66
38
246,480
def ccifrm(frclss, clssid, lenout=_default_len_out):
    """Return the frame name, frame ID, and center associated with a given
    frame class and class ID.

    :param frclss: frame class code.
    :param clssid: class ID code.
    :param lenout: maximum length of the output frame name.
    :return: tuple of (frame code, frame name, center ID, found flag).
    """
    frclss = ctypes.c_int(frclss)
    clssid = ctypes.c_int(clssid)
    lenout = ctypes.c_int(lenout)
    frcode = ctypes.c_int()
    frname = stypes.stringToCharP(lenout)
    center = ctypes.c_int()
    found = ctypes.c_int()
    libspice.ccifrm_c(frclss, clssid, lenout, ctypes.byref(frcode), frname,
                      ctypes.byref(center), ctypes.byref(found))
    return frcode.value, stypes.toPythonString(frname), center.value, bool(found.value)
Return the frame name frame ID and center associated with a given frame class and class ID .
191
18
246,481
def cgv2el(center, vec1, vec2):
    """Form a SPICE ellipse from a center vector and two generating vectors.

    :param center: center vector of the ellipse.
    :param vec1: first generating vector.
    :param vec2: second generating vector.
    :return: the resulting stypes.Ellipse.
    """
    center = stypes.toDoubleVector(center)
    vec1 = stypes.toDoubleVector(vec1)
    vec2 = stypes.toDoubleVector(vec2)
    ellipse = stypes.Ellipse()
    libspice.cgv2el_c(center, vec1, vec2, ctypes.byref(ellipse))
    return ellipse
Form a SPICE ellipse from a center vector and two generating vectors .
100
16
246,482
def cidfrm(cent, lenout=_default_len_out):
    """Retrieve frame ID code and name to associate with a frame center.

    :param cent: integer ID code of the frame center.
    :param lenout: maximum length of the output frame name.
    :return: tuple of (frame code, frame name, found flag).
    """
    cent = ctypes.c_int(cent)
    lenout = ctypes.c_int(lenout)
    frcode = ctypes.c_int()
    frname = stypes.stringToCharP(lenout)
    found = ctypes.c_int()
    libspice.cidfrm_c(cent, lenout, ctypes.byref(frcode), frname, ctypes.byref(found))
    return frcode.value, stypes.toPythonString(frname), bool(found.value)
Retrieve frame ID code and name to associate with a frame center .
139
14
246,483
def ckcov(ck, idcode, needav, level, tol, timsys, cover=None):
    """Find the coverage window for a specified object in a specified CK file.

    :param ck: path to the CK file.
    :param idcode: integer ID code of the object.
    :param needav: flag indicating whether angular velocity is required.
    :param level: coverage level, "SEGMENT" or "INTERVAL".
    :param tol: tolerance in ticks.
    :param timsys: time system of the output, "SCLK" or "TDB".
    :param cover: optional output window; a 20000-slot double cell is
        allocated when omitted.
    :return: the double precision SpiceCell coverage window.
    """
    ck = stypes.stringToCharP(ck)
    idcode = ctypes.c_int(idcode)
    needav = ctypes.c_int(needav)
    level = stypes.stringToCharP(level)
    tol = ctypes.c_double(tol)
    timsys = stypes.stringToCharP(timsys)
    if not cover:
        cover = stypes.SPICEDOUBLE_CELL(20000)
    assert isinstance(cover, stypes.SpiceCell)
    # dtype 1 is the double precision cell type
    assert cover.dtype == 1
    libspice.ckcov_c(ck, idcode, needav, level, tol, timsys, ctypes.byref(cover))
    return cover
Find the coverage window for a specified object in a specified CK file .
186
14
246,484
def cklpf(filename):
    """Load a CK pointing file for use by the CK readers. Return that
    file's handle, to be used by other CK routines to refer to the file.

    :param filename: path to the CK file.
    :return: integer file handle.
    """
    filename = stypes.stringToCharP(filename)
    handle = ctypes.c_int()
    libspice.cklpf_c(filename, ctypes.byref(handle))
    return handle.value
Load a CK pointing file for use by the CK readers . Return that file's handle to be used by other CK routines to refer to the file .
57
30
246,485
def ckobj(ck, outCell=None):
    """Find the set of ID codes of all objects in a specified CK file.

    :param ck: path to the CK file.
    :param outCell: optional output cell; a 1000-slot integer cell is
        allocated when omitted.
    :return: the integer SpiceCell of ID codes.
    """
    assert isinstance(ck, str)
    ck = stypes.stringToCharP(ck)
    if not outCell:
        outCell = stypes.SPICEINT_CELL(1000)
    assert isinstance(outCell, stypes.SpiceCell)
    # dtype 2 is the integer cell type
    assert outCell.dtype == 2
    libspice.ckobj_c(ck, ctypes.byref(outCell))
    return outCell
Find the set of ID codes of all objects in a specified CK file .
104
15
246,486
def ckopn(filename, ifname, ncomch):
    """Open a new CK file, returning the handle of the opened file.

    :param filename: path of the CK file to create.
    :param ifname: internal file name.
    :param ncomch: number of characters to reserve for comments.
    :return: integer file handle.
    """
    filename = stypes.stringToCharP(filename)
    ifname = stypes.stringToCharP(ifname)
    ncomch = ctypes.c_int(ncomch)
    handle = ctypes.c_int()
    libspice.ckopn_c(filename, ifname, ncomch, ctypes.byref(handle))
    return handle.value
Open a new CK file returning the handle of the opened file .
100
13
246,487
def ckw01(handle, begtim, endtim, inst, ref, avflag, segid, nrec, sclkdp, quats, avvs):
    """Add a type 1 segment to a C-kernel.

    :param handle: handle of an open CK file.
    :param begtim: segment start encoded SCLK time.
    :param endtim: segment end encoded SCLK time.
    :param inst: instrument ID code.
    :param ref: reference frame name.
    :param avflag: True if angular velocity data is included.
    :param segid: segment identifier.
    :param nrec: number of pointing records.
    :param sclkdp: encoded SCLK times of the records.
    :param quats: quaternions representing the pointing.
    :param avvs: angular velocity vectors.
    """
    handle = ctypes.c_int(handle)
    begtim = ctypes.c_double(begtim)
    endtim = ctypes.c_double(endtim)
    inst = ctypes.c_int(inst)
    ref = stypes.stringToCharP(ref)
    avflag = ctypes.c_int(avflag)
    segid = stypes.stringToCharP(segid)
    sclkdp = stypes.toDoubleVector(sclkdp)
    quats = stypes.toDoubleMatrix(quats)
    avvs = stypes.toDoubleMatrix(avvs)
    nrec = ctypes.c_int(nrec)
    libspice.ckw01_c(handle, begtim, endtim, inst, ref, avflag, segid, nrec,
                     sclkdp, quats, avvs)
Add a type 1 segment to a C - kernel .
227
11
246,488
def ckw02(handle, begtim, endtim, inst, ref, segid, nrec, start, stop, quats, avvs, rates):
    """Write a type 2 segment to a C-kernel.

    :param handle: handle of an open CK file.
    :param begtim: segment start encoded SCLK time.
    :param endtim: segment end encoded SCLK time.
    :param inst: instrument ID code.
    :param ref: reference frame name.
    :param segid: segment identifier.
    :param nrec: number of pointing records.
    :param start: interval start times.
    :param stop: interval stop times.
    :param quats: quaternions representing the pointing.
    :param avvs: angular velocity vectors.
    :param rates: clock rates, seconds per tick.
    """
    handle = ctypes.c_int(handle)
    begtim = ctypes.c_double(begtim)
    endtim = ctypes.c_double(endtim)
    inst = ctypes.c_int(inst)
    ref = stypes.stringToCharP(ref)
    segid = stypes.stringToCharP(segid)
    start = stypes.toDoubleVector(start)
    stop = stypes.toDoubleVector(stop)
    rates = stypes.toDoubleVector(rates)
    quats = stypes.toDoubleMatrix(quats)
    avvs = stypes.toDoubleMatrix(avvs)
    nrec = ctypes.c_int(nrec)
    libspice.ckw02_c(handle, begtim, endtim, inst, ref, segid, nrec, start,
                     stop, quats, avvs, rates)
Write a type 2 segment to a C - kernel .
226
11
246,489
def ckw03(handle, begtim, endtim, inst, ref, avflag, segid, nrec, sclkdp, quats, avvs, nints, starts):
    """Add a type 3 segment to a C-kernel.

    :param handle: handle of an open CK file.
    :param begtim: segment start encoded SCLK time.
    :param endtim: segment end encoded SCLK time.
    :param inst: instrument ID code.
    :param ref: reference frame name.
    :param avflag: True if angular velocity data is included.
    :param segid: segment identifier.
    :param nrec: number of pointing records.
    :param sclkdp: encoded SCLK times of the records.
    :param quats: quaternions representing the pointing.
    :param avvs: angular velocity vectors.
    :param nints: number of interpolation intervals.
    :param starts: interval start times.
    """
    handle = ctypes.c_int(handle)
    begtim = ctypes.c_double(begtim)
    endtim = ctypes.c_double(endtim)
    inst = ctypes.c_int(inst)
    ref = stypes.stringToCharP(ref)
    avflag = ctypes.c_int(avflag)
    segid = stypes.stringToCharP(segid)
    sclkdp = stypes.toDoubleVector(sclkdp)
    quats = stypes.toDoubleMatrix(quats)
    avvs = stypes.toDoubleMatrix(avvs)
    nrec = ctypes.c_int(nrec)
    starts = stypes.toDoubleVector(starts)
    nints = ctypes.c_int(nints)
    libspice.ckw03_c(handle, begtim, endtim, inst, ref, avflag, segid, nrec,
                     sclkdp, quats, avvs, nints, starts)
Add a type 3 segment to a C - kernel .
261
11
246,490
def ckw05(handle, subtype, degree, begtim, endtim, inst, ref, avflag, segid,
          sclkdp, packts, rate, nints, starts):
    """Write a type 5 segment to a CK file.

    :param handle: handle of an open CK file.
    :param subtype: CK type 5 subtype code.
    :param degree: degree of the interpolating polynomials.
    :param begtim: segment start encoded SCLK time.
    :param endtim: segment end encoded SCLK time.
    :param inst: instrument ID code.
    :param ref: reference frame name.
    :param avflag: True if angular velocity data is included.
    :param segid: segment identifier.
    :param sclkdp: encoded SCLK times of the packets.
    :param packts: data packets.
    :param rate: nominal SCLK rate, seconds per tick.
    :param nints: number of interpolation intervals.
    :param starts: interval start times.
    """
    handle = ctypes.c_int(handle)
    subtype = ctypes.c_int(subtype)
    degree = ctypes.c_int(degree)
    begtim = ctypes.c_double(begtim)
    endtim = ctypes.c_double(endtim)
    inst = ctypes.c_int(inst)
    ref = stypes.stringToCharP(ref)
    avflag = ctypes.c_int(avflag)
    segid = stypes.stringToCharP(segid)
    # record count must be taken before packts is converted to a C matrix
    n = ctypes.c_int(len(packts))
    sclkdp = stypes.toDoubleVector(sclkdp)
    packts = stypes.toDoubleMatrix(packts)
    rate = ctypes.c_double(rate)
    nints = ctypes.c_int(nints)
    starts = stypes.toDoubleVector(starts)
    libspice.ckw05_c(handle, subtype, degree, begtim, endtim, inst, ref,
                     avflag, segid, n, sclkdp, packts, rate, nints, starts)
Write a type 5 segment to a CK file .
289
10
246,491
def cltext(fname):
    """Internal undocumented command for closing a text file opened by RDTEXT.

    :param fname: path of the text file to close.
    """
    fnameP = stypes.stringToCharP(fname)
    # Fortran-style entry point: the string length is passed explicitly
    fname_len = ctypes.c_int(len(fname))
    libspice.cltext_(fnameP, fname_len)
Internal undocumented command for closing a text file opened by RDTEXT .
58
14
246,492
def cmprss(delim, n, instr, lenout=_default_len_out):
    """Compress a character string by removing occurrences of more than N
    consecutive occurrences of a specified character.

    :param delim: the character to compress, a single character.
    :param n: maximum number of consecutive occurrences to keep.
    :param instr: input string.
    :param lenout: maximum length of the output string.
    :return: the compressed string.
    """
    delim = ctypes.c_char(delim.encode(encoding='UTF-8'))
    n = ctypes.c_int(n)
    instr = stypes.stringToCharP(instr)
    output = stypes.stringToCharP(lenout)
    libspice.cmprss_c(delim, n, instr, lenout, output)
    return stypes.toPythonString(output)
Compress a character string by removing occurrences of more than N consecutive occurrences of a specified character .
111
19
246,493
def cnmfrm(cname, lenout=_default_len_out):
    """Retrieve frame ID code and name to associate with an object.

    :param cname: name of the object.
    :param lenout: maximum length of the output frame name.
    :return: tuple of (frame code, frame name, found flag).
    """
    lenout = ctypes.c_int(lenout)
    frname = stypes.stringToCharP(lenout)
    cname = stypes.stringToCharP(cname)
    found = ctypes.c_int()
    frcode = ctypes.c_int()
    libspice.cnmfrm_c(cname, lenout, ctypes.byref(frcode), frname, ctypes.byref(found))
    return frcode.value, stypes.toPythonString(frname), bool(found.value)
Retrieve frame ID code and name to associate with an object .
144
13
246,494
def convrt(x, inunit, outunit):
    """Take a measurement X, the units associated with X, and units to which
    X should be converted; return Y, the value of the measurement in the
    output units.

    :param x: number or iterable of numbers to convert.
    :param inunit: units of the input.
    :param outunit: units of the output.
    :return: converted value, or list of values when x is iterable.
    """
    inunit = stypes.stringToCharP(inunit)
    outunit = stypes.stringToCharP(outunit)
    y = ctypes.c_double()
    # vectorized path: convert each element and collect into a list
    if hasattr(x, "__iter__"):
        outArray = []
        for n in x:
            libspice.convrt_c(n, inunit, outunit, ctypes.byref(y))
            outArray.append(y.value)
        return outArray
    x = ctypes.c_double(x)
    libspice.convrt_c(x, inunit, outunit, ctypes.byref(y))
    return y.value
Take a measurement X the units associated with X and units to which X should be converted ; return Y the value of the measurement in the output units .
153
30
246,495
def copy(cell):
    """Copy the contents of a SpiceCell of any data type to another cell of
    the same type.

    :param cell: the SpiceCell to copy.
    :return: a new SpiceCell with the same contents.
    :raises NotImplementedError: for unrecognized cell dtypes.
    """
    assert isinstance(cell, stypes.SpiceCell)
    # Use == (not `is`): identity comparison against int literals relies on
    # CPython small-int caching and emits SyntaxWarning on Python >= 3.8.
    if cell.dtype == 0:
        newcopy = stypes.SPICECHAR_CELL(cell.size, cell.length)
    elif cell.dtype == 1:
        newcopy = stypes.SPICEDOUBLE_CELL(cell.size)
    elif cell.dtype == 2:
        newcopy = stypes.SPICEINT_CELL(cell.size)
    else:
        raise NotImplementedError
    libspice.copy_c(ctypes.byref(cell), ctypes.byref(newcopy))
    return newcopy
Copy the contents of a SpiceCell of any data type to another cell of the same type .
178
19
246,496
def cpos(string, chars, start):
    """Find the first occurrence in a string of a character belonging to a
    collection of characters, starting at a specified location, searching
    forward.

    :param string: string to search.
    :param chars: collection of characters to look for.
    :param start: index at which to start the search.
    :return: index of the first occurrence, or -1 if none is found.
    """
    string = stypes.stringToCharP(string)
    chars = stypes.stringToCharP(chars)
    start = ctypes.c_int(start)
    return libspice.cpos_c(string, chars, start)
Find the first occurrence in a string of a character belonging to a collection of characters starting at a specified location searching forward .
62
24
246,497
def cposr(string, chars, start):
    """Find the first occurrence in a string of a character belonging to a
    collection of characters, starting at a specified location, searching
    in reverse.

    :param string: string to search.
    :param chars: collection of characters to look for.
    :param start: index at which to start the (backward) search.
    :return: index of the first occurrence, or -1 if none is found.
    """
    string = stypes.stringToCharP(string)
    chars = stypes.stringToCharP(chars)
    start = ctypes.c_int(start)
    return libspice.cposr_c(string, chars, start)
Find the first occurrence in a string of a character belonging to a collection of characters starting at a specified location searching in reverse .
64
25
246,498
def cvpool(agent):
    """Indicate whether or not any watched kernel variables that have a
    specified agent on their notification list have been updated.

    :param agent: name of the agent to check for updates.
    :return: True when watched variables were updated.
    """
    agent = stypes.stringToCharP(agent)
    update = ctypes.c_int()
    libspice.cvpool_c(agent, ctypes.byref(update))
    return bool(update.value)
Indicate whether or not any watched kernel variables that have a specified agent on their notification list have been updated .
58
22
246,499
def cyllat(r, lonc, z):
    """Convert from cylindrical to latitudinal coordinates.

    :param r: distance of the point from the z axis.
    :param lonc: cylindrical angle of the point, in radians.
    :param z: height of the point above the xy plane.
    :return: tuple of (radius, longitude, latitude).
    """
    r = ctypes.c_double(r)
    lonc = ctypes.c_double(lonc)
    z = ctypes.c_double(z)
    radius = ctypes.c_double()
    lon = ctypes.c_double()
    lat = ctypes.c_double()
    libspice.cyllat_c(r, lonc, z, ctypes.byref(radius), ctypes.byref(lon), ctypes.byref(lat))
    return radius.value, lon.value, lat.value
Convert from cylindrical to latitudinal coordinates .
141
11