idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
31,300
def help ( ) : print ( "Please use '{} <target>' where <target> is one of" . format ( sys . argv [ 0 ] ) ) width = max ( len ( name ) for name in TARGETS ) for name , target in TARGETS . items ( ) : print ( ' {name:{width}} {descr}' . format ( name = name , width = width , descr = target . __doc__ ) )
List all targets
31,301
def fonts ( self ) : for width in ( w for w in FontWidth if w in self ) : for slant in ( s for s in FontSlant if s in self [ width ] ) : for weight in ( w for w in FontWeight if w in self [ width ] [ slant ] ) : yield self [ width ] [ slant ] [ weight ]
Generator yielding all fonts of this typeface
31,302
def get_font ( self , weight = 'medium' , slant = 'upright' , width = 'normal' ) : def find_closest_style ( style , styles , alternatives ) : try : return style , styles [ style ] except KeyError : for option in alternatives [ style ] : try : return option , styles [ option ] except KeyError : continue def find_closest...
Return the font matching or closest to the given style
31,303
def get_selector ( self , name ) : try : return self . matcher . by_name [ name ] except ( AttributeError , KeyError ) : if self . base is not None : return self . base . get_selector ( name ) else : raise KeyError ( "No selector found for style '{}'" . format ( name ) )
Find a selector mapped to a style in this or a base style sheet .
31,304
def match ( self , chars ) : start_index = self . next_index for char in self : if char not in chars : self . next_index -= 1 break return self [ start_index : self . next_index ]
Return all next characters that are listed in chars as a string
31,305
def _rel_import ( module , tgt ) : try : exec ( "from ." + module + " import " + tgt , globals ( ) , locals ( ) ) except SyntaxError : exec ( "from " + module + " import " + tgt , globals ( ) , locals ( ) ) except ( ValueError , SystemError ) : exec ( "from " + module + " import " + tgt , globals ( ) , locals ( ) ) ret...
Using relative import in both Python 2 and Python 3
31,306
def peekiter ( iterable ) : it = iter ( iterable ) one = next ( it ) def gen ( ) : yield one while True : yield next ( it ) return ( one , gen ( ) )
Return first row and also iterable with same items as original
31,307
def check_sizes ( size , width , height ) : if not size : return width , height if len ( size ) != 2 : raise ValueError ( "size argument should be a pair (width, height)" ) if width is not None and width != size [ 0 ] : raise ValueError ( "size[0] (%r) and width (%r) should match when both are used." % ( size [ 0 ] , w...
Check that these arguments in supplied are consistent .
31,308
def check_color ( c , greyscale , which ) : if c is None : return c if greyscale : try : len ( c ) except TypeError : c = ( c , ) if len ( c ) != 1 : raise ValueError ( "%s for greyscale must be 1-tuple" % which ) if not isinteger ( c [ 0 ] ) : raise ValueError ( "%s colour for greyscale must be integer" % which ) else...
Checks that a colour argument is the right form .
31,309
def check_time ( value ) : if value is None : return None if isinstance ( value , ( time . struct_time , tuple ) ) : return value if isinstance ( value , datetime . datetime ) : return value . timetuple ( ) if isinstance ( value , datetime . date ) : res = datetime . datetime . utcnow ( ) res . replace ( year = value ....
Convert time from most popular representations to datetime
31,310
def try_greyscale ( pixels , alpha = False , dirty_alpha = True ) : planes = 3 + bool ( alpha ) res = list ( ) apix = list ( ) for row in pixels : green = row [ 1 : : planes ] if alpha : apix . append ( row [ 4 : planes ] ) if ( green != row [ 0 : : planes ] or green != row [ 2 : : planes ] ) : return False else : res ...
Check if flatboxed RGB pixels could be converted to greyscale
31,311
def adapt_sum ( line , cfg , filter_obj ) : lines = filter_obj . filter_all ( line ) res_s = [ sum ( it ) for it in lines ] r = res_s . index ( min ( res_s ) ) return lines [ r ]
Determine best filter by sum of all row values
31,312
def adapt_entropy ( line , cfg , filter_obj ) : lines = filter_obj . filter_all ( line ) res_c = [ len ( set ( it ) ) for it in lines ] r = res_c . index ( min ( res_c ) ) return lines [ r ]
Determine best filter by dispersion of row values
31,313
def __do_filter_sub ( self , scanline , result ) : ai = 0 for i in range ( self . fu , len ( result ) ) : x = scanline [ i ] a = scanline [ ai ] result [ i ] = ( x - a ) & 0xff ai += 1
Sub filter .
31,314
def __do_filter_up ( self , scanline , result ) : previous = self . prev for i in range ( len ( result ) ) : x = scanline [ i ] b = previous [ i ] result [ i ] = ( x - b ) & 0xff
Up filter .
31,315
def __undo_filter_average ( self , scanline ) : ai = - self . fu previous = self . prev for i in range ( len ( scanline ) ) : x = scanline [ i ] if ai < 0 : a = 0 else : a = scanline [ ai ] b = previous [ i ] scanline [ i ] = ( x + ( ( a + b ) >> 1 ) ) & 0xff ai += 1
Undo average filter .
31,316
def undo_filter ( self , filter_type , line ) : assert 0 <= filter_type <= 4 if self . prev is None : self . prev = newBarray ( len ( line ) ) if filter_type == 2 : filter_type = 0 elif filter_type == 4 : filter_type = 1 if filter_type == 1 : self . __undo_filter_sub ( line ) elif filter_type == 2 : self . __undo_filte...
Undo the filter for a scanline .
31,317
def _filter_scanline ( self , filter_type , line , result ) : assert 0 <= filter_type < 5 if self . prev is None : if filter_type == 2 : filter_type = 0 elif filter_type == 3 : self . prev = newBarray ( len ( line ) ) elif filter_type == 4 : filter_type = 1 if filter_type == 1 : self . __do_filter_sub ( line , result )...
Apply a scanline filter to a scanline .
31,318
def convert_la_to_rgba ( self , row , result ) : for i in range ( len ( row ) // 3 ) : for j in range ( 3 ) : result [ ( 4 * i ) + j ] = row [ 2 * i ] result [ ( 4 * i ) + 3 ] = row [ ( 2 * i ) + 1 ]
Convert a grayscale image with alpha to RGBA .
31,319
def convert_l_to_rgba ( self , row , result ) : for i in range ( len ( row ) // 3 ) : for j in range ( 3 ) : result [ ( 4 * i ) + j ] = row [ i ]
Convert a grayscale image to RGBA .
31,320
def convert_rgb_to_rgba ( self , row , result ) : for i in range ( len ( row ) // 3 ) : for j in range ( 3 ) : result [ ( 4 * i ) + j ] = row [ ( 3 * i ) + j ]
Convert an RGB image to RGBA .
31,321
def set_icc_profile ( self , profile = None , name = 'ICC Profile' ) : if isinstance ( profile , ( basestring , bytes ) ) : icc_profile = [ name , profile ] else : icc_profile = profile if not icc_profile [ 0 ] : raise Error ( "ICC profile should have a name" ) elif not isinstance ( icc_profile [ 0 ] , bytes ) : icc_pr...
Add ICC Profile .
31,322
def set_text ( self , text = None , ** kwargs ) : if text is None : text = { } text . update ( popdict ( kwargs , _registered_kw ) ) if 'Creation Time' in text and not isinstance ( text [ 'Creation Time' ] , ( basestring , bytes ) ) : text [ 'Creation Time' ] = datetime . datetime ( * ( check_time ( text [ 'Creation Ti...
Add textual information passed as dictionary .
31,323
def set_modification_time ( self , modification_time = True ) : if ( isinstance ( modification_time , basestring ) and modification_time . lower ( ) == 'write' ) or modification_time is True : self . modification_time = True else : self . modification_time = check_time ( modification_time )
Add time to be written as last modification time
31,324
def set_resolution ( self , resolution = None ) : if resolution is None : self . resolution = None return if len ( resolution ) == 3 : resolution = ( ( resolution [ 0 ] , resolution [ 1 ] ) , resolution [ 2 ] ) if len ( resolution ) == 1 or not resolution [ 1 ] : resolution = ( resolution [ 0 ] , 0 ) if isinstance ( re...
Add physical pixel dimensions
31,325
def set_rendering_intent ( self , rendering_intent ) : if rendering_intent not in ( None , PERCEPTUAL , RELATIVE_COLORIMETRIC , SATURATION , ABSOLUTE_COLORIMETRIC ) : raise FormatError ( 'Unknown redering intent' ) self . rendering_intent = rendering_intent
Set rendering intent variant for sRGB chunk
31,326
def set_white_point ( self , white_point , point2 = None ) : if isinstance ( white_point , float ) and isinstance ( point2 , float ) : white_point = ( white_point , point2 ) self . white_point = white_point
Set white point part of cHRM chunk
31,327
def set_rgb_points ( self , rgb_points , * args ) : if not args : self . rgb_points = rgb_points elif len ( args ) == 2 : self . rgb_points = ( rgb_points , args [ 0 ] , args [ 1 ] ) elif len ( args ) == 5 : self . rgb_points = ( ( rgb_points , args [ 0 ] ) , ( args [ 1 ] , args [ 2 ] ) , ( args [ 3 ] , args [ 4 ] ) )
Set rgb points part of cHRM chunk
31,328
def __write_palette ( self , outfile ) : p = bytearray ( ) t = bytearray ( ) for x in self . palette : p . extend ( x [ 0 : 3 ] ) if len ( x ) > 3 : t . append ( x [ 3 ] ) write_chunk ( outfile , 'PLTE' , bytearray_to_bytes ( p ) ) if t : write_chunk ( outfile , 'tRNS' , bytearray_to_bytes ( t ) )
Write PLTE and if necessary a tRNS chunk to .
31,329
def __write_text ( self , outfile ) : for k , v in self . text . items ( ) : if not isinstance ( v , bytes ) : try : international = False v = v . encode ( 'latin-1' ) except UnicodeEncodeError : international = True v = v . encode ( 'utf-8' ) else : international = False if not isinstance ( k , bytes ) : k = strtobyte...
Write text information into file
31,330
def write_idat ( self , outfile , idat_sequence ) : outfile . write ( png_signature ) color_type = 4 * self . alpha + 2 * ( not self . greyscale ) + bool ( self . palette ) write_chunk ( outfile , 'IHDR' , struct . pack ( "!2I5B" , self . width , self . height , self . bitdepth , color_type , 0 , 0 , self . interlace )...
Write png with IDAT to file
31,331
def comp_idat ( self , idat ) : if self . compression is not None : compressor = zlib . compressobj ( self . compression ) else : compressor = zlib . compressobj ( ) for dat in idat : compressed = compressor . compress ( dat ) if len ( compressed ) : yield compressed flushed = compressor . flush ( ) if len ( flushed ) ...
Generator that produce compressed IDAT chunks from IDAT data
31,332
def idat ( self , rows , packed = False ) : filt = Filter ( self . bitdepth * self . planes , self . interlace , self . height ) data = bytearray ( ) def byteextend ( rowbytes ) : data . extend ( filt . do_filter ( self . filter_type , rowbytes ) ) if self . bitdepth == 8 or packed : extend = byteextend elif self . bit...
Generator that produce uncompressed IDAT data from rows
31,333
def write_packed ( self , outfile , rows ) : return self . write_passes ( outfile , rows , packed = True )
Write PNG file to outfile .
31,334
def newarray ( self , length , value = 0 ) : if self . bitdepth > 8 : return array ( 'H' , [ value ] * length ) else : return bytearray ( [ value ] * length )
Initialise empty row
31,335
def rigthgen ( self , value = 0 ) : while True : yield self . newarray ( self . nplanes_right * self . width , value )
Generate rows to fill right pixels in int mode
31,336
def next ( self ) : left = next ( self . seq_left ) if self . width is None : self . width = len ( left ) / self . nplanes_left if self . bitdepth is None : if hasattr ( left , 'itemsize' ) : self . bitdepth = left . itemsize * 8 elif isinstance ( left , ( bytes , bytearray ) ) : self . bitdepth = 8 else : raise Error ...
Generate merged row consuming rows of original iterstors
31,337
def filter_all ( self , line ) : lines = [ None ] * 5 for filter_type in range ( 5 ) : res = copyBarray ( line ) self . _filter_scanline ( filter_type , line , res ) res . insert ( 0 , filter_type ) lines [ filter_type ] = res return lines
Doing all filters for specified line
31,338
def do_filter ( self , filter_type , line ) : line = bytearray ( line ) if isinstance ( filter_type , int ) : res = bytearray ( line ) self . _filter_scanline ( filter_type , line , res ) res . insert ( 0 , filter_type ) else : res = self . adaptive_filter ( filter_type , line ) self . prev = line if self . restarts : ...
Applying filter caring about prev line interlacing etc .
31,339
def read ( self , n ) : r = self . buf [ self . offset : self . offset + n ] if isinstance ( r , array ) : r = r . tostring ( ) self . offset += n return r
Read n chars from buffer
31,340
def chunk ( self , seek = None , lenient = False ) : self . validate_signature ( ) while True : if not self . atchunk : self . atchunk = self . chunklentype ( ) length , chunk_type = self . atchunk self . atchunk = None data = self . file . read ( length ) if len ( data ) != length : raise ChunkError ( 'Chunk %s too sh...
Read the next PNG chunk from the input file
31,341
def deinterlace ( self , raw ) : vpr = self . width * self . planes if self . bitdepth > 8 : a = newHarray ( vpr * self . height ) else : a = newBarray ( vpr * self . height ) source_offset = 0 filt = Filter ( self . bitdepth * self . planes ) for xstart , ystart , xstep , ystep in _adam7 : if xstart >= self . width : ...
Read raw pixel data undo filters deinterlace and flatten .
31,342
def iterboxed ( self , rows ) : def asvalues ( raw ) : if self . bitdepth == 8 : return raw if self . bitdepth == 16 : raw = bytearray_to_bytes ( raw ) return array ( 'H' , struct . unpack ( '!%dH' % ( len ( raw ) // 2 ) , raw ) ) assert self . bitdepth < 8 width = self . width spb = 8 // self . bitdepth out = newBarra...
Iterator that yields each scanline in boxed row flat pixel format .
31,343
def iterstraight ( self , raw ) : rb_1 = self . row_bytes + 1 a = bytearray ( ) filt = Filter ( self . bitdepth * self . planes ) for some in raw : a . extend ( some ) offset = 0 while len ( a ) >= rb_1 + offset : filter_type = a [ offset ] if filter_type not in ( 0 , 1 , 2 , 3 , 4 ) : raise FormatError ( 'Invalid PNG ...
Iterator that undoes the effect of filtering
31,344
def preamble ( self , lenient = False ) : self . validate_signature ( ) while True : if not self . atchunk : self . atchunk = self . chunklentype ( ) if self . atchunk is None : raise FormatError ( 'This PNG file has no IDAT chunks.' ) if self . atchunk [ 1 ] == 'IDAT' : return self . process_chunk ( lenient = lenient ...
Extract the image metadata
31,345
def idat ( self , lenient = False ) : while True : try : chunk_type , data = self . chunk ( lenient = lenient ) except ValueError : e = sys . exc_info ( ) [ 1 ] raise ChunkError ( e . args [ 0 ] ) if chunk_type == 'IEND' : break if chunk_type != 'IDAT' : continue if self . colormap and not self . plte : warnings . warn...
Iterator that yields all the IDAT chunks as strings .
31,346
def idatdecomp ( self , lenient = False , max_length = 0 ) : d = zlib . decompressobj ( ) for data in self . idat ( lenient ) : yield bytearray ( d . decompress ( data ) ) yield bytearray ( d . flush ( ) )
Iterator that yields decompressed IDAT strings .
31,347
def read ( self , lenient = False ) : self . preamble ( lenient = lenient ) raw = self . idatdecomp ( lenient ) if self . interlace : raw = bytearray ( itertools . chain ( * raw ) ) arraycode = 'BH' [ self . bitdepth > 8 ] pixels = map ( lambda * row : array ( arraycode , row ) , * [ iter ( self . deinterlace ( raw ) )...
Read the PNG file and decode it .
31,348
def read_flat ( self ) : x , y , pixel , meta = self . read ( ) arraycode = 'BH' [ meta [ 'bitdepth' ] > 8 ] pixel = array ( arraycode , itertools . chain ( * pixel ) ) return x , y , pixel , meta
Read a PNG file and decode it into flat row flat pixel format .
31,349
def asRGB ( self ) : width , height , pixels , meta = self . asDirect ( ) if meta [ 'alpha' ] : raise Error ( "will not convert image with alpha channel to RGB" ) if not meta [ 'greyscale' ] : return width , height , pixels , meta meta [ 'greyscale' ] = False newarray = ( newBarray , newHarray ) [ meta [ 'bitdepth' ] >...
Return image as RGB pixels .
31,350
def asRGBA ( self ) : width , height , pixels , meta = self . asDirect ( ) if meta [ 'alpha' ] and not meta [ 'greyscale' ] : return width , height , pixels , meta maxval = 2 ** meta [ 'bitdepth' ] - 1 if meta [ 'bitdepth' ] > 8 : def newarray ( ) : return array ( 'H' , [ maxval ] * 4 * width ) else : def newarray ( ) ...
Return image as RGBA pixels .
31,351
def chromaticity_to_XYZ ( white , red , green , blue ) : xW , yW = white xR , yR = red xG , yG = green xB , yB = blue R = G = B = 1.0 z = yW * ( ( xG - xB ) * yR - ( xR - xB ) * yG + ( xR - xG ) * yB ) YA = yR / R * ( ( xG - xB ) * yW - ( xW - xB ) * yG + ( xW - xG ) * yB ) / z XA = YA * xR / yR ZA = YA * ( ( 1 - xR ) ...
From the CalRGB Color Spaces section of PDF Reference 6th ed .
31,352
def fallback ( cache ) : log_filter = ThrottlingFilter ( cache = cache ) logger . filters = [ ] logger . addFilter ( log_filter ) def get_cache_response ( cache_key ) : content = cache . get ( cache_key ) if content : response = CacheResponse ( ) response . __setstate__ ( { 'status_code' : 200 , '_content' : content , ...
Caches content retrieved by the client thus allowing the cached content to be used later if the live content cannot be retrieved .
31,353
def build_url ( base_url , partial_url ) : if not base_url . endswith ( '/' ) : base_url += '/' if partial_url . startswith ( '/' ) : partial_url = partial_url [ 1 : ] return urlparse . urljoin ( base_url , partial_url )
Makes sure the URL is built properly .
31,354
def form_number ( self ) : k1 , o1 , m2 , s2 = ( np . extract ( self . model [ 'constituent' ] == c , self . model [ 'amplitude' ] ) for c in [ constituent . _K1 , constituent . _O1 , constituent . _M2 , constituent . _S2 ] ) return ( k1 + o1 ) / ( m2 + s2 )
Returns the model s form number a helpful heuristic for classifying tides .
31,355
def normalize ( self ) : for i , ( _ , amplitude , phase ) in enumerate ( self . model ) : if amplitude < 0 : self . model [ 'amplitude' ] [ i ] = - amplitude self . model [ 'phase' ] [ i ] = phase + 180.0 self . model [ 'phase' ] [ i ] = np . mod ( self . model [ 'phase' ] [ i ] , 360.0 )
Adapt self . model so that amplitudes are positive and phases are in [ 0 360 ) as per convention
31,356
def _unescape ( v ) : i = 0 backslash = False while i < len ( v ) : if backslash : backslash = False if v [ i ] in _escapes : v = v [ : i - 1 ] + _escape_to_escapedchars [ v [ i ] ] + v [ i + 1 : ] elif v [ i ] == '\\' : v = v [ : i - 1 ] + v [ i : ] elif v [ i ] == 'u' or v [ i ] == 'U' : i += 1 else : raise TomlDecod...
Unescape characters in a TOML string .
31,357
def parse ( self ) : index_server = None for num , line in enumerate ( self . iter_lines ( ) ) : line = line . rstrip ( ) if not line : continue if line . startswith ( '#' ) : continue if line . startswith ( '-i' ) or line . startswith ( '--index-url' ) or line . startswith ( '--extra-index-url' ) : index_server = self...
Parses a requirements . txt - like file
31,358
def _format_help_dicts ( help_dicts , display_defaults = False ) : help_strs = [ ] for help_dict in help_dicts : help_str = "%s (%s" % ( help_dict [ "var_name" ] , "Required" if help_dict [ "required" ] else "Optional" , ) if help_dict . get ( "default" ) and display_defaults : help_str += ", Default=%s)" % help_dict [...
Format the output of _generate_help_dicts into a str
31,359
def _generate_help_dicts ( config_cls , _prefix = None ) : help_dicts = [ ] if _prefix is None : _prefix = config_cls . _prefix for a in attr . fields ( config_cls ) : try : ce = a . metadata [ CNF_KEY ] except KeyError : continue if ce . sub_cls is None : if ce . name is None : var_name = "_" . join ( ( _prefix , a . ...
Generate dictionaries for use in building help strings .
31,360
def generate_help ( config_cls , ** kwargs ) : try : formatter = kwargs . pop ( "formatter" ) except KeyError : formatter = _format_help_dicts help_dicts = _generate_help_dicts ( config_cls ) return formatter ( help_dicts , ** kwargs )
Autogenerate a help string for a config class .
31,361
def render_to_json ( response , request = None , ** kwargs ) : if hasattr ( response , 'status_code' ) : status_code = response . status_code elif issubclass ( type ( response ) , Http404 ) : status_code = 404 elif issubclass ( type ( response ) , Exception ) : status_code = 500 logger . exception ( str ( response ) , ...
Creates the main structure and returns the JSON response .
31,362
def dispatch ( self , request , * args , ** kwargs ) : ajax_kwargs = { 'mandatory' : self . ajax_mandatory } if self . json_encoder : ajax_kwargs [ 'cls' ] = self . json_encoder return ajax ( ** ajax_kwargs ) ( super ( AJAXMixin , self ) . dispatch ) ( request , * args , ** kwargs )
Using ajax decorator
31,363
def is_opendap ( url ) : if url . endswith ( '#fillmismatch' ) : das_url = url . replace ( '#fillmismatch' , '.das' ) else : das_url = url + '.das' response = requests . get ( das_url , allow_redirects = True ) if 'xdods-server' in response . headers : return True if response . status_code == 401 and 'text/html' in res...
Returns True if the URL is a valid OPeNDAP URL
31,364
def datetime_is_iso ( date_str ) : try : if len ( date_str ) > 10 : dt = isodate . parse_datetime ( date_str ) else : dt = isodate . parse_date ( date_str ) return True , [ ] except : return False , [ 'Datetime provided is not in a valid ISO 8601 format' ]
Attempts to parse a date formatted in ISO 8601 format
31,365
def is_cdl ( filename ) : if os . path . splitext ( filename ) [ - 1 ] != '.cdl' : return False with open ( filename , 'rb' ) as f : data = f . read ( 32 ) if data . startswith ( b'netcdf' ) or b'dimensions' in data : return True return False
Quick check for . cdl ascii file
31,366
def run_checker ( cls , ds_loc , checker_names , verbose , criteria , skip_checks = None , output_filename = '-' , output_format = [ 'text' ] ) : all_groups = [ ] cs = CheckSuite ( ) score_dict = OrderedDict ( ) if not isinstance ( ds_loc , six . string_types ) : locs = ds_loc else : locs = [ ds_loc ] if isinstance ( o...
Static check runner .
31,367
def stdout_output ( cls , cs , score_dict , verbose , limit ) : for ds , score_groups in six . iteritems ( score_dict ) : for checker , rpair in six . iteritems ( score_groups ) : groups , errors = rpair score_list , points , out_of = cs . standard_output ( ds , limit , checker , groups ) cs . standard_output_generatio...
Calls output routine to display results in terminal including scoring . Goes to verbose function if called by user .
31,368
def check_time_period ( self , ds ) : start = self . std_check ( ds , 'time_coverage_start' ) end = self . std_check ( ds , 'time_coverage_end' ) msgs = [ ] count = 2 if not start : count -= 1 msgs . append ( "Attr 'time_coverage_start' is missing" ) if not end : count -= 1 msgs . append ( "Attr 'time_coverage_end' is ...
Check that time period attributes are both set .
31,369
def check_station_location_lat ( self , ds ) : gmin = self . std_check ( ds , 'geospatial_lat_min' ) gmax = self . std_check ( ds , 'geospatial_lat_max' ) msgs = [ ] count = 2 if not gmin : count -= 1 msgs . append ( "Attr 'geospatial_lat_min' is missing" ) if not gmax : count -= 1 msgs . append ( "Attr 'geospatial_lat...
Checks station lat attributes are set
31,370
def check_global_attributes ( self , ds ) : return [ self . _has_attr ( ds , 'acknowledgement' , 'Platform Sponsor' ) , self . _has_attr ( ds , 'publisher_email' , 'Station Publisher Email' ) , self . _has_attr ( ds , 'publisher_email' , 'Service Contact Email' , BaseCheck . MEDIUM ) , self . _has_attr ( ds , 'institut...
Check all global NC attributes for existence .
31,371
def check_variable_attributes ( self , ds ) : return [ self . _has_var_attr ( ds , 'platform' , 'long_name' , 'Station Long Name' ) , self . _has_var_attr ( ds , 'platform' , 'short_name' , 'Station Short Name' ) , self . _has_var_attr ( ds , 'platform' , 'source' , 'Platform Type' ) , self . _has_var_attr ( ds , 'plat...
Check IOOS concepts that come from NC variable attributes .
31,372
def check_variable_names ( self , ds ) : msgs = [ ] count = 0 for k , v in ds . variables . items ( ) : if 'standard_name' in v . ncattrs ( ) : count += 1 else : msgs . append ( "Variable '{}' missing standard_name attr" . format ( k ) ) return Result ( BaseCheck . MEDIUM , ( count , len ( ds . variables ) ) , 'Variabl...
Ensures all variables have a standard_name set .
31,373
def check_altitude_units ( self , ds ) : if 'z' in ds . variables : msgs = [ ] val = 'units' in ds . variables [ 'z' ] . ncattrs ( ) if not val : msgs . append ( "Variable 'z' has no units attr" ) return Result ( BaseCheck . LOW , val , 'Altitude Units' , msgs ) return Result ( BaseCheck . LOW , ( 0 , 0 ) , 'Altitude U...
If there s a variable named z it must have units .
31,374
def check_platform_variables ( self , ds ) : platform_names = getattr ( ds , 'platform' , '' ) . split ( ' ' ) val = all ( platform_name in ds . variables for platform_name in platform_names ) msgs = [ ] if not val : msgs = [ ( 'The value of "platform" global attribute should be set to another variable ' 'which contain...
The value of platform attribute should be set to another variable which contains the details of the platform . There can be multiple platforms involved depending on if all the instances of the featureType in the collection share the same platform or not . If multiple platforms are involved a variable should be defined ...
31,375
def check_geophysical_vars_fill_value ( self , ds ) : results = [ ] for geo_var in get_geophysical_variables ( ds ) : results . append ( self . _has_var_attr ( ds , geo_var , '_FillValue' , '_FillValue' , BaseCheck . MEDIUM ) , ) return results
Check that geophysical variables contain fill values .
31,376
def check_geophysical_vars_standard_name ( self , ds ) : results = [ ] for geo_var in get_geophysical_variables ( ds ) : results . append ( self . _has_var_attr ( ds , geo_var , 'standard_name' , 'geophysical variables standard_name' ) , ) return results
Check that geophysical variables contain standard names .
31,377
def is_dimensionless_standard_name ( xml_tree , standard_name ) : if not isinstance ( standard_name , basestring ) : return False found_standard_name = xml_tree . find ( ".//entry[@id='{}']" . format ( standard_name ) ) if found_standard_name is not None : canonical_units = found_standard_name . find ( 'canonical_units...
Returns True if the units for the associated standard name are dimensionless . Dimensionless standard names include those that have no units and units that are defined as constant units in the CF standard name table i . e . 1 or 1e - 3 .
31,378
def is_unitless ( ds , variable ) : units = getattr ( ds . variables [ variable ] , 'units' , None ) return units is None or units == ''
Returns true if the variable is unitless
31,379
def get_cell_boundary_map ( ds ) : boundary_map = { } for variable in ds . get_variables_by_attributes ( bounds = lambda x : x is not None ) : if variable . bounds in ds . variables : boundary_map [ variable . name ] = variable . bounds return boundary_map
Returns a dictionary mapping a variable to its boundary variable . The returned dictionary maps a string variable name to the name of the boundary variable .
31,380
def get_cell_boundary_variables ( ds ) : boundary_variables = [ ] has_bounds = ds . get_variables_by_attributes ( bounds = lambda x : x is not None ) for var in has_bounds : if var . bounds in ds . variables : boundary_variables . append ( var . bounds ) return boundary_variables
Returns a list of variable names for variables that represent cell boundaries through the bounds attribute
31,381
def get_geophysical_variables ( ds ) : parameters = [ ] for variable in ds . variables : if is_geophysical ( ds , variable ) : parameters . append ( variable ) return parameters
Returns a list of variable names for the variables detected as geophysical variables .
31,382
def get_z_variables ( nc ) : z_variables = [ ] total_coords = get_coordinate_variables ( nc ) + get_auxiliary_coordinate_variables ( nc ) for coord_name in total_coords : if coord_name in z_variables : continue coord_var = nc . variables [ coord_name ] units = getattr ( coord_var , 'units' , None ) positive = getattr (...
Returns a list of all variables matching definitions for Z
31,383
def get_latitude_variables ( nc ) : latitude_variables = [ ] for variable in nc . get_variables_by_attributes ( standard_name = "latitude" ) : latitude_variables . append ( variable . name ) for variable in nc . get_variables_by_attributes ( axis = 'Y' ) : if variable . name not in latitude_variables : latitude_variabl...
Returns a list of all variables matching definitions for latitude
31,384
def get_true_latitude_variables ( nc ) : lats = get_latitude_variables ( nc ) true_lats = [ ] for lat in lats : standard_name = getattr ( nc . variables [ lat ] , "standard_name" , None ) units = getattr ( nc . variables [ lat ] , "units" , None ) if standard_name == 'latitude' : true_lats . append ( lat ) elif isinsta...
Returns a list of variables defining true latitude .
31,385
def get_longitude_variables ( nc ) : longitude_variables = [ ] for variable in nc . get_variables_by_attributes ( standard_name = "longitude" ) : longitude_variables . append ( variable . name ) for variable in nc . get_variables_by_attributes ( axis = 'X' ) : if variable . name not in longitude_variables : longitude_v...
Returns a list of all variables matching definitions for longitude
31,386
def get_true_longitude_variables ( nc ) : lons = get_longitude_variables ( nc ) true_lons = [ ] for lon in lons : standard_name = getattr ( nc . variables [ lon ] , "standard_name" , None ) units = getattr ( nc . variables [ lon ] , "units" , None ) if standard_name == 'longitude' : true_lons . append ( lon ) elif isin...
Returns a list of variables defining true longitude .
31,387
def get_platform_variables ( ds ) : candidates = [ ] for variable in ds . variables : platform = getattr ( ds . variables [ variable ] , 'platform' , '' ) if platform and platform in ds . variables : if platform not in candidates : candidates . append ( platform ) platform = getattr ( ds , 'platform' , '' ) if platform...
Returns a list of platform variable NAMES
31,388
def get_instrument_variables ( ds ) : candidates = [ ] for variable in ds . variables : instrument = getattr ( ds . variables [ variable ] , 'instrument' , '' ) if instrument and instrument in ds . variables : if instrument not in candidates : candidates . append ( instrument ) instrument = getattr ( ds , 'instrument' ...
Returns a list of instrument variables
31,389
def get_time_variables ( ds ) : time_variables = set ( ) for variable in ds . get_variables_by_attributes ( standard_name = 'time' ) : time_variables . add ( variable . name ) for variable in ds . get_variables_by_attributes ( axis = 'T' ) : if variable . name not in time_variables : time_variables . add ( variable . n...
Returns a list of variables describing the time coordinate
31,390
def get_axis_variables ( ds ) : axis_variables = [ ] for ncvar in ds . get_variables_by_attributes ( axis = lambda x : x is not None ) : axis_variables . append ( ncvar . name ) return axis_variables
Returns a list of variables that define an axis of the dataset
31,391
def get_climatology_variable ( ds ) : time = get_time_variable ( ds ) if not time : return None if hasattr ( ds . variables [ time ] , 'climatology' ) : if ds . variables [ time ] . climatology in ds . variables : return ds . variables [ time ] . climatology return None
Returns the variable describing climatology bounds if it exists .
31,392
def get_flag_variables ( ds ) : flag_variables = [ ] for name , ncvar in ds . variables . items ( ) : standard_name = getattr ( ncvar , 'standard_name' , None ) if isinstance ( standard_name , basestring ) and 'status_flag' in standard_name : flag_variables . append ( name ) elif hasattr ( ncvar , 'flag_meanings' ) : f...
Returns a list of variables that are defined as flag variables
31,393
def get_grid_mapping_variables ( ds ) : grid_mapping_variables = [ ] for ncvar in ds . get_variables_by_attributes ( grid_mapping = lambda x : x is not None ) : if ncvar . grid_mapping in ds . variables : grid_mapping_variables . append ( ncvar . grid_mapping ) return grid_mapping_variables
Returns a list of grid mapping variables
31,394
def get_axis_map ( ds , variable ) : all_coords = get_coordinate_variables ( ds ) + get_auxiliary_coordinate_variables ( ds ) latitudes = get_latitude_variables ( ds ) longitudes = get_longitude_variables ( ds ) times = get_time_variables ( ds ) heights = get_z_variables ( ds ) coordinates = getattr ( ds . variables [ ...
Returns an axis_map dictionary that contains an axis key and the coordinate names as values .
31,395
def is_coordinate_variable ( ds , variable ) : if variable not in ds . variables : return False return ds . variables [ variable ] . dimensions == ( variable , )
Returns True if the variable is a coordinate variable
31,396
def is_compression_coordinate ( ds , variable ) : if not is_coordinate_variable ( ds , variable ) : return False compress = getattr ( ds . variables [ variable ] , 'compress' , None ) if not isinstance ( compress , basestring ) : return False if not compress : return False if variable in compress : return False for dim...
Returns True if the variable is a coordinate variable that defines a compression scheme .
31,397
def coordinate_dimension_matrix ( nc ) : retval = { } x = get_lon_variable ( nc ) if x : retval [ 'x' ] = nc . variables [ x ] . dimensions y = get_lat_variable ( nc ) if y : retval [ 'y' ] = nc . variables [ y ] . dimensions z = get_z_variable ( nc ) if z : retval [ 'z' ] = nc . variables [ z ] . dimensions t = get_ti...
Returns a dictionary of coordinates mapped to their dimensions
31,398
def is_point ( nc , variable ) : dims = nc . variables [ variable ] . dimensions cmatrix = coordinate_dimension_matrix ( nc ) first_coord = None if 't' in cmatrix : first_coord = cmatrix [ 't' ] if len ( cmatrix [ 't' ] ) > 1 : return False if 'x' in cmatrix : if first_coord is None : first_coord = cmatrix [ 'x' ] if f...
Returns true if the variable is a point feature type
31,399
def is_cf_trajectory ( nc , variable ) : dims = nc . variables [ variable ] . dimensions cmatrix = coordinate_dimension_matrix ( nc ) for req in ( 'x' , 'y' , 't' ) : if req not in cmatrix : return False if len ( cmatrix [ 'x' ] ) != 2 : return False if cmatrix [ 'x' ] != cmatrix [ 'y' ] : return False if cmatrix [ 'x'...
Returns true if the variable is a CF trajectory feature type