repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
listlengths
20
707
docstring
stringlengths
3
17.3k
docstring_tokens
listlengths
3
222
sha
stringlengths
40
40
url
stringlengths
87
242
partition
stringclasses
1 value
idx
int64
0
252k
ioos/compliance-checker
compliance_checker/cfutil.py
is_timeseries_profile_single_station
def is_timeseries_profile_single_station(nc, variable):
    '''
    Returns true if the variable is a time-series profile that represents a
    single station and each profile is the same length.

    :param netCDF4.Dataset nc: An open netCDF dataset
    :param str variable: name of the variable to check
    '''
    # Expected layout: x, y, z(z), t(t) with X(t, z)
    var_dims = nc.variables[variable].dimensions
    cmatrix = coordinate_dimension_matrix(nc)

    if any(axis not in cmatrix for axis in ('x', 'y', 'z', 't')):
        return False
    # A single station means the horizontal coordinates are scalar
    if len(cmatrix['x']) != 0 or cmatrix['x'] != cmatrix['y']:
        return False
    zvar = get_z_variable(nc)
    if cmatrix['z'] != (zvar,):
        return False
    timevar = get_time_variable(nc)
    if cmatrix['t'] != (timevar,):
        return False
    return var_dims == (timevar, zvar)
python
def is_timeseries_profile_single_station(nc, variable): ''' Returns true if the variable is a time-series profile that represents a single station and each profile is the same length. :param netCDF4.Dataset nc: An open netCDF dataset :param str variable: name of the variable to check ''' # x, y, z(z), t(t) # X(t, z) dims = nc.variables[variable].dimensions cmatrix = coordinate_dimension_matrix(nc) for req in ('x', 'y', 'z', 't'): if req not in cmatrix: return False if len(cmatrix['x']) != 0: return False if cmatrix['x'] != cmatrix['y']: return False z = get_z_variable(nc) if cmatrix['z'] != (z,): return False t = get_time_variable(nc) if cmatrix['t'] != (t,): return False if dims == (t, z): return True return False
[ "def", "is_timeseries_profile_single_station", "(", "nc", ",", "variable", ")", ":", "# x, y, z(z), t(t)", "# X(t, z)", "dims", "=", "nc", ".", "variables", "[", "variable", "]", ".", "dimensions", "cmatrix", "=", "coordinate_dimension_matrix", "(", "nc", ")", "fo...
Returns true if the variable is a time-series profile that represents a single station and each profile is the same length. :param netCDF4.Dataset nc: An open netCDF dataset :param str variable: name of the variable to check
[ "Returns", "true", "if", "the", "variable", "is", "a", "time", "-", "series", "profile", "that", "represents", "a", "single", "station", "and", "each", "profile", "is", "the", "same", "length", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cfutil.py#L1114-L1145
train
31,400
ioos/compliance-checker
compliance_checker/cfutil.py
is_timeseries_profile_ortho_depth
def is_timeseries_profile_ortho_depth(nc, variable):
    '''
    Returns true if the variable is a time-series profile with orthogonal
    depth only.

    :param netCDF4.Dataset nc: An open netCDF dataset
    :param str variable: name of the variable to check
    '''
    # Expected layout: x(i), y(i), z(z), t(i, j) with X(i, j, z)
    var_dims = nc.variables[variable].dimensions
    cmatrix = coordinate_dimension_matrix(nc)

    if any(axis not in cmatrix for axis in ('x', 'y', 'z', 't')):
        return False
    # Horizontal coordinates share a single station dimension i
    if len(cmatrix['x']) != 1 or cmatrix['x'] != cmatrix['y']:
        return False
    # Depth is its own orthogonal dimension
    zvar = get_z_variable(nc)
    if cmatrix['z'] != (zvar,):
        return False
    station_dim = cmatrix['x'][0]
    # Time varies per station and per profile: t(i, j)
    if len(cmatrix['t']) != 2 or cmatrix['t'][0] != station_dim:
        return False
    profile_dim = cmatrix['t'][1]
    return var_dims == (station_dim, profile_dim, zvar)
python
def is_timeseries_profile_ortho_depth(nc, variable): ''' Returns true if the variable is a time-series profile with orthogonal depth only. :param netCDF4.Dataset nc: An open netCDF dataset :param str variable: name of the variable to check ''' # x(i), y(i), z(z), t(i, j) # X(i, j, z) dims = nc.variables[variable].dimensions cmatrix = coordinate_dimension_matrix(nc) for req in ('x', 'y', 'z', 't'): if req not in cmatrix: return False if len(cmatrix['x']) != 1: return False if cmatrix['x'] != cmatrix['y']: return False z = get_z_variable(nc) if cmatrix['z'] != (z,): return False i = cmatrix['x'][0] if len(cmatrix['t']) != 2: return False if cmatrix['t'][0] != i: return False j = cmatrix['t'][1] if dims == (i, j, z): return True return False
[ "def", "is_timeseries_profile_ortho_depth", "(", "nc", ",", "variable", ")", ":", "# x(i), y(i), z(z), t(i, j)", "# X(i, j, z)", "dims", "=", "nc", ".", "variables", "[", "variable", "]", ".", "dimensions", "cmatrix", "=", "coordinate_dimension_matrix", "(", "nc", "...
Returns true if the variable is a time-series profile with orthogonal depth only. :param netCDF4.Dataset nc: An open netCDF dataset :param str variable: name of the variable to check
[ "Returns", "true", "if", "the", "variable", "is", "a", "time", "-", "series", "profile", "with", "orthogonal", "depth", "only", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cfutil.py#L1262-L1299
train
31,401
ioos/compliance-checker
compliance_checker/cfutil.py
is_trajectory_profile_orthogonal
def is_trajectory_profile_orthogonal(nc, variable):
    '''
    Returns true if the variable is a trajectory profile with orthogonal
    depths.

    :param netCDF4.Dataset nc: An open netCDF dataset
    :param str variable: name of the variable to check
    '''
    # Expected layout: x(i, o), y(i, o), z(z), t(i, o) with X(i, o, z)
    var_dims = nc.variables[variable].dimensions
    cmatrix = coordinate_dimension_matrix(nc)

    if any(axis not in cmatrix for axis in ('x', 'y', 'z', 't')):
        return False
    # x, y and t must all share the (trajectory, observation) dimension pair
    if len(cmatrix['x']) != 2:
        return False
    if cmatrix['x'] != cmatrix['y'] or cmatrix['x'] != cmatrix['t']:
        return False
    traj_dim, obs_dim = cmatrix['x']
    # Depth is a single orthogonal dimension
    zvar = get_z_variable(nc)
    if cmatrix['z'] != (zvar,):
        return False
    return var_dims == (traj_dim, obs_dim, zvar)
python
def is_trajectory_profile_orthogonal(nc, variable): ''' Returns true if the variable is a trajectory profile with orthogonal depths. :param netCDF4.Dataset nc: An open netCDF dataset :param str variable: name of the variable to check ''' # x(i, o), y(i, o), z(z), t(i, o) # X(i, o, z) dims = nc.variables[variable].dimensions cmatrix = coordinate_dimension_matrix(nc) for req in ('x', 'y', 'z', 't'): if req not in cmatrix: return False if len(cmatrix['x']) != 2: return False if cmatrix['x'] != cmatrix['y']: return False if cmatrix['x'] != cmatrix['t']: return False i, o = cmatrix['x'] z = get_z_variable(nc) if cmatrix['z'] != (z,): return False if dims == (i, o, z): return True return False
[ "def", "is_trajectory_profile_orthogonal", "(", "nc", ",", "variable", ")", ":", "# x(i, o), y(i, o), z(z), t(i, o)", "# X(i, o, z)", "dims", "=", "nc", ".", "variables", "[", "variable", "]", ".", "dimensions", "cmatrix", "=", "coordinate_dimension_matrix", "(", "nc"...
Returns true if the variable is a trajectory profile with orthogonal depths. :param netCDF4.Dataset nc: An open netCDF dataset :param str variable: name of the variable to check
[ "Returns", "true", "if", "the", "variable", "is", "a", "trajectory", "profile", "with", "orthogonal", "depths", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cfutil.py#L1344-L1376
train
31,402
ioos/compliance-checker
compliance_checker/cfutil.py
is_mapped_grid
def is_mapped_grid(nc, variable):
    '''
    Returns true if the feature-type of variable corresponds to a mapped grid
    type. Characterized by Appendix F of CF-1.6

    :param netCDF4.Dataset nc: An open netCDF dataset
    :param str variable: name of the variable to check
    '''
    # x(j, i), y(j, i), z?, t?
    # F(t?, z?, j, i)
    # The important and really defining characteristic of mapped grids is that
    # the true latitude and longitude coordinates are functions of (j,i) and
    # that the geophysical variables are also functions of (j,i) in their
    # dimensions.
    dims = nc.variables[variable].dimensions

    # For cases like ROMS, the coordinates are mapped using the coordinates
    # attribute
    variable_coordinates = getattr(nc.variables[variable], 'coordinates', '').split()

    lons = get_longitude_variables(nc)
    for lon in lons:
        if lon in variable_coordinates:
            break
    else:
        lon = get_lon_variable(nc)

    if lon is None:
        return False

    lats = get_latitude_variables(nc)
    for lat in lats:
        if lat in variable_coordinates:
            break
    else:
        lat = get_lat_variable(nc)

    if lat is None:
        return False

    x = nc.variables[lon].dimensions
    y = nc.variables[lat].dimensions

    if len(x) != 2:
        return False
    if x != y:
        return False

    # The mapping dimensions (j, i) must appear, in the same order, as a
    # contiguous run inside the variable's dimensions.  The previous
    # implementation compared comma-joined strings, which false-positives
    # whenever one dimension name is a suffix of another (e.g. ('i', 'j')
    # "matched" a variable dimensioned ('ii', 'j') because "i,j" is a
    # substring of "ii,j").  Compare tuple slices instead.
    n = len(x)
    if not any(tuple(dims[k:k + n]) == tuple(x)
               for k in range(len(dims) - n + 1)):
        return False
    return True
python
def is_mapped_grid(nc, variable): ''' Returns true if the feature-type of variable corresponds to a mapped grid type. Characterized by Appedix F of CF-1.6 :param netCDF4.Dataset nc: An open netCDF dataset :param str variable: name of the variable to check ''' # x(j, i), y(j, i), z?, t? # F(t?, z?, j, i) # The important and really defining characteristic of mapped grids is that # the true latitude and longitude coordinates are functions of (j,i) and # that the geophysical variables are also functions of (j,i) in their # dimensions. dims = nc.variables[variable].dimensions # For cases like ROMS, the coordinates are mapped using the coordinates attribute variable_coordinates = getattr(nc.variables[variable], 'coordinates', '').split() lons = get_longitude_variables(nc) for lon in lons: if lon in variable_coordinates: break else: lon = get_lon_variable(nc) if lon is None: return False lats = get_latitude_variables(nc) for lat in lats: if lat in variable_coordinates: break else: lat = get_lat_variable(nc) if lat is None: return False x = nc.variables[lon].dimensions y = nc.variables[lat].dimensions if len(x) != 2: return False if x != y: return False comma_dimension = ','.join(dims) # Dimensions must be in the same order and the mapping coordinates i and j # must be in the same order if ','.join(x) not in comma_dimension: return False return True
[ "def", "is_mapped_grid", "(", "nc", ",", "variable", ")", ":", "# x(j, i), y(j, i), z?, t?", "# F(t?, z?, j, i)", "# The important and really defining characteristic of mapped grids is that", "# the true latitude and longitude coordinates are functions of (j,i) and", "# that the geophysical ...
Returns true if the feature-type of variable corresponds to a mapped grid type. Characterized by Appendix F of CF-1.6 :param netCDF4.Dataset nc: An open netCDF dataset :param str variable: name of the variable to check
[ "Returns", "true", "if", "the", "feature", "-", "type", "of", "variable", "corresponds", "to", "a", "mapped", "grid", "type", ".", "Characterized", "by", "Appendix", "F", "of", "CF", "-", "1", ".", "6" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cfutil.py#L1575-L1628
train
31,403
ioos/compliance-checker
compliance_checker/cfutil.py
is_reduced_grid
def is_reduced_grid(nc, variable):
    '''
    Returns True if the feature-type of the variable corresponds to a reduced
    horizontal grid.

    :param netCDF4.Dataset nc: An open netCDF dataset
    :param str variable: name of the variable to check
    '''
    axis_map = get_axis_map(nc, variable)

    # A reduced grid needs X, Y and a compressed (C) axis
    if 'X' not in axis_map:
        return False
    if 'Y' not in axis_map:
        return False
    if 'C' not in axis_map:
        return False

    compressed_coordinates = axis_map['C']
    # There must be exactly one compressed coordinate.  The previous check
    # (`> 1`) let an empty list through and then raised IndexError on the
    # subscript below.
    if len(compressed_coordinates) != 1:
        return False

    compressed_coordinate = compressed_coordinates[0]
    # Every dimension named by the compress attribute must exist in the
    # dataset
    for dim in nc.variables[compressed_coordinate].compress.split():
        if dim not in nc.dimensions:
            return False
    return True
python
def is_reduced_grid(nc, variable): ''' Returns True if the feature-type of the variable corresponds to a reduced horizontal grid. :param netCDF4.Dataset nc: An open netCDF dataset :param str variable: name of the variable to check ''' axis_map = get_axis_map(nc, variable) if 'X' not in axis_map: return False if 'Y' not in axis_map: return False if 'C' not in axis_map: return False compressed_coordinates = axis_map['C'] if len(compressed_coordinates) > 1: return False compressed_coordinate = axis_map['C'][0] for dim in nc.variables[compressed_coordinate].compress.split(): if dim not in nc.dimensions: return False return True
[ "def", "is_reduced_grid", "(", "nc", ",", "variable", ")", ":", "axis_map", "=", "get_axis_map", "(", "nc", ",", "variable", ")", "if", "'X'", "not", "in", "axis_map", ":", "return", "False", "if", "'Y'", "not", "in", "axis_map", ":", "return", "False", ...
Returns True if the feature-type of the variable corresponds to a reduced horizontal grid. :param netCDF4.Dataset nc: An open netCDF dataset :param str variable: name of the variable to check
[ "Returns", "True", "if", "the", "feature", "-", "type", "of", "the", "variable", "corresponds", "to", "a", "reduced", "horizontal", "grid", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cfutil.py#L1631-L1655
train
31,404
ioos/compliance-checker
compliance_checker/cfutil.py
guess_feature_type
def guess_feature_type(nc, variable):
    '''
    Returns a string describing the feature type for this variable

    :param netCDF4.Dataset nc: An open netCDF dataset
    :param str variable: name of the variable to check
    '''
    # Predicates are tried in order; the first match names the feature type.
    # Falls through to None when no predicate matches.
    feature_checks = (
        (is_point, 'point'),
        (is_timeseries, 'timeseries'),
        (is_multi_timeseries_orthogonal, 'multi-timeseries-orthogonal'),
        (is_multi_timeseries_incomplete, 'multi-timeseries-incomplete'),
        (is_cf_trajectory, 'cf-trajectory'),
        (is_single_trajectory, 'single-trajectory'),
        (is_profile_orthogonal, 'profile-orthogonal'),
        (is_profile_incomplete, 'profile-incomplete'),
        (is_timeseries_profile_single_station, 'timeseries-profile-single-station'),
        (is_timeseries_profile_multi_station, 'timeseries-profile-multi-station'),
        (is_timeseries_profile_single_ortho_time, 'timeseries-profile-single-ortho-time'),
        (is_timeseries_profile_multi_ortho_time, 'timeseries-profile-multi-ortho-time'),
        (is_timeseries_profile_ortho_depth, 'timeseries-profile-ortho-depth'),
        (is_timeseries_profile_incomplete, 'timeseries-profile-incomplete'),
        (is_trajectory_profile_orthogonal, 'trajectory-profile-orthogonal'),
        (is_trajectory_profile_incomplete, 'trajectory-profile-incomplete'),
        (is_2d_regular_grid, '2d-regular-grid'),
        (is_2d_static_grid, '2d-static-grid'),
        (is_3d_regular_grid, '3d-regular-grid'),
        (is_3d_static_grid, '3d-static-grid'),
        (is_mapped_grid, 'mapped-grid'),
        (is_reduced_grid, 'reduced-grid'),
    )
    for predicate, feature_type in feature_checks:
        if predicate(nc, variable):
            return feature_type
    return None
python
def guess_feature_type(nc, variable): ''' Returns a string describing the feature type for this variable :param netCDF4.Dataset nc: An open netCDF dataset :param str variable: name of the variable to check ''' if is_point(nc, variable): return 'point' if is_timeseries(nc, variable): return 'timeseries' if is_multi_timeseries_orthogonal(nc, variable): return 'multi-timeseries-orthogonal' if is_multi_timeseries_incomplete(nc, variable): return 'multi-timeseries-incomplete' if is_cf_trajectory(nc, variable): return 'cf-trajectory' if is_single_trajectory(nc, variable): return 'single-trajectory' if is_profile_orthogonal(nc, variable): return 'profile-orthogonal' if is_profile_incomplete(nc, variable): return 'profile-incomplete' if is_timeseries_profile_single_station(nc, variable): return 'timeseries-profile-single-station' if is_timeseries_profile_multi_station(nc, variable): return 'timeseries-profile-multi-station' if is_timeseries_profile_single_ortho_time(nc, variable): return 'timeseries-profile-single-ortho-time' if is_timeseries_profile_multi_ortho_time(nc, variable): return 'timeseries-profile-multi-ortho-time' if is_timeseries_profile_ortho_depth(nc, variable): return 'timeseries-profile-ortho-depth' if is_timeseries_profile_incomplete(nc, variable): return 'timeseries-profile-incomplete' if is_trajectory_profile_orthogonal(nc, variable): return 'trajectory-profile-orthogonal' if is_trajectory_profile_incomplete(nc, variable): return 'trajectory-profile-incomplete' if is_2d_regular_grid(nc, variable): return '2d-regular-grid' if is_2d_static_grid(nc, variable): return '2d-static-grid' if is_3d_regular_grid(nc, variable): return '3d-regular-grid' if is_3d_static_grid(nc, variable): return '3d-static-grid' if is_mapped_grid(nc, variable): return 'mapped-grid' if is_reduced_grid(nc, variable): return 'reduced-grid'
[ "def", "guess_feature_type", "(", "nc", ",", "variable", ")", ":", "if", "is_point", "(", "nc", ",", "variable", ")", ":", "return", "'point'", "if", "is_timeseries", "(", "nc", ",", "variable", ")", ":", "return", "'timeseries'", "if", "is_multi_timeseries_...
Returns a string describing the feature type for this variable :param netCDF4.Dataset nc: An open netCDF dataset :param str variable: name of the variable to check
[ "Returns", "a", "string", "describing", "the", "feature", "type", "for", "this", "variable" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cfutil.py#L1658-L1708
train
31,405
ioos/compliance-checker
compliance_checker/cfutil.py
units_convertible
def units_convertible(units1, units2, reftimeistime=True):
    """
    Return True if a Unit representing the string units1 can be converted
    to a Unit representing the string units2, else False.

    :param str units1: A string representing the units
    :param str units2: A string representing the units
    """
    # A unit string that cannot even be parsed is not convertible
    try:
        source = Unit(units1)
        target = Unit(units2)
    except ValueError:
        return False
    return source.is_convertible(target)
python
def units_convertible(units1, units2, reftimeistime=True): """ Return True if a Unit representing the string units1 can be converted to a Unit representing the string units2, else False. :param str units1: A string representing the units :param str units2: A string representing the units """ try: u1 = Unit(units1) u2 = Unit(units2) except ValueError: return False return u1.is_convertible(u2)
[ "def", "units_convertible", "(", "units1", ",", "units2", ",", "reftimeistime", "=", "True", ")", ":", "try", ":", "u1", "=", "Unit", "(", "units1", ")", "u2", "=", "Unit", "(", "units2", ")", "except", "ValueError", ":", "return", "False", "return", "...
Return True if a Unit representing the string units1 can be converted to a Unit representing the string units2, else False. :param str units1: A string representing the units :param str units2: A string representing the units
[ "Return", "True", "if", "a", "Unit", "representing", "the", "string", "units1", "can", "be", "converted", "to", "a", "Unit", "representing", "the", "string", "units2", "else", "False", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cfutil.py#L1711-L1724
train
31,406
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck.setup
def setup(self, ds): """ Initialize various special variable types within the class. Mutates a number of instance variables. :param netCDF4.Dataset ds: An open netCDF dataset """ self._find_coord_vars(ds) self._find_aux_coord_vars(ds) self._find_ancillary_vars(ds) self._find_clim_vars(ds) self._find_boundary_vars(ds) self._find_metadata_vars(ds) self._find_cf_standard_name_table(ds) self._find_geophysical_vars(ds)
python
def setup(self, ds): """ Initialize various special variable types within the class. Mutates a number of instance variables. :param netCDF4.Dataset ds: An open netCDF dataset """ self._find_coord_vars(ds) self._find_aux_coord_vars(ds) self._find_ancillary_vars(ds) self._find_clim_vars(ds) self._find_boundary_vars(ds) self._find_metadata_vars(ds) self._find_cf_standard_name_table(ds) self._find_geophysical_vars(ds)
[ "def", "setup", "(", "self", ",", "ds", ")", ":", "self", ".", "_find_coord_vars", "(", "ds", ")", "self", ".", "_find_aux_coord_vars", "(", "ds", ")", "self", ".", "_find_ancillary_vars", "(", "ds", ")", "self", ".", "_find_clim_vars", "(", "ds", ")", ...
Initialize various special variable types within the class. Mutates a number of instance variables. :param netCDF4.Dataset ds: An open netCDF dataset
[ "Initialize", "various", "special", "variable", "types", "within", "the", "class", ".", "Mutates", "a", "number", "of", "instance", "variables", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L138-L152
train
31,407
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck._find_cf_standard_name_table
def _find_cf_standard_name_table(self, ds): ''' Parse out the `standard_name_vocabulary` attribute and download that version of the cf standard name table. If the standard name table has already been downloaded, use the cached version. Modifies `_std_names` attribute to store standard names. Returns True if the file exists and False if it fails to download. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: bool ''' # Get the standard name vocab standard_name_vocabulary = getattr(ds, 'standard_name_vocabulary', '') # Try to parse this attribute to get version version = None try: if 'cf standard name table' in standard_name_vocabulary.lower(): version = [s.strip('(').strip(')').strip('v').strip(',') for s in standard_name_vocabulary.split()] # This assumes that table version number won't start with 0. version = [s for s in version if s.isdigit() and len(s) <= 2 and not s.startswith('0')] if len(version) > 1: return False else: version = version[0] else: # Can't parse the attribute, use the packaged version return False # usually raised from .lower() with an incompatible (non-string) # data type except AttributeError: warn("Cannot convert standard name table to lowercase. This can " "occur if a non-string standard_name_vocabulary global " "attribute is supplied") return False if version.startswith('v'): # i.e 'v34' -> '34' drop the v version = version[1:] # If the packaged version is what we're after, then we're good if version == self._std_names._version: print("Using packaged standard name table v{0}".format(version), file=sys.stderr) return False # Try to download the version specified try: data_directory = util.create_cached_data_dir() location = os.path.join(data_directory, 'cf-standard-name-table-test-{0}.xml'.format(version)) # Did we already download this before? 
if not os.path.isfile(location): util.download_cf_standard_name_table(version, location) print("Using downloaded standard name table v{0}".format(version), file=sys.stderr) else: print("Using cached standard name table v{0} from {1}".format(version, location), file=sys.stderr) self._std_names = util.StandardNameTable(location) return True except Exception as e: # There was an error downloading the CF table. That's ok, we'll just use the packaged version warn("Problem fetching standard name table:\n{0}\n" "Using packaged v{1}".format(e, self._std_names._version)) return False
python
def _find_cf_standard_name_table(self, ds): ''' Parse out the `standard_name_vocabulary` attribute and download that version of the cf standard name table. If the standard name table has already been downloaded, use the cached version. Modifies `_std_names` attribute to store standard names. Returns True if the file exists and False if it fails to download. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: bool ''' # Get the standard name vocab standard_name_vocabulary = getattr(ds, 'standard_name_vocabulary', '') # Try to parse this attribute to get version version = None try: if 'cf standard name table' in standard_name_vocabulary.lower(): version = [s.strip('(').strip(')').strip('v').strip(',') for s in standard_name_vocabulary.split()] # This assumes that table version number won't start with 0. version = [s for s in version if s.isdigit() and len(s) <= 2 and not s.startswith('0')] if len(version) > 1: return False else: version = version[0] else: # Can't parse the attribute, use the packaged version return False # usually raised from .lower() with an incompatible (non-string) # data type except AttributeError: warn("Cannot convert standard name table to lowercase. This can " "occur if a non-string standard_name_vocabulary global " "attribute is supplied") return False if version.startswith('v'): # i.e 'v34' -> '34' drop the v version = version[1:] # If the packaged version is what we're after, then we're good if version == self._std_names._version: print("Using packaged standard name table v{0}".format(version), file=sys.stderr) return False # Try to download the version specified try: data_directory = util.create_cached_data_dir() location = os.path.join(data_directory, 'cf-standard-name-table-test-{0}.xml'.format(version)) # Did we already download this before? 
if not os.path.isfile(location): util.download_cf_standard_name_table(version, location) print("Using downloaded standard name table v{0}".format(version), file=sys.stderr) else: print("Using cached standard name table v{0} from {1}".format(version, location), file=sys.stderr) self._std_names = util.StandardNameTable(location) return True except Exception as e: # There was an error downloading the CF table. That's ok, we'll just use the packaged version warn("Problem fetching standard name table:\n{0}\n" "Using packaged v{1}".format(e, self._std_names._version)) return False
[ "def", "_find_cf_standard_name_table", "(", "self", ",", "ds", ")", ":", "# Get the standard name vocab", "standard_name_vocabulary", "=", "getattr", "(", "ds", ",", "'standard_name_vocabulary'", ",", "''", ")", "# Try to parse this attribute to get version", "version", "="...
Parse out the `standard_name_vocabulary` attribute and download that version of the cf standard name table. If the standard name table has already been downloaded, use the cached version. Modifies `_std_names` attribute to store standard names. Returns True if the file exists and False if it fails to download. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: bool
[ "Parse", "out", "the", "standard_name_vocabulary", "attribute", "and", "download", "that", "version", "of", "the", "cf", "standard", "name", "table", ".", "If", "the", "standard", "name", "table", "has", "already", "been", "downloaded", "use", "the", "cached", ...
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L154-L215
train
31,408
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck._find_ancillary_vars
def _find_ancillary_vars(self, ds, refresh=False): ''' Returns a list of variable names that are defined as ancillary variables in the dataset ds. An ancillary variable generally is a metadata container and referenced from other variables via a string reference in an attribute. - via ancillary_variables (3.4) - "grid mapping var" (5.6) - TODO: more? The result is cached by the passed in dataset object inside of this checker. Pass refresh=True to redo the cached value. :param netCDF4.Dataset ds: An open netCDF dataset :param bool refresh: if refresh is set to True, the cache is invalidated. :rtype: list :return: List of variable names (str) that are defined as ancillary variables in the dataset ds. ''' # Used the cached version if it exists and is not empty if self._ancillary_vars.get(ds, None) and refresh is False: return self._ancillary_vars[ds] # Invalidate the cache at all costs self._ancillary_vars[ds] = [] for name, var in ds.variables.items(): if hasattr(var, 'ancillary_variables'): for anc_name in var.ancillary_variables.split(" "): if anc_name in ds.variables: self._ancillary_vars[ds].append(anc_name) if hasattr(var, 'grid_mapping'): gm_name = var.grid_mapping if gm_name in ds.variables: self._ancillary_vars[ds].append(gm_name) return self._ancillary_vars[ds]
python
def _find_ancillary_vars(self, ds, refresh=False): ''' Returns a list of variable names that are defined as ancillary variables in the dataset ds. An ancillary variable generally is a metadata container and referenced from other variables via a string reference in an attribute. - via ancillary_variables (3.4) - "grid mapping var" (5.6) - TODO: more? The result is cached by the passed in dataset object inside of this checker. Pass refresh=True to redo the cached value. :param netCDF4.Dataset ds: An open netCDF dataset :param bool refresh: if refresh is set to True, the cache is invalidated. :rtype: list :return: List of variable names (str) that are defined as ancillary variables in the dataset ds. ''' # Used the cached version if it exists and is not empty if self._ancillary_vars.get(ds, None) and refresh is False: return self._ancillary_vars[ds] # Invalidate the cache at all costs self._ancillary_vars[ds] = [] for name, var in ds.variables.items(): if hasattr(var, 'ancillary_variables'): for anc_name in var.ancillary_variables.split(" "): if anc_name in ds.variables: self._ancillary_vars[ds].append(anc_name) if hasattr(var, 'grid_mapping'): gm_name = var.grid_mapping if gm_name in ds.variables: self._ancillary_vars[ds].append(gm_name) return self._ancillary_vars[ds]
[ "def", "_find_ancillary_vars", "(", "self", ",", "ds", ",", "refresh", "=", "False", ")", ":", "# Used the cached version if it exists and is not empty", "if", "self", ".", "_ancillary_vars", ".", "get", "(", "ds", ",", "None", ")", "and", "refresh", "is", "Fals...
Returns a list of variable names that are defined as ancillary variables in the dataset ds. An ancillary variable generally is a metadata container and referenced from other variables via a string reference in an attribute. - via ancillary_variables (3.4) - "grid mapping var" (5.6) - TODO: more? The result is cached by the passed in dataset object inside of this checker. Pass refresh=True to redo the cached value. :param netCDF4.Dataset ds: An open netCDF dataset :param bool refresh: if refresh is set to True, the cache is invalidated. :rtype: list :return: List of variable names (str) that are defined as ancillary variables in the dataset ds.
[ "Returns", "a", "list", "of", "variable", "names", "that", "are", "defined", "as", "ancillary", "variables", "in", "the", "dataset", "ds", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L259-L300
train
31,409
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck._find_metadata_vars
def _find_metadata_vars(self, ds, refresh=False): ''' Returns a list of netCDF variable instances for those that are likely metadata variables :param netCDF4.Dataset ds: An open netCDF dataset :param bool refresh: if refresh is set to True, the cache is invalidated. :rtype: list :return: List of variable names (str) that are likely metadata variable candidates. ''' if self._metadata_vars.get(ds, None) and refresh is False: return self._metadata_vars[ds] self._metadata_vars[ds] = [] for name, var in ds.variables.items(): if name in self._find_ancillary_vars(ds) or name in self._find_coord_vars(ds): continue if name in ('platform_name', 'station_name', 'instrument_name', 'station_id', 'platform_id', 'surface_altitude'): self._metadata_vars[ds].append(name) elif getattr(var, 'cf_role', '') != '': self._metadata_vars[ds].append(name) elif getattr(var, 'standard_name', None) is None and len(var.dimensions) == 0: self._metadata_vars[ds].append(name) return self._metadata_vars[ds]
python
def _find_metadata_vars(self, ds, refresh=False): ''' Returns a list of netCDF variable instances for those that are likely metadata variables :param netCDF4.Dataset ds: An open netCDF dataset :param bool refresh: if refresh is set to True, the cache is invalidated. :rtype: list :return: List of variable names (str) that are likely metadata variable candidates. ''' if self._metadata_vars.get(ds, None) and refresh is False: return self._metadata_vars[ds] self._metadata_vars[ds] = [] for name, var in ds.variables.items(): if name in self._find_ancillary_vars(ds) or name in self._find_coord_vars(ds): continue if name in ('platform_name', 'station_name', 'instrument_name', 'station_id', 'platform_id', 'surface_altitude'): self._metadata_vars[ds].append(name) elif getattr(var, 'cf_role', '') != '': self._metadata_vars[ds].append(name) elif getattr(var, 'standard_name', None) is None and len(var.dimensions) == 0: self._metadata_vars[ds].append(name) return self._metadata_vars[ds]
[ "def", "_find_metadata_vars", "(", "self", ",", "ds", ",", "refresh", "=", "False", ")", ":", "if", "self", ".", "_metadata_vars", ".", "get", "(", "ds", ",", "None", ")", "and", "refresh", "is", "False", ":", "return", "self", ".", "_metadata_vars", "...
Returns a list of netCDF variable instances for those that are likely metadata variables :param netCDF4.Dataset ds: An open netCDF dataset :param bool refresh: if refresh is set to True, the cache is invalidated. :rtype: list :return: List of variable names (str) that are likely metadata variable candidates.
[ "Returns", "a", "list", "of", "netCDF", "variable", "instances", "for", "those", "that", "are", "likely", "metadata", "variables" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L302-L332
train
31,410
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck._find_geophysical_vars
def _find_geophysical_vars(self, ds, refresh=False): ''' Returns a list of geophysical variables. Modifies `self._geophysical_vars` :param netCDF4.Dataset ds: An open netCDF dataset :param bool refresh: if refresh is set to True, the cache is invalidated. :rtype: list :return: A list containing strings with geophysical variable names. ''' if self._geophysical_vars.get(ds, None) and refresh is False: return self._geophysical_vars[ds] self._geophysical_vars[ds] = cfutil.get_geophysical_variables(ds) return self._geophysical_vars[ds]
python
def _find_geophysical_vars(self, ds, refresh=False): ''' Returns a list of geophysical variables. Modifies `self._geophysical_vars` :param netCDF4.Dataset ds: An open netCDF dataset :param bool refresh: if refresh is set to True, the cache is invalidated. :rtype: list :return: A list containing strings with geophysical variable names. ''' if self._geophysical_vars.get(ds, None) and refresh is False: return self._geophysical_vars[ds] self._geophysical_vars[ds] = cfutil.get_geophysical_variables(ds) return self._geophysical_vars[ds]
[ "def", "_find_geophysical_vars", "(", "self", ",", "ds", ",", "refresh", "=", "False", ")", ":", "if", "self", ".", "_geophysical_vars", ".", "get", "(", "ds", ",", "None", ")", "and", "refresh", "is", "False", ":", "return", "self", ".", "_geophysical_v...
Returns a list of geophysical variables. Modifies `self._geophysical_vars` :param netCDF4.Dataset ds: An open netCDF dataset :param bool refresh: if refresh is set to True, the cache is invalidated. :rtype: list :return: A list containing strings with geophysical variable names.
[ "Returns", "a", "list", "of", "geophysical", "variables", ".", "Modifies", "self", ".", "_geophysical_vars" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L334-L351
train
31,411
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck._find_boundary_vars
def _find_boundary_vars(self, ds, refresh=False): ''' Returns dictionary of boundary variables mapping the variable instance to the name of the variable acting as a boundary variable. :param netCDF4.Dataset ds: An open netCDF dataset :param bool refresh: if refresh is set to True, the cache is invalidated. :rtype: list :return: A list containing strings with boundary variable names. ''' if self._boundary_vars.get(ds, None) and refresh is False: return self._boundary_vars[ds] self._boundary_vars[ds] = cfutil.get_cell_boundary_variables(ds) return self._boundary_vars[ds]
python
def _find_boundary_vars(self, ds, refresh=False): ''' Returns dictionary of boundary variables mapping the variable instance to the name of the variable acting as a boundary variable. :param netCDF4.Dataset ds: An open netCDF dataset :param bool refresh: if refresh is set to True, the cache is invalidated. :rtype: list :return: A list containing strings with boundary variable names. ''' if self._boundary_vars.get(ds, None) and refresh is False: return self._boundary_vars[ds] self._boundary_vars[ds] = cfutil.get_cell_boundary_variables(ds) return self._boundary_vars[ds]
[ "def", "_find_boundary_vars", "(", "self", ",", "ds", ",", "refresh", "=", "False", ")", ":", "if", "self", ".", "_boundary_vars", ".", "get", "(", "ds", ",", "None", ")", "and", "refresh", "is", "False", ":", "return", "self", ".", "_boundary_vars", "...
Returns dictionary of boundary variables mapping the variable instance to the name of the variable acting as a boundary variable. :param netCDF4.Dataset ds: An open netCDF dataset :param bool refresh: if refresh is set to True, the cache is invalidated. :rtype: list :return: A list containing strings with boundary variable names.
[ "Returns", "dictionary", "of", "boundary", "variables", "mapping", "the", "variable", "instance", "to", "the", "name", "of", "the", "variable", "acting", "as", "a", "boundary", "variable", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L374-L390
train
31,412
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck.check_data_types
def check_data_types(self, ds): ''' Checks the data type of all netCDF variables to ensure they are valid data types under CF. CF §2.2 The netCDF data types char, byte, short, int, float or real, and double are all acceptable :param netCDF4.Dataset ds: An open netCDF dataset :rtype: compliance_checker.base.Result ''' fails = [] total = len(ds.variables) for k, v in ds.variables.items(): if (v.dtype.kind != 'S' and all(v.dtype.type != t for t in (np.character, np.dtype('|S1'), np.dtype('b'), np.dtype('i2'), np.dtype('i4'), np.float32, np.double))): fails.append('The variable {} failed because the datatype is {}'.format(k, v.datatype)) return Result(BaseCheck.HIGH, (total - len(fails), total), self.section_titles["2.2"], msgs=fails)
python
def check_data_types(self, ds): ''' Checks the data type of all netCDF variables to ensure they are valid data types under CF. CF §2.2 The netCDF data types char, byte, short, int, float or real, and double are all acceptable :param netCDF4.Dataset ds: An open netCDF dataset :rtype: compliance_checker.base.Result ''' fails = [] total = len(ds.variables) for k, v in ds.variables.items(): if (v.dtype.kind != 'S' and all(v.dtype.type != t for t in (np.character, np.dtype('|S1'), np.dtype('b'), np.dtype('i2'), np.dtype('i4'), np.float32, np.double))): fails.append('The variable {} failed because the datatype is {}'.format(k, v.datatype)) return Result(BaseCheck.HIGH, (total - len(fails), total), self.section_titles["2.2"], msgs=fails)
[ "def", "check_data_types", "(", "self", ",", "ds", ")", ":", "fails", "=", "[", "]", "total", "=", "len", "(", "ds", ".", "variables", ")", "for", "k", ",", "v", "in", "ds", ".", "variables", ".", "items", "(", ")", ":", "if", "(", "v", ".", ...
Checks the data type of all netCDF variables to ensure they are valid data types under CF. CF §2.2 The netCDF data types char, byte, short, int, float or real, and double are all acceptable :param netCDF4.Dataset ds: An open netCDF dataset :rtype: compliance_checker.base.Result
[ "Checks", "the", "data", "type", "of", "all", "netCDF", "variables", "to", "ensure", "they", "are", "valid", "data", "types", "under", "CF", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L398-L419
train
31,413
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck.check_naming_conventions
def check_naming_conventions(self, ds): ''' Checks the variable names to ensure they are valid CF variable names under CF. CF §2.3 Variable, dimension and attribute names should begin with a letter and be composed of letters, digits, and underscores. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: compliance_checker.base.Result ''' ret_val = [] variable_naming = TestCtx(BaseCheck.MEDIUM, self.section_titles['2.3']) dimension_naming = TestCtx(BaseCheck.MEDIUM, self.section_titles['2.3']) attribute_naming = TestCtx(BaseCheck.MEDIUM, self.section_titles['2.3']) ignore_attributes = [ '_FillValue', 'DODS', '_ChunkSizes', '_Coordinate', '_Unsigned' ] rname = regex.compile("^[A-Za-z][A-Za-z0-9_]*$") for name, variable in ds.variables.items(): variable_naming.assert_true(rname.match(name) is not None, "variable {} should begin with a letter and be composed of " "letters, digits, and underscores".format(name)) # Keep track of all the attributes, we'll need to check them for attr in variable.ncattrs(): if attr in ignore_attributes: continue # Special attributes made by THREDDS if attr.startswith('DODS'): continue # Ignore model produced attributes if attr.startswith('_Coordinate'): continue attribute_naming.assert_true(rname.match(attr) is not None, "attribute {}:{} should begin with a letter and be composed of " "letters, digits, and underscores".format(name, attr)) ret_val.append(variable_naming.to_result()) for dimension in ds.dimensions: dimension_naming.assert_true(rname.match(dimension) is not None, "dimension {} should begin with a latter and be composed of " "letters, digits, and underscores".format(dimension)) ret_val.append(dimension_naming.to_result()) for global_attr in ds.ncattrs(): if global_attr.startswith('DODS'): continue attribute_naming.assert_true(rname.match(global_attr) is not None, "global attribute {} should begin with a letter and be composed of " "letters, digits, and underscores".format(global_attr)) 
ret_val.append(attribute_naming.to_result()) return ret_val
python
def check_naming_conventions(self, ds): ''' Checks the variable names to ensure they are valid CF variable names under CF. CF §2.3 Variable, dimension and attribute names should begin with a letter and be composed of letters, digits, and underscores. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: compliance_checker.base.Result ''' ret_val = [] variable_naming = TestCtx(BaseCheck.MEDIUM, self.section_titles['2.3']) dimension_naming = TestCtx(BaseCheck.MEDIUM, self.section_titles['2.3']) attribute_naming = TestCtx(BaseCheck.MEDIUM, self.section_titles['2.3']) ignore_attributes = [ '_FillValue', 'DODS', '_ChunkSizes', '_Coordinate', '_Unsigned' ] rname = regex.compile("^[A-Za-z][A-Za-z0-9_]*$") for name, variable in ds.variables.items(): variable_naming.assert_true(rname.match(name) is not None, "variable {} should begin with a letter and be composed of " "letters, digits, and underscores".format(name)) # Keep track of all the attributes, we'll need to check them for attr in variable.ncattrs(): if attr in ignore_attributes: continue # Special attributes made by THREDDS if attr.startswith('DODS'): continue # Ignore model produced attributes if attr.startswith('_Coordinate'): continue attribute_naming.assert_true(rname.match(attr) is not None, "attribute {}:{} should begin with a letter and be composed of " "letters, digits, and underscores".format(name, attr)) ret_val.append(variable_naming.to_result()) for dimension in ds.dimensions: dimension_naming.assert_true(rname.match(dimension) is not None, "dimension {} should begin with a latter and be composed of " "letters, digits, and underscores".format(dimension)) ret_val.append(dimension_naming.to_result()) for global_attr in ds.ncattrs(): if global_attr.startswith('DODS'): continue attribute_naming.assert_true(rname.match(global_attr) is not None, "global attribute {} should begin with a letter and be composed of " "letters, digits, and underscores".format(global_attr)) 
ret_val.append(attribute_naming.to_result()) return ret_val
[ "def", "check_naming_conventions", "(", "self", ",", "ds", ")", ":", "ret_val", "=", "[", "]", "variable_naming", "=", "TestCtx", "(", "BaseCheck", ".", "MEDIUM", ",", "self", ".", "section_titles", "[", "'2.3'", "]", ")", "dimension_naming", "=", "TestCtx",...
Checks the variable names to ensure they are valid CF variable names under CF. CF §2.3 Variable, dimension and attribute names should begin with a letter and be composed of letters, digits, and underscores. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: compliance_checker.base.Result
[ "Checks", "the", "variable", "names", "to", "ensure", "they", "are", "valid", "CF", "variable", "names", "under", "CF", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L462-L522
train
31,414
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck.check_names_unique
def check_names_unique(self, ds): ''' Checks the variable names for uniqueness regardless of case. CF §2.3 names should not be distinguished purely by case, i.e., if case is disregarded, no two names should be the same. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: compliance_checker.base.Result ''' fails = [] total = len(ds.variables) names = defaultdict(int) for k in ds.variables: names[k.lower()] += 1 fails = ['Variables are not case sensitive. Duplicate variables named: %s' % k for k, v in names.items() if v > 1] return Result(BaseCheck.MEDIUM, (total - len(fails), total), self.section_titles['2.3'], msgs=fails)
python
def check_names_unique(self, ds): ''' Checks the variable names for uniqueness regardless of case. CF §2.3 names should not be distinguished purely by case, i.e., if case is disregarded, no two names should be the same. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: compliance_checker.base.Result ''' fails = [] total = len(ds.variables) names = defaultdict(int) for k in ds.variables: names[k.lower()] += 1 fails = ['Variables are not case sensitive. Duplicate variables named: %s' % k for k, v in names.items() if v > 1] return Result(BaseCheck.MEDIUM, (total - len(fails), total), self.section_titles['2.3'], msgs=fails)
[ "def", "check_names_unique", "(", "self", ",", "ds", ")", ":", "fails", "=", "[", "]", "total", "=", "len", "(", "ds", ".", "variables", ")", "names", "=", "defaultdict", "(", "int", ")", "for", "k", "in", "ds", ".", "variables", ":", "names", "[",...
Checks the variable names for uniqueness regardless of case. CF §2.3 names should not be distinguished purely by case, i.e., if case is disregarded, no two names should be the same. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: compliance_checker.base.Result
[ "Checks", "the", "variable", "names", "for", "uniqueness", "regardless", "of", "case", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L524-L542
train
31,415
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck.check_dimension_names
def check_dimension_names(self, ds): ''' Checks variables contain no duplicate dimension names. CF §2.4 A variable may have any number of dimensions, including zero, and the dimensions must all have different names. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: compliance_checker.base.Result ''' fails = [] total = len(ds.variables) for k, v in ds.variables.items(): dims = defaultdict(int) for d in v.dimensions: dims[d] += 1 for dimension, count in dims.items(): if count > 1: fails.append("%s has two or more dimensions named %s" % (k, dimension)) return Result(BaseCheck.HIGH, (total - len(fails), total), self.section_titles['2.4'], msgs=fails)
python
def check_dimension_names(self, ds): ''' Checks variables contain no duplicate dimension names. CF §2.4 A variable may have any number of dimensions, including zero, and the dimensions must all have different names. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: compliance_checker.base.Result ''' fails = [] total = len(ds.variables) for k, v in ds.variables.items(): dims = defaultdict(int) for d in v.dimensions: dims[d] += 1 for dimension, count in dims.items(): if count > 1: fails.append("%s has two or more dimensions named %s" % (k, dimension)) return Result(BaseCheck.HIGH, (total - len(fails), total), self.section_titles['2.4'], msgs=fails)
[ "def", "check_dimension_names", "(", "self", ",", "ds", ")", ":", "fails", "=", "[", "]", "total", "=", "len", "(", "ds", ".", "variables", ")", "for", "k", ",", "v", "in", "ds", ".", "variables", ".", "items", "(", ")", ":", "dims", "=", "defaul...
Checks variables contain no duplicate dimension names. CF §2.4 A variable may have any number of dimensions, including zero, and the dimensions must all have different names. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: compliance_checker.base.Result
[ "Checks", "variables", "contain", "no", "duplicate", "dimension", "names", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L544-L566
train
31,416
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck._get_coord_axis_map
def _get_coord_axis_map(self, ds): ''' Returns a dictionary mapping each coordinate to a letter identifier describing the _kind_ of coordinate. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: dict :return: A dictionary with variable names mapped to axis abbreviations, i.e. {'longitude': 'X', ... 'pressure': 'Z'} ''' expected = ['T', 'Z', 'Y', 'X'] coord_vars = self._find_coord_vars(ds) coord_axis_map = {} # L - Unlimited Coordinates # T - Time coordinates # Z - Depth/Altitude Coordinate # Y - Y-Coordinate (latitude) # X - X-Coordinate (longitude) # A - Auxiliary Coordinate # I - Instance Coordinate time_variables = cfutil.get_time_variables(ds) lat_variables = cfutil.get_latitude_variables(ds) lon_variables = cfutil.get_longitude_variables(ds) z_variables = cfutil.get_z_variables(ds) for coord_name in coord_vars: coord_var = ds.variables[coord_name] axis = getattr(coord_var, 'axis', None) standard_name = getattr(coord_var, 'standard_name', None) # Unlimited dimensions must come first if ds.dimensions[coord_name].isunlimited(): coord_axis_map[coord_name] = 'L' # axis takes precedence over standard_name elif axis in expected: coord_axis_map[coord_name] = axis elif standard_name == 'time': coord_axis_map[coord_name] = 'T' elif standard_name == 'longitude': coord_axis_map[coord_name] = 'X' elif standard_name == 'latitude': coord_axis_map[coord_name] = 'Y' elif standard_name in ['height', 'depth', 'altitude']: coord_axis_map[coord_name] = 'Z' elif cfutil.is_compression_coordinate(ds, coord_name): coord_axis_map[coord_name] = 'C' elif coord_name in time_variables: coord_axis_map[coord_name] = 'T' elif coord_name in z_variables: coord_axis_map[coord_name] = 'Z' elif coord_name in lat_variables: coord_axis_map[coord_name] = 'Y' elif coord_name in lon_variables: coord_axis_map[coord_name] = 'X' else: # mark the coordinate variable as unknown coord_axis_map[coord_name] = 'U' for dimension in self._get_instance_dimensions(ds): if dimension not in coord_axis_map: 
coord_axis_map[dimension] = 'I' # Dimensions of auxiliary coordinate variables will be marked with A. # This is useful to help determine if the dimensions are used like a # mapping from grid coordinates to physical lat/lon for coord_name in self._find_aux_coord_vars(ds): coord_var = ds.variables[coord_name] # Skip label auxiliary coordinates if coord_var.dtype.char == 'S': continue for dimension in coord_var.dimensions: if dimension not in coord_axis_map: coord_axis_map[dimension] = 'A' # If a dimension does not have a coordinate variable mark it as unknown # 'U' for dimension in ds.dimensions: if dimension not in coord_axis_map: coord_axis_map[dimension] = 'U' return coord_axis_map
python
def _get_coord_axis_map(self, ds): ''' Returns a dictionary mapping each coordinate to a letter identifier describing the _kind_ of coordinate. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: dict :return: A dictionary with variable names mapped to axis abbreviations, i.e. {'longitude': 'X', ... 'pressure': 'Z'} ''' expected = ['T', 'Z', 'Y', 'X'] coord_vars = self._find_coord_vars(ds) coord_axis_map = {} # L - Unlimited Coordinates # T - Time coordinates # Z - Depth/Altitude Coordinate # Y - Y-Coordinate (latitude) # X - X-Coordinate (longitude) # A - Auxiliary Coordinate # I - Instance Coordinate time_variables = cfutil.get_time_variables(ds) lat_variables = cfutil.get_latitude_variables(ds) lon_variables = cfutil.get_longitude_variables(ds) z_variables = cfutil.get_z_variables(ds) for coord_name in coord_vars: coord_var = ds.variables[coord_name] axis = getattr(coord_var, 'axis', None) standard_name = getattr(coord_var, 'standard_name', None) # Unlimited dimensions must come first if ds.dimensions[coord_name].isunlimited(): coord_axis_map[coord_name] = 'L' # axis takes precedence over standard_name elif axis in expected: coord_axis_map[coord_name] = axis elif standard_name == 'time': coord_axis_map[coord_name] = 'T' elif standard_name == 'longitude': coord_axis_map[coord_name] = 'X' elif standard_name == 'latitude': coord_axis_map[coord_name] = 'Y' elif standard_name in ['height', 'depth', 'altitude']: coord_axis_map[coord_name] = 'Z' elif cfutil.is_compression_coordinate(ds, coord_name): coord_axis_map[coord_name] = 'C' elif coord_name in time_variables: coord_axis_map[coord_name] = 'T' elif coord_name in z_variables: coord_axis_map[coord_name] = 'Z' elif coord_name in lat_variables: coord_axis_map[coord_name] = 'Y' elif coord_name in lon_variables: coord_axis_map[coord_name] = 'X' else: # mark the coordinate variable as unknown coord_axis_map[coord_name] = 'U' for dimension in self._get_instance_dimensions(ds): if dimension not in coord_axis_map: 
coord_axis_map[dimension] = 'I' # Dimensions of auxiliary coordinate variables will be marked with A. # This is useful to help determine if the dimensions are used like a # mapping from grid coordinates to physical lat/lon for coord_name in self._find_aux_coord_vars(ds): coord_var = ds.variables[coord_name] # Skip label auxiliary coordinates if coord_var.dtype.char == 'S': continue for dimension in coord_var.dimensions: if dimension not in coord_axis_map: coord_axis_map[dimension] = 'A' # If a dimension does not have a coordinate variable mark it as unknown # 'U' for dimension in ds.dimensions: if dimension not in coord_axis_map: coord_axis_map[dimension] = 'U' return coord_axis_map
[ "def", "_get_coord_axis_map", "(", "self", ",", "ds", ")", ":", "expected", "=", "[", "'T'", ",", "'Z'", ",", "'Y'", ",", "'X'", "]", "coord_vars", "=", "self", ".", "_find_coord_vars", "(", "ds", ")", "coord_axis_map", "=", "{", "}", "# L - Unlimited Co...
Returns a dictionary mapping each coordinate to a letter identifier describing the _kind_ of coordinate. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: dict :return: A dictionary with variable names mapped to axis abbreviations, i.e. {'longitude': 'X', ... 'pressure': 'Z'}
[ "Returns", "a", "dictionary", "mapping", "each", "coordinate", "to", "a", "letter", "identifier", "describing", "the", "_kind_", "of", "coordinate", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L613-L696
train
31,417
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck._get_instance_dimensions
def _get_instance_dimensions(self, ds): ''' Returns a list of dimensions marked as instance dimensions :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :returns: A list of variable dimensions ''' ret_val = [] for variable in ds.get_variables_by_attributes(cf_role=lambda x: isinstance(x, basestring)): if variable.ndim > 0: ret_val.append(variable.dimensions[0]) return ret_val
python
def _get_instance_dimensions(self, ds): ''' Returns a list of dimensions marked as instance dimensions :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :returns: A list of variable dimensions ''' ret_val = [] for variable in ds.get_variables_by_attributes(cf_role=lambda x: isinstance(x, basestring)): if variable.ndim > 0: ret_val.append(variable.dimensions[0]) return ret_val
[ "def", "_get_instance_dimensions", "(", "self", ",", "ds", ")", ":", "ret_val", "=", "[", "]", "for", "variable", "in", "ds", ".", "get_variables_by_attributes", "(", "cf_role", "=", "lambda", "x", ":", "isinstance", "(", "x", ",", "basestring", ")", ")", ...
Returns a list of dimensions marked as instance dimensions :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :returns: A list of variable dimensions
[ "Returns", "a", "list", "of", "dimensions", "marked", "as", "instance", "dimensions" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L698-L711
train
31,418
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck._get_pretty_dimension_order
def _get_pretty_dimension_order(self, ds, name): ''' Returns a comma seperated string of the dimensions for a specified variable :param netCDF4.Dataset ds: An open netCDF dataset :param str name: A string with a valid NetCDF variable name for the dataset :rtype: str :return: A comma separated string of the variable's dimensions ''' dim_names = [] for dim in ds.variables[name].dimensions: dim_name = dim if ds.dimensions[dim].isunlimited(): dim_name += ' (Unlimited)' dim_names.append(dim_name) return ', '.join(dim_names)
python
def _get_pretty_dimension_order(self, ds, name): ''' Returns a comma seperated string of the dimensions for a specified variable :param netCDF4.Dataset ds: An open netCDF dataset :param str name: A string with a valid NetCDF variable name for the dataset :rtype: str :return: A comma separated string of the variable's dimensions ''' dim_names = [] for dim in ds.variables[name].dimensions: dim_name = dim if ds.dimensions[dim].isunlimited(): dim_name += ' (Unlimited)' dim_names.append(dim_name) return ', '.join(dim_names)
[ "def", "_get_pretty_dimension_order", "(", "self", ",", "ds", ",", "name", ")", ":", "dim_names", "=", "[", "]", "for", "dim", "in", "ds", ".", "variables", "[", "name", "]", ".", "dimensions", ":", "dim_name", "=", "dim", "if", "ds", ".", "dimensions"...
Returns a comma seperated string of the dimensions for a specified variable :param netCDF4.Dataset ds: An open netCDF dataset :param str name: A string with a valid NetCDF variable name for the dataset :rtype: str :return: A comma separated string of the variable's dimensions
[ "Returns", "a", "comma", "seperated", "string", "of", "the", "dimensions", "for", "a", "specified", "variable" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L713-L730
train
31,419
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck._get_dimension_order
def _get_dimension_order(self, ds, name, coord_axis_map): ''' Returns a list of strings corresponding to the named axis of the dimensions for a variable. Example:: self._get_dimension_order(ds, 'temperature', coord_axis_map) --> ['T', 'Y', 'X'] :param netCDF4.Dataset ds: An open netCDF dataset :param str name: Name of the variable :param dict coord_axis_map: A dictionary mapping each coordinate variable and dimension to a named axis :rtype: list :return: A list of strings corresponding to the named axis of the dimensions for a variable ''' retval = [] variable = ds.variables[name] for dim in variable.dimensions: retval.append(coord_axis_map[dim]) return retval
python
def _get_dimension_order(self, ds, name, coord_axis_map): ''' Returns a list of strings corresponding to the named axis of the dimensions for a variable. Example:: self._get_dimension_order(ds, 'temperature', coord_axis_map) --> ['T', 'Y', 'X'] :param netCDF4.Dataset ds: An open netCDF dataset :param str name: Name of the variable :param dict coord_axis_map: A dictionary mapping each coordinate variable and dimension to a named axis :rtype: list :return: A list of strings corresponding to the named axis of the dimensions for a variable ''' retval = [] variable = ds.variables[name] for dim in variable.dimensions: retval.append(coord_axis_map[dim]) return retval
[ "def", "_get_dimension_order", "(", "self", ",", "ds", ",", "name", ",", "coord_axis_map", ")", ":", "retval", "=", "[", "]", "variable", "=", "ds", ".", "variables", "[", "name", "]", "for", "dim", "in", "variable", ".", "dimensions", ":", "retval", "...
Returns a list of strings corresponding to the named axis of the dimensions for a variable. Example:: self._get_dimension_order(ds, 'temperature', coord_axis_map) --> ['T', 'Y', 'X'] :param netCDF4.Dataset ds: An open netCDF dataset :param str name: Name of the variable :param dict coord_axis_map: A dictionary mapping each coordinate variable and dimension to a named axis :rtype: list :return: A list of strings corresponding to the named axis of the dimensions for a variable
[ "Returns", "a", "list", "of", "strings", "corresponding", "to", "the", "named", "axis", "of", "the", "dimensions", "for", "a", "variable", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L732-L752
train
31,420
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck.check_convention_globals
def check_convention_globals(self, ds): ''' Check the common global attributes are strings if they exist. CF §2.6.2 title/history global attributes, must be strings. Do not need to exist. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of Results ''' attrs = ['title', 'history'] valid_globals = TestCtx(BaseCheck.MEDIUM, self.section_titles['2.6']) for attr in attrs: dataset_attr = getattr(ds, attr, None) is_string = isinstance(dataset_attr, basestring) valid_globals.assert_true(is_string and len(dataset_attr), "§2.6.2 global attribute {} should exist and be a non-empty string" # subsection message "".format(attr)) return valid_globals.to_result()
python
def check_convention_globals(self, ds): ''' Check the common global attributes are strings if they exist. CF §2.6.2 title/history global attributes, must be strings. Do not need to exist. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of Results ''' attrs = ['title', 'history'] valid_globals = TestCtx(BaseCheck.MEDIUM, self.section_titles['2.6']) for attr in attrs: dataset_attr = getattr(ds, attr, None) is_string = isinstance(dataset_attr, basestring) valid_globals.assert_true(is_string and len(dataset_attr), "§2.6.2 global attribute {} should exist and be a non-empty string" # subsection message "".format(attr)) return valid_globals.to_result()
[ "def", "check_convention_globals", "(", "self", ",", "ds", ")", ":", "attrs", "=", "[", "'title'", ",", "'history'", "]", "valid_globals", "=", "TestCtx", "(", "BaseCheck", ".", "MEDIUM", ",", "self", ".", "section_titles", "[", "'2.6'", "]", ")", "for", ...
Check the common global attributes are strings if they exist. CF §2.6.2 title/history global attributes, must be strings. Do not need to exist. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of Results
[ "Check", "the", "common", "global", "attributes", "are", "strings", "if", "they", "exist", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L850-L871
train
31,421
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck._split_standard_name
def _split_standard_name(self, standard_name): ''' Returns a tuple of the standard_name and standard_name modifier Nones are used to represent the absence of a modifier or standard_name :rtype: tuple :return: 2-tuple of standard_name and modifier as strings ''' if isinstance(standard_name, basestring) and ' ' in standard_name: return standard_name.split(' ', 1) # if this isn't a string, then it doesn't make sense to split # -- treat value as standard name with no modifier else: return standard_name, None
python
def _split_standard_name(self, standard_name): ''' Returns a tuple of the standard_name and standard_name modifier Nones are used to represent the absence of a modifier or standard_name :rtype: tuple :return: 2-tuple of standard_name and modifier as strings ''' if isinstance(standard_name, basestring) and ' ' in standard_name: return standard_name.split(' ', 1) # if this isn't a string, then it doesn't make sense to split # -- treat value as standard name with no modifier else: return standard_name, None
[ "def", "_split_standard_name", "(", "self", ",", "standard_name", ")", ":", "if", "isinstance", "(", "standard_name", ",", "basestring", ")", "and", "' '", "in", "standard_name", ":", "return", "standard_name", ".", "split", "(", "' '", ",", "1", ")", "# if ...
Returns a tuple of the standard_name and standard_name modifier Nones are used to represent the absence of a modifier or standard_name :rtype: tuple :return: 2-tuple of standard_name and modifier as strings
[ "Returns", "a", "tuple", "of", "the", "standard_name", "and", "standard_name", "modifier" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L994-L1009
train
31,422
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck._check_valid_cf_units
def _check_valid_cf_units(self, ds, variable_name): ''' Checks that the variable contains units attribute, the attribute is a string and the value is not deprecated by CF :param netCDF4.Dataset ds: An open netCDF dataset :param str variable_name: Name of the variable to be checked :rtype: :return: List of results ''' # This list is straight from section 3 deprecated = ['level', 'layer', 'sigma_level'] variable = ds.variables[variable_name] units = getattr(variable, 'units', None) standard_name_full = getattr(variable, 'standard_name', None) standard_name, standard_name_modifier = self._split_standard_name(standard_name_full) std_name_units_dimensionless = cfutil.is_dimensionless_standard_name(self._std_names._root, standard_name) # Is this even in the database? also, if there is no standard_name, # there's no way to know if it is dimensionless. should_be_dimensionless = (variable.dtype.char == 'S' or std_name_units_dimensionless or standard_name is None) # 1) Units must exist valid_units = TestCtx(BaseCheck.HIGH, self.section_titles['3.1']) valid_units.assert_true(should_be_dimensionless or units is not None, 'units attribute is required for {} when variable is not a dimensionless quantity'.format(variable_name)) # Don't bother checking the rest if units is None and not should_be_dimensionless: return valid_units.to_result() # 2) units attribute must be a string valid_units.assert_true(should_be_dimensionless or isinstance(units, basestring), 'units attribute for {} needs to be a string'.format(variable_name)) # 3) units are not deprecated valid_units.assert_true(units not in deprecated, 'units for {}, "{}" are deprecated by CF 1.6'.format(variable_name, units)) return valid_units.to_result()
python
def _check_valid_cf_units(self, ds, variable_name): ''' Checks that the variable contains units attribute, the attribute is a string and the value is not deprecated by CF :param netCDF4.Dataset ds: An open netCDF dataset :param str variable_name: Name of the variable to be checked :rtype: :return: List of results ''' # This list is straight from section 3 deprecated = ['level', 'layer', 'sigma_level'] variable = ds.variables[variable_name] units = getattr(variable, 'units', None) standard_name_full = getattr(variable, 'standard_name', None) standard_name, standard_name_modifier = self._split_standard_name(standard_name_full) std_name_units_dimensionless = cfutil.is_dimensionless_standard_name(self._std_names._root, standard_name) # Is this even in the database? also, if there is no standard_name, # there's no way to know if it is dimensionless. should_be_dimensionless = (variable.dtype.char == 'S' or std_name_units_dimensionless or standard_name is None) # 1) Units must exist valid_units = TestCtx(BaseCheck.HIGH, self.section_titles['3.1']) valid_units.assert_true(should_be_dimensionless or units is not None, 'units attribute is required for {} when variable is not a dimensionless quantity'.format(variable_name)) # Don't bother checking the rest if units is None and not should_be_dimensionless: return valid_units.to_result() # 2) units attribute must be a string valid_units.assert_true(should_be_dimensionless or isinstance(units, basestring), 'units attribute for {} needs to be a string'.format(variable_name)) # 3) units are not deprecated valid_units.assert_true(units not in deprecated, 'units for {}, "{}" are deprecated by CF 1.6'.format(variable_name, units)) return valid_units.to_result()
[ "def", "_check_valid_cf_units", "(", "self", ",", "ds", ",", "variable_name", ")", ":", "# This list is straight from section 3", "deprecated", "=", "[", "'level'", ",", "'layer'", ",", "'sigma_level'", "]", "variable", "=", "ds", ".", "variables", "[", "variable_...
Checks that the variable contains units attribute, the attribute is a string and the value is not deprecated by CF :param netCDF4.Dataset ds: An open netCDF dataset :param str variable_name: Name of the variable to be checked :rtype: :return: List of results
[ "Checks", "that", "the", "variable", "contains", "units", "attribute", "the", "attribute", "is", "a", "string", "and", "the", "value", "is", "not", "deprecated", "by", "CF" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L1011-L1053
train
31,423
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck._check_valid_udunits
def _check_valid_udunits(self, ds, variable_name): ''' Checks that the variable's units are contained in UDUnits :param netCDF4.Dataset ds: An open netCDF dataset :param str variable_name: Name of the variable to be checked ''' variable = ds.variables[variable_name] units = getattr(variable, 'units', None) standard_name = getattr(variable, 'standard_name', None) standard_name, standard_name_modifier = self._split_standard_name(standard_name) std_name_units_dimensionless = cfutil.is_dimensionless_standard_name(self._std_names._root, standard_name) # If the variable is supposed to be dimensionless, it automatically passes should_be_dimensionless = (variable.dtype.char == 'S' or std_name_units_dimensionless) valid_udunits = TestCtx(BaseCheck.HIGH, self.section_titles["3.1"]) are_udunits = (units is not None and util.units_known(units)) valid_udunits.assert_true(should_be_dimensionless or are_udunits, 'units for {}, "{}" are not recognized by UDUNITS'.format(variable_name, units)) return valid_udunits.to_result()
python
def _check_valid_udunits(self, ds, variable_name): ''' Checks that the variable's units are contained in UDUnits :param netCDF4.Dataset ds: An open netCDF dataset :param str variable_name: Name of the variable to be checked ''' variable = ds.variables[variable_name] units = getattr(variable, 'units', None) standard_name = getattr(variable, 'standard_name', None) standard_name, standard_name_modifier = self._split_standard_name(standard_name) std_name_units_dimensionless = cfutil.is_dimensionless_standard_name(self._std_names._root, standard_name) # If the variable is supposed to be dimensionless, it automatically passes should_be_dimensionless = (variable.dtype.char == 'S' or std_name_units_dimensionless) valid_udunits = TestCtx(BaseCheck.HIGH, self.section_titles["3.1"]) are_udunits = (units is not None and util.units_known(units)) valid_udunits.assert_true(should_be_dimensionless or are_udunits, 'units for {}, "{}" are not recognized by UDUNITS'.format(variable_name, units)) return valid_udunits.to_result()
[ "def", "_check_valid_udunits", "(", "self", ",", "ds", ",", "variable_name", ")", ":", "variable", "=", "ds", ".", "variables", "[", "variable_name", "]", "units", "=", "getattr", "(", "variable", ",", "'units'", ",", "None", ")", "standard_name", "=", "ge...
Checks that the variable's units are contained in UDUnits :param netCDF4.Dataset ds: An open netCDF dataset :param str variable_name: Name of the variable to be checked
[ "Checks", "that", "the", "variable", "s", "units", "are", "contained", "in", "UDUnits" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L1055-L1078
train
31,424
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck._check_valid_standard_units
def _check_valid_standard_units(self, ds, variable_name): ''' Checks that the variable's units are appropriate for the standard name according to the CF standard name table and coordinate sections in CF 1.6 :param netCDF4.Dataset ds: An open netCDF dataset :param str variable_name: Name of the variable to be checked ''' variable = ds.variables[variable_name] units = getattr(variable, 'units', None) standard_name = getattr(variable, 'standard_name', None) valid_standard_units = TestCtx(BaseCheck.HIGH, self.section_titles["3.1"]) # If the variable is supposed to be dimensionless, it automatically passes std_name_units_dimensionless = cfutil.is_dimensionless_standard_name(self._std_names._root, standard_name) standard_name, standard_name_modifier = self._split_standard_name(standard_name) standard_entry = self._std_names.get(standard_name, None) if standard_entry is not None: canonical_units = standard_entry.canonical_units else: # Any unit comparisons with None returns False canonical_units = None # Other standard_name modifiers have the same units as the # unmodified standard name or are not checked for units. 
if standard_name_modifier == 'number_of_observations': canonical_units = '1' # This section represents the different cases where simple udunits # comparison isn't comprehensive enough to determine if the units are # appropriate under CF # UDUnits accepts "s" as a unit of time but it should be <unit> since <epoch> if standard_name == 'time': valid_standard_units.assert_true(util.units_convertible(units, 'seconds since 1970-01-01'), 'time must be in a valid units format <unit> since <epoch> ' 'not {}'.format(units)) # UDunits can't tell the difference between east and north facing coordinates elif standard_name == 'latitude': # degrees is allowed if using a transformed grid allowed_units = cfutil.VALID_LAT_UNITS | {'degrees'} valid_standard_units.assert_true(units.lower() in allowed_units, 'variables defining latitude ("{}") must use degrees_north ' 'or degrees if defining a transformed grid. Currently ' '{}'.format(variable_name, units)) # UDunits can't tell the difference between east and north facing coordinates elif standard_name == 'longitude': # degrees is allowed if using a transformed grid allowed_units = cfutil.VALID_LON_UNITS | {'degrees'} valid_standard_units.assert_true(units.lower() in allowed_units, 'variables defining longitude ("{}") must use degrees_east ' 'or degrees if defining a transformed grid. Currently ' '{}'.format(variable_name, units)) # Standard Name table agrees the unit should be dimensionless elif std_name_units_dimensionless: valid_standard_units.assert_true(True, '') elif canonical_units is not None: valid_standard_units.assert_true(util.units_convertible(canonical_units, units), 'units for variable {} must be convertible to {} ' 'currently they are {}'.format(variable_name, canonical_units, units)) return valid_standard_units.to_result()
python
def _check_valid_standard_units(self, ds, variable_name): ''' Checks that the variable's units are appropriate for the standard name according to the CF standard name table and coordinate sections in CF 1.6 :param netCDF4.Dataset ds: An open netCDF dataset :param str variable_name: Name of the variable to be checked ''' variable = ds.variables[variable_name] units = getattr(variable, 'units', None) standard_name = getattr(variable, 'standard_name', None) valid_standard_units = TestCtx(BaseCheck.HIGH, self.section_titles["3.1"]) # If the variable is supposed to be dimensionless, it automatically passes std_name_units_dimensionless = cfutil.is_dimensionless_standard_name(self._std_names._root, standard_name) standard_name, standard_name_modifier = self._split_standard_name(standard_name) standard_entry = self._std_names.get(standard_name, None) if standard_entry is not None: canonical_units = standard_entry.canonical_units else: # Any unit comparisons with None returns False canonical_units = None # Other standard_name modifiers have the same units as the # unmodified standard name or are not checked for units. 
if standard_name_modifier == 'number_of_observations': canonical_units = '1' # This section represents the different cases where simple udunits # comparison isn't comprehensive enough to determine if the units are # appropriate under CF # UDUnits accepts "s" as a unit of time but it should be <unit> since <epoch> if standard_name == 'time': valid_standard_units.assert_true(util.units_convertible(units, 'seconds since 1970-01-01'), 'time must be in a valid units format <unit> since <epoch> ' 'not {}'.format(units)) # UDunits can't tell the difference between east and north facing coordinates elif standard_name == 'latitude': # degrees is allowed if using a transformed grid allowed_units = cfutil.VALID_LAT_UNITS | {'degrees'} valid_standard_units.assert_true(units.lower() in allowed_units, 'variables defining latitude ("{}") must use degrees_north ' 'or degrees if defining a transformed grid. Currently ' '{}'.format(variable_name, units)) # UDunits can't tell the difference between east and north facing coordinates elif standard_name == 'longitude': # degrees is allowed if using a transformed grid allowed_units = cfutil.VALID_LON_UNITS | {'degrees'} valid_standard_units.assert_true(units.lower() in allowed_units, 'variables defining longitude ("{}") must use degrees_east ' 'or degrees if defining a transformed grid. Currently ' '{}'.format(variable_name, units)) # Standard Name table agrees the unit should be dimensionless elif std_name_units_dimensionless: valid_standard_units.assert_true(True, '') elif canonical_units is not None: valid_standard_units.assert_true(util.units_convertible(canonical_units, units), 'units for variable {} must be convertible to {} ' 'currently they are {}'.format(variable_name, canonical_units, units)) return valid_standard_units.to_result()
[ "def", "_check_valid_standard_units", "(", "self", ",", "ds", ",", "variable_name", ")", ":", "variable", "=", "ds", ".", "variables", "[", "variable_name", "]", "units", "=", "getattr", "(", "variable", ",", "'units'", ",", "None", ")", "standard_name", "="...
Checks that the variable's units are appropriate for the standard name according to the CF standard name table and coordinate sections in CF 1.6 :param netCDF4.Dataset ds: An open netCDF dataset :param str variable_name: Name of the variable to be checked
[ "Checks", "that", "the", "variable", "s", "units", "are", "appropriate", "for", "the", "standard", "name", "according", "to", "the", "CF", "standard", "name", "table", "and", "coordinate", "sections", "in", "CF", "1", ".", "6" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L1080-L1149
train
31,425
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck.check_standard_name
def check_standard_name(self, ds): ''' Check a variables's standard_name attribute to ensure that it meets CF compliance. CF §3.3 A standard name is associated with a variable via the attribute standard_name which takes a string value comprised of a standard name optionally followed by one or more blanks and a standard name modifier :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results ''' ret_val = [] coord_vars = self._find_coord_vars(ds) aux_coord_vars = self._find_aux_coord_vars(ds) axis_vars = cfutil.get_axis_variables(ds) flag_vars = cfutil.get_flag_variables(ds) geophysical_vars = self._find_geophysical_vars(ds) variables_requiring_standard_names = coord_vars + aux_coord_vars + axis_vars + flag_vars + geophysical_vars for name in set(variables_requiring_standard_names): # Compression indices used in reduced horizontal grids or # compression schemes do not require attributes other than compress if cfutil.is_compression_coordinate(ds, name): continue ncvar = ds.variables[name] # §9 doesn't explicitly allow instance variables as coordinates but # it's loosely implied. Just in case, skip it. if hasattr(ncvar, 'cf_role'): continue # Unfortunately, §6.1 allows for string types to be listed as # coordinates. if ncvar.dtype.char == 'S': continue standard_name = getattr(ncvar, 'standard_name', None) standard_name, standard_name_modifier = self._split_standard_name(standard_name) long_name = getattr(ncvar, 'long_name', None) long_or_std_name = TestCtx(BaseCheck.HIGH, self.section_titles['3.3']) if long_name is not None: long_name_present = True long_or_std_name.assert_true(isinstance(long_name, basestring), "Attribute long_name for variable {} must be a string".format(name)) else: long_name_present = False # §1.3 The long_name and standard_name attributes are used to # describe the content of each variable. For backwards # compatibility with COARDS neither is required, but use of at # least one of them is strongly recommended. 
# If standard_name is not defined but long_name is, don't continue # the check for this variable if standard_name is not None: standard_name_present = True valid_std_name = TestCtx(BaseCheck.HIGH, self.section_titles['3.3']) valid_std_name.assert_true(isinstance(standard_name, basestring), "Attribute standard_name for variable {} must be a string".format(name)) if isinstance(standard_name, basestring): valid_std_name.assert_true(standard_name in self._std_names, "standard_name {} is not defined in Standard Name Table v{}".format( standard_name or 'undefined', self._std_names._version)) ret_val.append(valid_std_name.to_result()) # 2) optional - if modifiers, should be in table if standard_name_modifier is not None: valid_modifier = TestCtx(BaseCheck.HIGH, self.section_titles["3.3"]) allowed = ['detection_minimum', 'number_of_observations', 'standard_error', 'status_flag'] valid_modifier.assert_true(standard_name_modifier in allowed, "standard_name modifier {} for variable {} is not a valid modifier " "according to appendix C".format(standard_name_modifier, name)) ret_val.append(valid_modifier.to_result()) else: standard_name_present = False long_or_std_name.assert_true(long_name_present or standard_name_present, "Attribute long_name or/and standard_name is highly recommended for variable {}".format(name)) ret_val.append(long_or_std_name.to_result()) return ret_val
python
def check_standard_name(self, ds): ''' Check a variables's standard_name attribute to ensure that it meets CF compliance. CF §3.3 A standard name is associated with a variable via the attribute standard_name which takes a string value comprised of a standard name optionally followed by one or more blanks and a standard name modifier :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results ''' ret_val = [] coord_vars = self._find_coord_vars(ds) aux_coord_vars = self._find_aux_coord_vars(ds) axis_vars = cfutil.get_axis_variables(ds) flag_vars = cfutil.get_flag_variables(ds) geophysical_vars = self._find_geophysical_vars(ds) variables_requiring_standard_names = coord_vars + aux_coord_vars + axis_vars + flag_vars + geophysical_vars for name in set(variables_requiring_standard_names): # Compression indices used in reduced horizontal grids or # compression schemes do not require attributes other than compress if cfutil.is_compression_coordinate(ds, name): continue ncvar = ds.variables[name] # §9 doesn't explicitly allow instance variables as coordinates but # it's loosely implied. Just in case, skip it. if hasattr(ncvar, 'cf_role'): continue # Unfortunately, §6.1 allows for string types to be listed as # coordinates. if ncvar.dtype.char == 'S': continue standard_name = getattr(ncvar, 'standard_name', None) standard_name, standard_name_modifier = self._split_standard_name(standard_name) long_name = getattr(ncvar, 'long_name', None) long_or_std_name = TestCtx(BaseCheck.HIGH, self.section_titles['3.3']) if long_name is not None: long_name_present = True long_or_std_name.assert_true(isinstance(long_name, basestring), "Attribute long_name for variable {} must be a string".format(name)) else: long_name_present = False # §1.3 The long_name and standard_name attributes are used to # describe the content of each variable. For backwards # compatibility with COARDS neither is required, but use of at # least one of them is strongly recommended. 
# If standard_name is not defined but long_name is, don't continue # the check for this variable if standard_name is not None: standard_name_present = True valid_std_name = TestCtx(BaseCheck.HIGH, self.section_titles['3.3']) valid_std_name.assert_true(isinstance(standard_name, basestring), "Attribute standard_name for variable {} must be a string".format(name)) if isinstance(standard_name, basestring): valid_std_name.assert_true(standard_name in self._std_names, "standard_name {} is not defined in Standard Name Table v{}".format( standard_name or 'undefined', self._std_names._version)) ret_val.append(valid_std_name.to_result()) # 2) optional - if modifiers, should be in table if standard_name_modifier is not None: valid_modifier = TestCtx(BaseCheck.HIGH, self.section_titles["3.3"]) allowed = ['detection_minimum', 'number_of_observations', 'standard_error', 'status_flag'] valid_modifier.assert_true(standard_name_modifier in allowed, "standard_name modifier {} for variable {} is not a valid modifier " "according to appendix C".format(standard_name_modifier, name)) ret_val.append(valid_modifier.to_result()) else: standard_name_present = False long_or_std_name.assert_true(long_name_present or standard_name_present, "Attribute long_name or/and standard_name is highly recommended for variable {}".format(name)) ret_val.append(long_or_std_name.to_result()) return ret_val
[ "def", "check_standard_name", "(", "self", ",", "ds", ")", ":", "ret_val", "=", "[", "]", "coord_vars", "=", "self", ".", "_find_coord_vars", "(", "ds", ")", "aux_coord_vars", "=", "self", ".", "_find_aux_coord_vars", "(", "ds", ")", "axis_vars", "=", "cfu...
Check a variables's standard_name attribute to ensure that it meets CF compliance. CF §3.3 A standard name is associated with a variable via the attribute standard_name which takes a string value comprised of a standard name optionally followed by one or more blanks and a standard name modifier :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results
[ "Check", "a", "variables", "s", "standard_name", "attribute", "to", "ensure", "that", "it", "meets", "CF", "compliance", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L1151-L1240
train
31,426
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck.check_ancillary_variables
def check_ancillary_variables(self, ds): ''' Checks the ancillary_variable attribute for all variables to ensure they are CF compliant. CF §3.4 It is a string attribute whose value is a blank separated list of variable names. The nature of the relationship between variables associated via ancillary_variables must be determined by other attributes. The variables listed by the ancillary_variables attribute will often have the standard name of the variable which points to them including a modifier (Appendix C, Standard Name Modifiers) to indicate the relationship. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results ''' ret_val = [] for ncvar in ds.get_variables_by_attributes(ancillary_variables=lambda x: x is not None): name = ncvar.name valid_ancillary = TestCtx(BaseCheck.HIGH, self.section_titles["3.4"]) ancillary_variables = ncvar.ancillary_variables valid_ancillary.assert_true(isinstance(ancillary_variables, basestring), "ancillary_variables attribute defined by {} " "should be string".format(name)) # Can't perform the second check if it's not a string if not isinstance(ancillary_variables, basestring): ret_val.append(valid_ancillary.to_result()) continue for ancillary_variable in ancillary_variables.split(): valid_ancillary.assert_true(ancillary_variable in ds.variables, "{} is not a variable in this dataset".format(ancillary_variable)) ret_val.append(valid_ancillary.to_result()) return ret_val
python
def check_ancillary_variables(self, ds): ''' Checks the ancillary_variable attribute for all variables to ensure they are CF compliant. CF §3.4 It is a string attribute whose value is a blank separated list of variable names. The nature of the relationship between variables associated via ancillary_variables must be determined by other attributes. The variables listed by the ancillary_variables attribute will often have the standard name of the variable which points to them including a modifier (Appendix C, Standard Name Modifiers) to indicate the relationship. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results ''' ret_val = [] for ncvar in ds.get_variables_by_attributes(ancillary_variables=lambda x: x is not None): name = ncvar.name valid_ancillary = TestCtx(BaseCheck.HIGH, self.section_titles["3.4"]) ancillary_variables = ncvar.ancillary_variables valid_ancillary.assert_true(isinstance(ancillary_variables, basestring), "ancillary_variables attribute defined by {} " "should be string".format(name)) # Can't perform the second check if it's not a string if not isinstance(ancillary_variables, basestring): ret_val.append(valid_ancillary.to_result()) continue for ancillary_variable in ancillary_variables.split(): valid_ancillary.assert_true(ancillary_variable in ds.variables, "{} is not a variable in this dataset".format(ancillary_variable)) ret_val.append(valid_ancillary.to_result()) return ret_val
[ "def", "check_ancillary_variables", "(", "self", ",", "ds", ")", ":", "ret_val", "=", "[", "]", "for", "ncvar", "in", "ds", ".", "get_variables_by_attributes", "(", "ancillary_variables", "=", "lambda", "x", ":", "x", "is", "not", "None", ")", ":", "name",...
Checks the ancillary_variable attribute for all variables to ensure they are CF compliant. CF §3.4 It is a string attribute whose value is a blank separated list of variable names. The nature of the relationship between variables associated via ancillary_variables must be determined by other attributes. The variables listed by the ancillary_variables attribute will often have the standard name of the variable which points to them including a modifier (Appendix C, Standard Name Modifiers) to indicate the relationship. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results
[ "Checks", "the", "ancillary_variable", "attribute", "for", "all", "variables", "to", "ensure", "they", "are", "CF", "compliant", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L1242-L1281
train
31,427
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck.check_flags
def check_flags(self, ds): ''' Check the flag_values, flag_masks and flag_meanings attributes for variables to ensure they are CF compliant. CF §3.5 The attributes flag_values, flag_masks and flag_meanings are intended to make variables that contain flag values self describing. Status codes and Boolean (binary) condition flags may be expressed with different combinations of flag_values and flag_masks attribute definitions. The flag_values and flag_meanings attributes describe a status flag consisting of mutually exclusive coded values. The flag_meanings attribute is a string whose value is a blank separated list of descriptive words or phrases, one for each flag value. Each word or phrase should consist of characters from the alphanumeric set and the following five: '_', '-', '.', '+', '@'. The flag_masks and flag_meanings attributes describe a number of independent Boolean conditions using bit field notation by setting unique bits in each flag_masks value. The flag_masks, flag_values and flag_meanings attributes, used together, describe a blend of independent Boolean conditions and enumerated status codes. A flagged condition is identified by a bitwise AND of the variable value and each flag_masks value; a result that matches the flag_values value indicates a true condition. 
:param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results ''' ret_val = [] for name in cfutil.get_flag_variables(ds): variable = ds.variables[name] flag_values = getattr(variable, "flag_values", None) flag_masks = getattr(variable, "flag_masks", None) valid_flags_var = TestCtx(BaseCheck.HIGH, self.section_titles['3.5']) # Check that the variable defines mask or values valid_flags_var.assert_true(flag_values is not None or flag_masks is not None, "{} does not define either flag_masks or flag_values".format(name)) ret_val.append(valid_flags_var.to_result()) valid_meanings = self._check_flag_meanings(ds, name) ret_val.append(valid_meanings) # check flag_values if flag_values is not None: valid_values = self._check_flag_values(ds, name) ret_val.append(valid_values) # check flag_masks if flag_masks is not None: valid_masks = self._check_flag_masks(ds, name) ret_val.append(valid_masks) if flag_values is not None and flag_masks is not None: allv = list(map(lambda a, b: a & b == a, list(zip(flag_values, flag_masks)))) allvr = Result(BaseCheck.MEDIUM, all(allv), self.section_titles['3.5']) if not allvr.value: allvr.msgs = ["flag masks and flag values for '{}' combined don't equal flag value".format(name)] ret_val.append(allvr) return ret_val
python
def check_flags(self, ds): ''' Check the flag_values, flag_masks and flag_meanings attributes for variables to ensure they are CF compliant. CF §3.5 The attributes flag_values, flag_masks and flag_meanings are intended to make variables that contain flag values self describing. Status codes and Boolean (binary) condition flags may be expressed with different combinations of flag_values and flag_masks attribute definitions. The flag_values and flag_meanings attributes describe a status flag consisting of mutually exclusive coded values. The flag_meanings attribute is a string whose value is a blank separated list of descriptive words or phrases, one for each flag value. Each word or phrase should consist of characters from the alphanumeric set and the following five: '_', '-', '.', '+', '@'. The flag_masks and flag_meanings attributes describe a number of independent Boolean conditions using bit field notation by setting unique bits in each flag_masks value. The flag_masks, flag_values and flag_meanings attributes, used together, describe a blend of independent Boolean conditions and enumerated status codes. A flagged condition is identified by a bitwise AND of the variable value and each flag_masks value; a result that matches the flag_values value indicates a true condition. 
:param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results ''' ret_val = [] for name in cfutil.get_flag_variables(ds): variable = ds.variables[name] flag_values = getattr(variable, "flag_values", None) flag_masks = getattr(variable, "flag_masks", None) valid_flags_var = TestCtx(BaseCheck.HIGH, self.section_titles['3.5']) # Check that the variable defines mask or values valid_flags_var.assert_true(flag_values is not None or flag_masks is not None, "{} does not define either flag_masks or flag_values".format(name)) ret_val.append(valid_flags_var.to_result()) valid_meanings = self._check_flag_meanings(ds, name) ret_val.append(valid_meanings) # check flag_values if flag_values is not None: valid_values = self._check_flag_values(ds, name) ret_val.append(valid_values) # check flag_masks if flag_masks is not None: valid_masks = self._check_flag_masks(ds, name) ret_val.append(valid_masks) if flag_values is not None and flag_masks is not None: allv = list(map(lambda a, b: a & b == a, list(zip(flag_values, flag_masks)))) allvr = Result(BaseCheck.MEDIUM, all(allv), self.section_titles['3.5']) if not allvr.value: allvr.msgs = ["flag masks and flag values for '{}' combined don't equal flag value".format(name)] ret_val.append(allvr) return ret_val
[ "def", "check_flags", "(", "self", ",", "ds", ")", ":", "ret_val", "=", "[", "]", "for", "name", "in", "cfutil", ".", "get_flag_variables", "(", "ds", ")", ":", "variable", "=", "ds", ".", "variables", "[", "name", "]", "flag_values", "=", "getattr", ...
Check the flag_values, flag_masks and flag_meanings attributes for variables to ensure they are CF compliant. CF §3.5 The attributes flag_values, flag_masks and flag_meanings are intended to make variables that contain flag values self describing. Status codes and Boolean (binary) condition flags may be expressed with different combinations of flag_values and flag_masks attribute definitions. The flag_values and flag_meanings attributes describe a status flag consisting of mutually exclusive coded values. The flag_meanings attribute is a string whose value is a blank separated list of descriptive words or phrases, one for each flag value. Each word or phrase should consist of characters from the alphanumeric set and the following five: '_', '-', '.', '+', '@'. The flag_masks and flag_meanings attributes describe a number of independent Boolean conditions using bit field notation by setting unique bits in each flag_masks value. The flag_masks, flag_values and flag_meanings attributes, used together, describe a blend of independent Boolean conditions and enumerated status codes. A flagged condition is identified by a bitwise AND of the variable value and each flag_masks value; a result that matches the flag_values value indicates a true condition. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results
[ "Check", "the", "flag_values", "flag_masks", "and", "flag_meanings", "attributes", "for", "variables", "to", "ensure", "they", "are", "CF", "compliant", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L1283-L1351
train
31,428
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck._check_flag_values
def _check_flag_values(self, ds, name): ''' Checks a variable's flag_values attribute for compliance under CF - flag_values exists as an array - unique elements in flag_values - flag_values si the same dtype as the variable - flag_values is the same length as flag_meanings :param netCDF4.Dataset ds: An open netCDF dataset :param str name: Name of variable to check :rtype: compliance_checker.base.Result ''' variable = ds.variables[name] flag_values = variable.flag_values flag_meanings = getattr(variable, 'flag_meanings', None) valid_values = TestCtx(BaseCheck.HIGH, self.section_titles['3.5']) # flag_values must be a list of values, not a string or anything else valid_values.assert_true(isinstance(flag_values, np.ndarray), "{}'s flag_values must be an array of values not {}".format(name, type(flag_values))) # We can't perform any more checks if not isinstance(flag_values, np.ndarray): return valid_values.to_result() # the flag values must be independent, no repeating values flag_set = set(flag_values) valid_values.assert_true(len(flag_set) == len(flag_values), "{}'s flag_values must be independent and can not be repeated".format(name)) # the data type for flag_values should be the same as the variable valid_values.assert_true(variable.dtype.type == flag_values.dtype.type, "flag_values ({}) must be the same data type as {} ({})" "".format(flag_values.dtype.type, name, variable.dtype.type)) if isinstance(flag_meanings, basestring): flag_meanings = flag_meanings.split() valid_values.assert_true(len(flag_meanings) == len(flag_values), "{}'s flag_meanings and flag_values should have the same number ".format(name)+\ "of elements.") return valid_values.to_result()
python
def _check_flag_values(self, ds, name): ''' Checks a variable's flag_values attribute for compliance under CF - flag_values exists as an array - unique elements in flag_values - flag_values si the same dtype as the variable - flag_values is the same length as flag_meanings :param netCDF4.Dataset ds: An open netCDF dataset :param str name: Name of variable to check :rtype: compliance_checker.base.Result ''' variable = ds.variables[name] flag_values = variable.flag_values flag_meanings = getattr(variable, 'flag_meanings', None) valid_values = TestCtx(BaseCheck.HIGH, self.section_titles['3.5']) # flag_values must be a list of values, not a string or anything else valid_values.assert_true(isinstance(flag_values, np.ndarray), "{}'s flag_values must be an array of values not {}".format(name, type(flag_values))) # We can't perform any more checks if not isinstance(flag_values, np.ndarray): return valid_values.to_result() # the flag values must be independent, no repeating values flag_set = set(flag_values) valid_values.assert_true(len(flag_set) == len(flag_values), "{}'s flag_values must be independent and can not be repeated".format(name)) # the data type for flag_values should be the same as the variable valid_values.assert_true(variable.dtype.type == flag_values.dtype.type, "flag_values ({}) must be the same data type as {} ({})" "".format(flag_values.dtype.type, name, variable.dtype.type)) if isinstance(flag_meanings, basestring): flag_meanings = flag_meanings.split() valid_values.assert_true(len(flag_meanings) == len(flag_values), "{}'s flag_meanings and flag_values should have the same number ".format(name)+\ "of elements.") return valid_values.to_result()
[ "def", "_check_flag_values", "(", "self", ",", "ds", ",", "name", ")", ":", "variable", "=", "ds", ".", "variables", "[", "name", "]", "flag_values", "=", "variable", ".", "flag_values", "flag_meanings", "=", "getattr", "(", "variable", ",", "'flag_meanings'...
Checks a variable's flag_values attribute for compliance under CF - flag_values exists as an array - unique elements in flag_values - flag_values si the same dtype as the variable - flag_values is the same length as flag_meanings :param netCDF4.Dataset ds: An open netCDF dataset :param str name: Name of variable to check :rtype: compliance_checker.base.Result
[ "Checks", "a", "variable", "s", "flag_values", "attribute", "for", "compliance", "under", "CF" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L1353-L1396
train
31,429
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck._check_flag_masks
def _check_flag_masks(self, ds, name): ''' Check a variable's flag_masks attribute for compliance under CF - flag_masks exists as an array - flag_masks is the same dtype as the variable - variable's dtype can support bit-field - flag_masks is the same length as flag_meanings :param netCDF4.Dataset ds: An open netCDF dataset :param str name: Variable name :rtype: compliance_checker.base.Result ''' variable = ds.variables[name] flag_masks = variable.flag_masks flag_meanings = getattr(ds, 'flag_meanings', None) valid_masks = TestCtx(BaseCheck.HIGH, self.section_titles['3.5']) valid_masks.assert_true(isinstance(flag_masks, np.ndarray), "{}'s flag_masks must be an array of values not {}".format(name, type(flag_masks))) if not isinstance(flag_masks, np.ndarray): return valid_masks.to_result() valid_masks.assert_true(variable.dtype.type == flag_masks.dtype.type, "flag_masks ({}) mustbe the same data type as {} ({})" "".format(flag_masks.dtype.type, name, variable.dtype.type)) type_ok = (np.issubdtype(variable.dtype, np.integer) or np.issubdtype(variable.dtype, 'S') or np.issubdtype(variable.dtype, 'b')) valid_masks.assert_true(type_ok, "{}'s data type must be capable of bit-field expression".format(name)) if isinstance(flag_meanings, basestring): flag_meanings = flag_meanings.split() valid_masks.assert_true(len(flag_meanings) == len(flag_masks), "{} flag_meanings and flag_masks should have the same number ".format(name)+\ "of elements.") return valid_masks.to_result()
python
def _check_flag_masks(self, ds, name): ''' Check a variable's flag_masks attribute for compliance under CF - flag_masks exists as an array - flag_masks is the same dtype as the variable - variable's dtype can support bit-field - flag_masks is the same length as flag_meanings :param netCDF4.Dataset ds: An open netCDF dataset :param str name: Variable name :rtype: compliance_checker.base.Result ''' variable = ds.variables[name] flag_masks = variable.flag_masks flag_meanings = getattr(ds, 'flag_meanings', None) valid_masks = TestCtx(BaseCheck.HIGH, self.section_titles['3.5']) valid_masks.assert_true(isinstance(flag_masks, np.ndarray), "{}'s flag_masks must be an array of values not {}".format(name, type(flag_masks))) if not isinstance(flag_masks, np.ndarray): return valid_masks.to_result() valid_masks.assert_true(variable.dtype.type == flag_masks.dtype.type, "flag_masks ({}) mustbe the same data type as {} ({})" "".format(flag_masks.dtype.type, name, variable.dtype.type)) type_ok = (np.issubdtype(variable.dtype, np.integer) or np.issubdtype(variable.dtype, 'S') or np.issubdtype(variable.dtype, 'b')) valid_masks.assert_true(type_ok, "{}'s data type must be capable of bit-field expression".format(name)) if isinstance(flag_meanings, basestring): flag_meanings = flag_meanings.split() valid_masks.assert_true(len(flag_meanings) == len(flag_masks), "{} flag_meanings and flag_masks should have the same number ".format(name)+\ "of elements.") return valid_masks.to_result()
[ "def", "_check_flag_masks", "(", "self", ",", "ds", ",", "name", ")", ":", "variable", "=", "ds", ".", "variables", "[", "name", "]", "flag_masks", "=", "variable", ".", "flag_masks", "flag_meanings", "=", "getattr", "(", "ds", ",", "'flag_meanings'", ",",...
Check a variable's flag_masks attribute for compliance under CF - flag_masks exists as an array - flag_masks is the same dtype as the variable - variable's dtype can support bit-field - flag_masks is the same length as flag_meanings :param netCDF4.Dataset ds: An open netCDF dataset :param str name: Variable name :rtype: compliance_checker.base.Result
[ "Check", "a", "variable", "s", "flag_masks", "attribute", "for", "compliance", "under", "CF" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L1398-L1440
train
31,430
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck._check_flag_meanings
def _check_flag_meanings(self, ds, name): ''' Check a variable's flag_meanings attribute for compliance under CF - flag_meanings exists - flag_meanings is a string - flag_meanings elements are valid strings :param netCDF4.Dataset ds: An open netCDF dataset :param str name: Variable name :rtype: compliance_checker.base.Result ''' variable = ds.variables[name] flag_meanings = getattr(variable, 'flag_meanings', None) valid_meanings = TestCtx(BaseCheck.HIGH, self.section_titles['3.5']) valid_meanings.assert_true(flag_meanings is not None, "{}'s flag_meanings attribute is required for flag variables".format(name)) valid_meanings.assert_true(isinstance(flag_meanings, basestring), "{}'s flag_meanings attribute must be a string".format(name)) # We can't perform any additional checks if it's not a string if not isinstance(flag_meanings, basestring): return valid_meanings.to_result() valid_meanings.assert_true(len(flag_meanings) > 0, "{}'s flag_meanings can't be empty".format(name)) flag_regx = regex.compile(r"^[0-9A-Za-z_\-.+@]+$") meanings = flag_meanings.split() for meaning in meanings: if flag_regx.match(meaning) is None: valid_meanings.assert_true(False, "{}'s flag_meanings attribute defined an illegal flag meaning ".format(name)+\ "{}".format(meaning)) return valid_meanings.to_result()
python
def _check_flag_meanings(self, ds, name): ''' Check a variable's flag_meanings attribute for compliance under CF - flag_meanings exists - flag_meanings is a string - flag_meanings elements are valid strings :param netCDF4.Dataset ds: An open netCDF dataset :param str name: Variable name :rtype: compliance_checker.base.Result ''' variable = ds.variables[name] flag_meanings = getattr(variable, 'flag_meanings', None) valid_meanings = TestCtx(BaseCheck.HIGH, self.section_titles['3.5']) valid_meanings.assert_true(flag_meanings is not None, "{}'s flag_meanings attribute is required for flag variables".format(name)) valid_meanings.assert_true(isinstance(flag_meanings, basestring), "{}'s flag_meanings attribute must be a string".format(name)) # We can't perform any additional checks if it's not a string if not isinstance(flag_meanings, basestring): return valid_meanings.to_result() valid_meanings.assert_true(len(flag_meanings) > 0, "{}'s flag_meanings can't be empty".format(name)) flag_regx = regex.compile(r"^[0-9A-Za-z_\-.+@]+$") meanings = flag_meanings.split() for meaning in meanings: if flag_regx.match(meaning) is None: valid_meanings.assert_true(False, "{}'s flag_meanings attribute defined an illegal flag meaning ".format(name)+\ "{}".format(meaning)) return valid_meanings.to_result()
[ "def", "_check_flag_meanings", "(", "self", ",", "ds", ",", "name", ")", ":", "variable", "=", "ds", ".", "variables", "[", "name", "]", "flag_meanings", "=", "getattr", "(", "variable", ",", "'flag_meanings'", ",", "None", ")", "valid_meanings", "=", "Tes...
Check a variable's flag_meanings attribute for compliance under CF - flag_meanings exists - flag_meanings is a string - flag_meanings elements are valid strings :param netCDF4.Dataset ds: An open netCDF dataset :param str name: Variable name :rtype: compliance_checker.base.Result
[ "Check", "a", "variable", "s", "flag_meanings", "attribute", "for", "compliance", "under", "CF" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L1442-L1478
train
31,431
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck.check_coordinate_types
def check_coordinate_types(self, ds): ''' Check the axis attribute of coordinate variables CF §4 The attribute axis may be attached to a coordinate variable and given one of the values X, Y, Z or T which stand for a longitude, latitude, vertical, or time axis respectively. Alternatively the standard_name attribute may be used for direct identification. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results ''' ret_val = [] for variable in ds.get_variables_by_attributes(axis=lambda x: x is not None): name = variable.name # Coordinate compressions should not be checked as a valid # coordinate, which they are not. They are a mechanism to project # an array of indices onto a 2-d grid containing valid coordinates. if cfutil.is_compression_coordinate(ds, name): continue variable = ds.variables[name] # Even though it's not allowed in CF 1.6, it is allowed in CF 1.7 # and we see people do it, often. if hasattr(variable, 'cf_role'): continue # §6.1 allows for labels to be referenced as auxiliary coordinate # variables, which should not be checked like the rest of the # coordinates. if variable.dtype.char == 'S': continue axis = getattr(variable, 'axis', None) if axis is not None: valid_axis = self._check_axis(ds, name) ret_val.append(valid_axis) return ret_val
python
def check_coordinate_types(self, ds): ''' Check the axis attribute of coordinate variables CF §4 The attribute axis may be attached to a coordinate variable and given one of the values X, Y, Z or T which stand for a longitude, latitude, vertical, or time axis respectively. Alternatively the standard_name attribute may be used for direct identification. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results ''' ret_val = [] for variable in ds.get_variables_by_attributes(axis=lambda x: x is not None): name = variable.name # Coordinate compressions should not be checked as a valid # coordinate, which they are not. They are a mechanism to project # an array of indices onto a 2-d grid containing valid coordinates. if cfutil.is_compression_coordinate(ds, name): continue variable = ds.variables[name] # Even though it's not allowed in CF 1.6, it is allowed in CF 1.7 # and we see people do it, often. if hasattr(variable, 'cf_role'): continue # §6.1 allows for labels to be referenced as auxiliary coordinate # variables, which should not be checked like the rest of the # coordinates. if variable.dtype.char == 'S': continue axis = getattr(variable, 'axis', None) if axis is not None: valid_axis = self._check_axis(ds, name) ret_val.append(valid_axis) return ret_val
[ "def", "check_coordinate_types", "(", "self", ",", "ds", ")", ":", "ret_val", "=", "[", "]", "for", "variable", "in", "ds", ".", "get_variables_by_attributes", "(", "axis", "=", "lambda", "x", ":", "x", "is", "not", "None", ")", ":", "name", "=", "vari...
Check the axis attribute of coordinate variables CF §4 The attribute axis may be attached to a coordinate variable and given one of the values X, Y, Z or T which stand for a longitude, latitude, vertical, or time axis respectively. Alternatively the standard_name attribute may be used for direct identification. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results
[ "Check", "the", "axis", "attribute", "of", "coordinate", "variables" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L1486-L1527
train
31,432
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck._check_axis
def _check_axis(self, ds, name): ''' Checks that the axis attribute is a string and an allowed value, namely one of 'T', 'X', 'Y', or 'Z'. :param netCDF4.Dataset ds: An open netCDF dataset :param str name: Name of the variable :rtype: compliance_checker.base.Result ''' allowed_axis = ['T', 'X', 'Y', 'Z'] variable = ds.variables[name] axis = variable.axis valid_axis = TestCtx(BaseCheck.HIGH, self.section_titles['4']) axis_is_string = isinstance(axis, basestring), valid_axis.assert_true(axis_is_string and len(axis) > 0, "{}'s axis attribute must be a non-empty string".format(name)) # If axis isn't a string we can't continue any checks if not axis_is_string or len(axis) == 0: return valid_axis.to_result() valid_axis.assert_true(axis in allowed_axis, "{}'s axis attribute must be T, X, Y, or Z, ".format(name)+\ "currently {}".format(axis)) return valid_axis.to_result()
python
def _check_axis(self, ds, name): ''' Checks that the axis attribute is a string and an allowed value, namely one of 'T', 'X', 'Y', or 'Z'. :param netCDF4.Dataset ds: An open netCDF dataset :param str name: Name of the variable :rtype: compliance_checker.base.Result ''' allowed_axis = ['T', 'X', 'Y', 'Z'] variable = ds.variables[name] axis = variable.axis valid_axis = TestCtx(BaseCheck.HIGH, self.section_titles['4']) axis_is_string = isinstance(axis, basestring), valid_axis.assert_true(axis_is_string and len(axis) > 0, "{}'s axis attribute must be a non-empty string".format(name)) # If axis isn't a string we can't continue any checks if not axis_is_string or len(axis) == 0: return valid_axis.to_result() valid_axis.assert_true(axis in allowed_axis, "{}'s axis attribute must be T, X, Y, or Z, ".format(name)+\ "currently {}".format(axis)) return valid_axis.to_result()
[ "def", "_check_axis", "(", "self", ",", "ds", ",", "name", ")", ":", "allowed_axis", "=", "[", "'T'", ",", "'X'", ",", "'Y'", ",", "'Z'", "]", "variable", "=", "ds", ".", "variables", "[", "name", "]", "axis", "=", "variable", ".", "axis", "valid_a...
Checks that the axis attribute is a string and an allowed value, namely one of 'T', 'X', 'Y', or 'Z'. :param netCDF4.Dataset ds: An open netCDF dataset :param str name: Name of the variable :rtype: compliance_checker.base.Result
[ "Checks", "that", "the", "axis", "attribute", "is", "a", "string", "and", "an", "allowed", "value", "namely", "one", "of", "T", "X", "Y", "or", "Z", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L1529-L1555
train
31,433
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck.check_dimensional_vertical_coordinate
def check_dimensional_vertical_coordinate(self, ds): ''' Check units for variables defining vertical position are valid under CF. CF §4.3.1 The units attribute for dimensional coordinates will be a string formatted as per the udunits.dat file. The acceptable units for vertical (depth or height) coordinate variables are: - units of pressure as listed in the file udunits.dat. For vertical axes the most commonly used of these include include bar, millibar, decibar, atmosphere (atm), pascal (Pa), and hPa. - units of length as listed in the file udunits.dat. For vertical axes the most commonly used of these include meter (metre, m), and kilometer (km). - other units listed in the file udunits.dat that may under certain circumstances reference vertical position such as units of density or temperature. Plural forms are also acceptable. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results ''' ret_val = [] z_variables = cfutil.get_z_variables(ds) #dimless_standard_names = [name for name, regx in dimless_vertical_coordinates] for name in z_variables: variable = ds.variables[name] standard_name = getattr(variable, 'standard_name', None) units = getattr(variable, 'units', None) positive = getattr(variable, 'positive', None) # Skip the variable if it's dimensionless if (hasattr(variable, 'formula_terms') or standard_name in dimless_vertical_coordinates): continue valid_vertical_coord = TestCtx(BaseCheck.HIGH, self.section_titles["4.3"]) valid_vertical_coord.assert_true(isinstance(units, basestring) and units, "§4.3.1 {}'s units must be defined for vertical coordinates, " "there is no default".format(name)) if not util.units_convertible('bar', units): valid_vertical_coord.assert_true(positive in ('up', 'down'), "{}: vertical coordinates not defining pressure must include " "a positive attribute that is either 'up' or 'down'".format(name)) # _check_valid_standard_units, part of the Chapter 3 checks, # already verifies that this coordinate has 
valid units ret_val.append(valid_vertical_coord.to_result()) return ret_val
python
def check_dimensional_vertical_coordinate(self, ds): ''' Check units for variables defining vertical position are valid under CF. CF §4.3.1 The units attribute for dimensional coordinates will be a string formatted as per the udunits.dat file. The acceptable units for vertical (depth or height) coordinate variables are: - units of pressure as listed in the file udunits.dat. For vertical axes the most commonly used of these include include bar, millibar, decibar, atmosphere (atm), pascal (Pa), and hPa. - units of length as listed in the file udunits.dat. For vertical axes the most commonly used of these include meter (metre, m), and kilometer (km). - other units listed in the file udunits.dat that may under certain circumstances reference vertical position such as units of density or temperature. Plural forms are also acceptable. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results ''' ret_val = [] z_variables = cfutil.get_z_variables(ds) #dimless_standard_names = [name for name, regx in dimless_vertical_coordinates] for name in z_variables: variable = ds.variables[name] standard_name = getattr(variable, 'standard_name', None) units = getattr(variable, 'units', None) positive = getattr(variable, 'positive', None) # Skip the variable if it's dimensionless if (hasattr(variable, 'formula_terms') or standard_name in dimless_vertical_coordinates): continue valid_vertical_coord = TestCtx(BaseCheck.HIGH, self.section_titles["4.3"]) valid_vertical_coord.assert_true(isinstance(units, basestring) and units, "§4.3.1 {}'s units must be defined for vertical coordinates, " "there is no default".format(name)) if not util.units_convertible('bar', units): valid_vertical_coord.assert_true(positive in ('up', 'down'), "{}: vertical coordinates not defining pressure must include " "a positive attribute that is either 'up' or 'down'".format(name)) # _check_valid_standard_units, part of the Chapter 3 checks, # already verifies that this coordinate has 
valid units ret_val.append(valid_vertical_coord.to_result()) return ret_val
[ "def", "check_dimensional_vertical_coordinate", "(", "self", ",", "ds", ")", ":", "ret_val", "=", "[", "]", "z_variables", "=", "cfutil", ".", "get_z_variables", "(", "ds", ")", "#dimless_standard_names = [name for name, regx in dimless_vertical_coordinates]", "for", "nam...
Check units for variables defining vertical position are valid under CF. CF §4.3.1 The units attribute for dimensional coordinates will be a string formatted as per the udunits.dat file. The acceptable units for vertical (depth or height) coordinate variables are: - units of pressure as listed in the file udunits.dat. For vertical axes the most commonly used of these include include bar, millibar, decibar, atmosphere (atm), pascal (Pa), and hPa. - units of length as listed in the file udunits.dat. For vertical axes the most commonly used of these include meter (metre, m), and kilometer (km). - other units listed in the file udunits.dat that may under certain circumstances reference vertical position such as units of density or temperature. Plural forms are also acceptable. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results
[ "Check", "units", "for", "variables", "defining", "vertical", "position", "are", "valid", "under", "CF", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L1746-L1800
train
31,434
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck.check_dimensionless_vertical_coordinate
def check_dimensionless_vertical_coordinate(self, ds): ''' Check the validity of dimensionless coordinates under CF CF §4.3.2 The units attribute is not required for dimensionless coordinates. The standard_name attribute associates a coordinate with its definition from Appendix D, Dimensionless Vertical Coordinates. The definition provides a mapping between the dimensionless coordinate values and dimensional values that can positively and uniquely indicate the location of the data. A new attribute, formula_terms, is used to associate terms in the definitions with variables in a netCDF file. To maintain backwards compatibility with COARDS the use of these attributes is not required, but is strongly recommended. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results ''' ret_val = [] z_variables = cfutil.get_z_variables(ds) deprecated_units = [ 'level', 'layer', 'sigma_level' ] for name in z_variables: variable = ds.variables[name] standard_name = getattr(variable, 'standard_name', None) units = getattr(variable, 'units', None) formula_terms = getattr(variable, 'formula_terms', None) # Skip the variable if it's dimensional if (formula_terms is None and standard_name not in dimless_vertical_coordinates): continue is_not_deprecated = TestCtx(BaseCheck.LOW, self.section_titles["4.3"]) is_not_deprecated.assert_true(units not in deprecated_units, "§4.3.2: units are deprecated by CF in variable {}: {}" "".format(name, units)) ret_val.append(is_not_deprecated.to_result()) ret_val.append(self._check_formula_terms(ds, name)) return ret_val
python
def check_dimensionless_vertical_coordinate(self, ds): ''' Check the validity of dimensionless coordinates under CF CF §4.3.2 The units attribute is not required for dimensionless coordinates. The standard_name attribute associates a coordinate with its definition from Appendix D, Dimensionless Vertical Coordinates. The definition provides a mapping between the dimensionless coordinate values and dimensional values that can positively and uniquely indicate the location of the data. A new attribute, formula_terms, is used to associate terms in the definitions with variables in a netCDF file. To maintain backwards compatibility with COARDS the use of these attributes is not required, but is strongly recommended. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results ''' ret_val = [] z_variables = cfutil.get_z_variables(ds) deprecated_units = [ 'level', 'layer', 'sigma_level' ] for name in z_variables: variable = ds.variables[name] standard_name = getattr(variable, 'standard_name', None) units = getattr(variable, 'units', None) formula_terms = getattr(variable, 'formula_terms', None) # Skip the variable if it's dimensional if (formula_terms is None and standard_name not in dimless_vertical_coordinates): continue is_not_deprecated = TestCtx(BaseCheck.LOW, self.section_titles["4.3"]) is_not_deprecated.assert_true(units not in deprecated_units, "§4.3.2: units are deprecated by CF in variable {}: {}" "".format(name, units)) ret_val.append(is_not_deprecated.to_result()) ret_val.append(self._check_formula_terms(ds, name)) return ret_val
[ "def", "check_dimensionless_vertical_coordinate", "(", "self", ",", "ds", ")", ":", "ret_val", "=", "[", "]", "z_variables", "=", "cfutil", ".", "get_z_variables", "(", "ds", ")", "deprecated_units", "=", "[", "'level'", ",", "'layer'", ",", "'sigma_level'", "...
Check the validity of dimensionless coordinates under CF CF §4.3.2 The units attribute is not required for dimensionless coordinates. The standard_name attribute associates a coordinate with its definition from Appendix D, Dimensionless Vertical Coordinates. The definition provides a mapping between the dimensionless coordinate values and dimensional values that can positively and uniquely indicate the location of the data. A new attribute, formula_terms, is used to associate terms in the definitions with variables in a netCDF file. To maintain backwards compatibility with COARDS the use of these attributes is not required, but is strongly recommended. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results
[ "Check", "the", "validity", "of", "dimensionless", "coordinates", "under", "CF" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L1802-L1850
train
31,435
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck._check_formula_terms
def _check_formula_terms(self, ds, coord): ''' Checks a dimensionless vertical coordinate contains valid formula_terms - formula_terms is a non-empty string - formula_terms matches regx - every variable defined in formula_terms exists :param netCDF4.Dataset ds: An open netCDF dataset :rtype: compliance_checker.base.Result ''' variable = ds.variables[coord] standard_name = getattr(variable, 'standard_name', None) formula_terms = getattr(variable, 'formula_terms', None) valid_formula_terms = TestCtx(BaseCheck.HIGH, self.section_titles['4.3']) valid_formula_terms.assert_true(isinstance(formula_terms, basestring) and formula_terms, '§4.3.2: {}\'s formula_terms is a required attribute and must be a non-empty string' ''.format(coord)) # We can't check any more if not formula_terms: return valid_formula_terms.to_result() # check that the formula_terms are well formed and are present # The pattern for formula terms is always component: variable_name # the regex grouping always has component names in even positions and # the corresponding variable name in even positions. matches = regex.findall(r'([A-Za-z][A-Za-z0-9_]*: )([A-Za-z][A-Za-z0-9_]*)', variable.formula_terms) terms = set(m[0][:-2] for m in matches) # get the variables named in the formula terms and check if any # are not present in the dataset missing_vars = sorted(set(m[1] for m in matches) - set(ds.variables)) missing_fmt = "The following variable(s) referenced in {}:formula_terms are not present in the dataset: {}" valid_formula_terms.assert_true(len(missing_vars) == 0, missing_fmt.format(coord, ', '.join(missing_vars))) # try to reconstruct formula_terms by adding space in between the regex # matches. 
If it doesn't exactly match the original, the formatting # of the attribute is incorrect reconstructed_formula = ' '.join(m[0] + m[1] for m in matches) valid_formula_terms.assert_true(reconstructed_formula == formula_terms, "Attribute formula_terms is not well-formed") valid_formula_terms.assert_true(standard_name in dimless_vertical_coordinates, "unknown standard_name '{}' for dimensionless vertical coordinate {}" "".format(standard_name, coord)) if standard_name not in dimless_vertical_coordinates: return valid_formula_terms.to_result() valid_formula_terms.assert_true(no_missing_terms(standard_name, terms), "{}'s formula_terms are invalid for {}, please see appendix D of CF 1.6" "".format(coord, standard_name)) return valid_formula_terms.to_result()
python
def _check_formula_terms(self, ds, coord): ''' Checks a dimensionless vertical coordinate contains valid formula_terms - formula_terms is a non-empty string - formula_terms matches regx - every variable defined in formula_terms exists :param netCDF4.Dataset ds: An open netCDF dataset :rtype: compliance_checker.base.Result ''' variable = ds.variables[coord] standard_name = getattr(variable, 'standard_name', None) formula_terms = getattr(variable, 'formula_terms', None) valid_formula_terms = TestCtx(BaseCheck.HIGH, self.section_titles['4.3']) valid_formula_terms.assert_true(isinstance(formula_terms, basestring) and formula_terms, '§4.3.2: {}\'s formula_terms is a required attribute and must be a non-empty string' ''.format(coord)) # We can't check any more if not formula_terms: return valid_formula_terms.to_result() # check that the formula_terms are well formed and are present # The pattern for formula terms is always component: variable_name # the regex grouping always has component names in even positions and # the corresponding variable name in even positions. matches = regex.findall(r'([A-Za-z][A-Za-z0-9_]*: )([A-Za-z][A-Za-z0-9_]*)', variable.formula_terms) terms = set(m[0][:-2] for m in matches) # get the variables named in the formula terms and check if any # are not present in the dataset missing_vars = sorted(set(m[1] for m in matches) - set(ds.variables)) missing_fmt = "The following variable(s) referenced in {}:formula_terms are not present in the dataset: {}" valid_formula_terms.assert_true(len(missing_vars) == 0, missing_fmt.format(coord, ', '.join(missing_vars))) # try to reconstruct formula_terms by adding space in between the regex # matches. 
If it doesn't exactly match the original, the formatting # of the attribute is incorrect reconstructed_formula = ' '.join(m[0] + m[1] for m in matches) valid_formula_terms.assert_true(reconstructed_formula == formula_terms, "Attribute formula_terms is not well-formed") valid_formula_terms.assert_true(standard_name in dimless_vertical_coordinates, "unknown standard_name '{}' for dimensionless vertical coordinate {}" "".format(standard_name, coord)) if standard_name not in dimless_vertical_coordinates: return valid_formula_terms.to_result() valid_formula_terms.assert_true(no_missing_terms(standard_name, terms), "{}'s formula_terms are invalid for {}, please see appendix D of CF 1.6" "".format(coord, standard_name)) return valid_formula_terms.to_result()
[ "def", "_check_formula_terms", "(", "self", ",", "ds", ",", "coord", ")", ":", "variable", "=", "ds", ".", "variables", "[", "coord", "]", "standard_name", "=", "getattr", "(", "variable", ",", "'standard_name'", ",", "None", ")", "formula_terms", "=", "ge...
Checks a dimensionless vertical coordinate contains valid formula_terms - formula_terms is a non-empty string - formula_terms matches regx - every variable defined in formula_terms exists :param netCDF4.Dataset ds: An open netCDF dataset :rtype: compliance_checker.base.Result
[ "Checks", "a", "dimensionless", "vertical", "coordinate", "contains", "valid", "formula_terms" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L1852-L1906
train
31,436
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck.check_time_coordinate
def check_time_coordinate(self, ds): ''' Check variables defining time are valid under CF CF §4.4 Variables representing time must always explicitly include the units attribute; there is no default value. The units attribute takes a string value formatted as per the recommendations in the Udunits package. The acceptable units for time are listed in the udunits.dat file. The most commonly used of these strings (and their abbreviations) includes day (d), hour (hr, h), minute (min) and second (sec, s). Plural forms are also acceptable. The reference time string (appearing after the identifier since) may include date alone; date and time; or date, time, and time zone. The reference time is required. A reference time in year 0 has a special meaning (see Section 7.4, "Climatological Statistics"). Recommend that the unit year be used with caution. It is not a calendar year. For similar reasons the unit month should also be used with caution. A time coordinate is identifiable from its units string alone. Optionally, the time coordinate may be indicated additionally by providing the standard_name attribute with an appropriate value, and/or the axis attribute with the value T. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results ''' ret_val = [] for name in cfutil.get_time_variables(ds): variable = ds.variables[name] # Has units has_units = hasattr(variable, 'units') if not has_units: result = Result(BaseCheck.HIGH, False, self.section_titles['4.4'], ['%s does not have units' % name]) ret_val.append(result) continue # Correct and identifiable units result = Result(BaseCheck.HIGH, True, self.section_titles['4.4']) ret_val.append(result) correct_units = util.units_temporal(variable.units) reasoning = None if not correct_units: reasoning = ['%s does not have correct time units' % name] result = Result(BaseCheck.HIGH, correct_units, self.section_titles['4.4'], reasoning) ret_val.append(result) return ret_val
python
def check_time_coordinate(self, ds): ''' Check variables defining time are valid under CF CF §4.4 Variables representing time must always explicitly include the units attribute; there is no default value. The units attribute takes a string value formatted as per the recommendations in the Udunits package. The acceptable units for time are listed in the udunits.dat file. The most commonly used of these strings (and their abbreviations) includes day (d), hour (hr, h), minute (min) and second (sec, s). Plural forms are also acceptable. The reference time string (appearing after the identifier since) may include date alone; date and time; or date, time, and time zone. The reference time is required. A reference time in year 0 has a special meaning (see Section 7.4, "Climatological Statistics"). Recommend that the unit year be used with caution. It is not a calendar year. For similar reasons the unit month should also be used with caution. A time coordinate is identifiable from its units string alone. Optionally, the time coordinate may be indicated additionally by providing the standard_name attribute with an appropriate value, and/or the axis attribute with the value T. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results ''' ret_val = [] for name in cfutil.get_time_variables(ds): variable = ds.variables[name] # Has units has_units = hasattr(variable, 'units') if not has_units: result = Result(BaseCheck.HIGH, False, self.section_titles['4.4'], ['%s does not have units' % name]) ret_val.append(result) continue # Correct and identifiable units result = Result(BaseCheck.HIGH, True, self.section_titles['4.4']) ret_val.append(result) correct_units = util.units_temporal(variable.units) reasoning = None if not correct_units: reasoning = ['%s does not have correct time units' % name] result = Result(BaseCheck.HIGH, correct_units, self.section_titles['4.4'], reasoning) ret_val.append(result) return ret_val
[ "def", "check_time_coordinate", "(", "self", ",", "ds", ")", ":", "ret_val", "=", "[", "]", "for", "name", "in", "cfutil", ".", "get_time_variables", "(", "ds", ")", ":", "variable", "=", "ds", ".", "variables", "[", "name", "]", "# Has units", "has_unit...
Check variables defining time are valid under CF CF §4.4 Variables representing time must always explicitly include the units attribute; there is no default value. The units attribute takes a string value formatted as per the recommendations in the Udunits package. The acceptable units for time are listed in the udunits.dat file. The most commonly used of these strings (and their abbreviations) includes day (d), hour (hr, h), minute (min) and second (sec, s). Plural forms are also acceptable. The reference time string (appearing after the identifier since) may include date alone; date and time; or date, time, and time zone. The reference time is required. A reference time in year 0 has a special meaning (see Section 7.4, "Climatological Statistics"). Recommend that the unit year be used with caution. It is not a calendar year. For similar reasons the unit month should also be used with caution. A time coordinate is identifiable from its units string alone. Optionally, the time coordinate may be indicated additionally by providing the standard_name attribute with an appropriate value, and/or the axis attribute with the value T. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results
[ "Check", "variables", "defining", "time", "are", "valid", "under", "CF" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L1908-L1967
train
31,437
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck.check_aux_coordinates
def check_aux_coordinates(self, ds): ''' Chapter 5 paragraph 3 The dimensions of an auxiliary coordinate variable must be a subset of the dimensions of the variable with which the coordinate is associated, with two exceptions. First, string-valued coordinates (Section 6.1, "Labels") have a dimension for maximum string length. Second, in the ragged array representations of data (Chapter 9, Discrete Sampling Geometries), special methods are needed to connect the data and coordinates. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results ''' ret_val = [] geophysical_variables = self._find_geophysical_vars(ds) for name in geophysical_variables: variable = ds.variables[name] coordinates = getattr(variable, 'coordinates', None) # We use a set so we can assert dim_set = set(variable.dimensions) # No auxiliary coordinates, no check if not isinstance(coordinates, basestring) or coordinates == '': continue valid_aux_coords = TestCtx(BaseCheck.HIGH, self.section_titles["5"]) for aux_coord in coordinates.split(): valid_aux_coords.assert_true(aux_coord in ds.variables, "{}'s auxiliary coordinate specified by the coordinates attribute, {}, " "is not a variable in this dataset" "".format(name, aux_coord)) if aux_coord not in ds.variables: continue # §6.1 Allows for "labels" to be referenced as coordinates if ds.variables[aux_coord].dtype.char == 'S': continue aux_coord_dims = set(ds.variables[aux_coord].dimensions) valid_aux_coords.assert_true(aux_coord_dims.issubset(dim_set), "dimensions for auxiliary coordinate variable {} ({}) " "are not a subset of dimensions for variable {} ({})" "".format(aux_coord, ', '.join(aux_coord_dims), name, ', '.join(dim_set))) ret_val.append(valid_aux_coords.to_result()) return ret_val
python
def check_aux_coordinates(self, ds): ''' Chapter 5 paragraph 3 The dimensions of an auxiliary coordinate variable must be a subset of the dimensions of the variable with which the coordinate is associated, with two exceptions. First, string-valued coordinates (Section 6.1, "Labels") have a dimension for maximum string length. Second, in the ragged array representations of data (Chapter 9, Discrete Sampling Geometries), special methods are needed to connect the data and coordinates. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results ''' ret_val = [] geophysical_variables = self._find_geophysical_vars(ds) for name in geophysical_variables: variable = ds.variables[name] coordinates = getattr(variable, 'coordinates', None) # We use a set so we can assert dim_set = set(variable.dimensions) # No auxiliary coordinates, no check if not isinstance(coordinates, basestring) or coordinates == '': continue valid_aux_coords = TestCtx(BaseCheck.HIGH, self.section_titles["5"]) for aux_coord in coordinates.split(): valid_aux_coords.assert_true(aux_coord in ds.variables, "{}'s auxiliary coordinate specified by the coordinates attribute, {}, " "is not a variable in this dataset" "".format(name, aux_coord)) if aux_coord not in ds.variables: continue # §6.1 Allows for "labels" to be referenced as coordinates if ds.variables[aux_coord].dtype.char == 'S': continue aux_coord_dims = set(ds.variables[aux_coord].dimensions) valid_aux_coords.assert_true(aux_coord_dims.issubset(dim_set), "dimensions for auxiliary coordinate variable {} ({}) " "are not a subset of dimensions for variable {} ({})" "".format(aux_coord, ', '.join(aux_coord_dims), name, ', '.join(dim_set))) ret_val.append(valid_aux_coords.to_result()) return ret_val
[ "def", "check_aux_coordinates", "(", "self", ",", "ds", ")", ":", "ret_val", "=", "[", "]", "geophysical_variables", "=", "self", ".", "_find_geophysical_vars", "(", "ds", ")", "for", "name", "in", "geophysical_variables", ":", "variable", "=", "ds", ".", "v...
Chapter 5 paragraph 3 The dimensions of an auxiliary coordinate variable must be a subset of the dimensions of the variable with which the coordinate is associated, with two exceptions. First, string-valued coordinates (Section 6.1, "Labels") have a dimension for maximum string length. Second, in the ragged array representations of data (Chapter 9, Discrete Sampling Geometries), special methods are needed to connect the data and coordinates. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results
[ "Chapter", "5", "paragraph", "3" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L2074-L2126
train
31,438
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck.check_duplicate_axis
def check_duplicate_axis(self, ds): ''' Checks that no variable contains two coordinates defining the same axis. Chapter 5 paragraph 6 If an axis attribute is attached to an auxiliary coordinate variable, it can be used by applications in the same way the `axis` attribute attached to a coordinate variable is used. However, it is not permissible for a [geophysical variable] to have both a coordinate variable and an auxiliary coordinate variable, or more than one of either type of variable, having an `axis` attribute with any given value e.g. there must be no more than one axis attribute for X for any [geophysical variable]. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: compliance_checker.base.Result :return: List of results ''' ret_val = [] geophysical_variables = self._find_geophysical_vars(ds) for name in geophysical_variables: no_duplicates = TestCtx(BaseCheck.HIGH, self.section_titles['5']) axis_map = cfutil.get_axis_map(ds, name) axes = [] # For every coordinate associated with this variable, keep track of # which coordinates define an axis and assert that there are no # duplicate axis attributes defined in the set of associated # coordinates. for axis, coordinates in axis_map.items(): for coordinate in coordinates: axis_attr = getattr(ds.variables[coordinate], 'axis', None) no_duplicates.assert_true(axis_attr is None or axis_attr not in axes, "'{}' has duplicate axis {} defined by {}".format(name, axis_attr, coordinate)) if axis_attr and axis_attr not in axes: axes.append(axis_attr) ret_val.append(no_duplicates.to_result()) return ret_val
python
def check_duplicate_axis(self, ds): ''' Checks that no variable contains two coordinates defining the same axis. Chapter 5 paragraph 6 If an axis attribute is attached to an auxiliary coordinate variable, it can be used by applications in the same way the `axis` attribute attached to a coordinate variable is used. However, it is not permissible for a [geophysical variable] to have both a coordinate variable and an auxiliary coordinate variable, or more than one of either type of variable, having an `axis` attribute with any given value e.g. there must be no more than one axis attribute for X for any [geophysical variable]. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: compliance_checker.base.Result :return: List of results ''' ret_val = [] geophysical_variables = self._find_geophysical_vars(ds) for name in geophysical_variables: no_duplicates = TestCtx(BaseCheck.HIGH, self.section_titles['5']) axis_map = cfutil.get_axis_map(ds, name) axes = [] # For every coordinate associated with this variable, keep track of # which coordinates define an axis and assert that there are no # duplicate axis attributes defined in the set of associated # coordinates. for axis, coordinates in axis_map.items(): for coordinate in coordinates: axis_attr = getattr(ds.variables[coordinate], 'axis', None) no_duplicates.assert_true(axis_attr is None or axis_attr not in axes, "'{}' has duplicate axis {} defined by {}".format(name, axis_attr, coordinate)) if axis_attr and axis_attr not in axes: axes.append(axis_attr) ret_val.append(no_duplicates.to_result()) return ret_val
[ "def", "check_duplicate_axis", "(", "self", ",", "ds", ")", ":", "ret_val", "=", "[", "]", "geophysical_variables", "=", "self", ".", "_find_geophysical_vars", "(", "ds", ")", "for", "name", "in", "geophysical_variables", ":", "no_duplicates", "=", "TestCtx", ...
Checks that no variable contains two coordinates defining the same axis. Chapter 5 paragraph 6 If an axis attribute is attached to an auxiliary coordinate variable, it can be used by applications in the same way the `axis` attribute attached to a coordinate variable is used. However, it is not permissible for a [geophysical variable] to have both a coordinate variable and an auxiliary coordinate variable, or more than one of either type of variable, having an `axis` attribute with any given value e.g. there must be no more than one axis attribute for X for any [geophysical variable]. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: compliance_checker.base.Result :return: List of results
[ "Checks", "that", "no", "variable", "contains", "two", "coordinates", "defining", "the", "same", "axis", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L2128-L2170
train
31,439
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck.check_multi_dimensional_coords
def check_multi_dimensional_coords(self, ds): ''' Checks that no multidimensional coordinate shares a name with its dimensions. Chapter 5 paragraph 4 We recommend that the name of a [multidimensional coordinate] should not match the name of any of its dimensions. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results ''' ret_val = [] # This can only apply to auxiliary coordinate variables for coord in self._find_aux_coord_vars(ds): variable = ds.variables[coord] if variable.ndim < 2: continue not_matching = TestCtx(BaseCheck.MEDIUM, self.section_titles['5']) not_matching.assert_true(coord not in variable.dimensions, '{} shares the same name as one of its dimensions' ''.format(coord)) ret_val.append(not_matching.to_result()) return ret_val
python
def check_multi_dimensional_coords(self, ds): ''' Checks that no multidimensional coordinate shares a name with its dimensions. Chapter 5 paragraph 4 We recommend that the name of a [multidimensional coordinate] should not match the name of any of its dimensions. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results ''' ret_val = [] # This can only apply to auxiliary coordinate variables for coord in self._find_aux_coord_vars(ds): variable = ds.variables[coord] if variable.ndim < 2: continue not_matching = TestCtx(BaseCheck.MEDIUM, self.section_titles['5']) not_matching.assert_true(coord not in variable.dimensions, '{} shares the same name as one of its dimensions' ''.format(coord)) ret_val.append(not_matching.to_result()) return ret_val
[ "def", "check_multi_dimensional_coords", "(", "self", ",", "ds", ")", ":", "ret_val", "=", "[", "]", "# This can only apply to auxiliary coordinate variables", "for", "coord", "in", "self", ".", "_find_aux_coord_vars", "(", "ds", ")", ":", "variable", "=", "ds", "...
Checks that no multidimensional coordinate shares a name with its dimensions. Chapter 5 paragraph 4 We recommend that the name of a [multidimensional coordinate] should not match the name of any of its dimensions. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results
[ "Checks", "that", "no", "multidimensional", "coordinate", "shares", "a", "name", "with", "its", "dimensions", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L2172-L2200
train
31,440
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck.check_grid_coordinates
def check_grid_coordinates(self, ds): """ 5.6 When the coordinate variables for a horizontal grid are not longitude and latitude, it is required that the true latitude and longitude coordinates be supplied via the coordinates attribute. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results """ ret_val = [] latitudes = cfutil.get_true_latitude_variables(ds) longitudes = cfutil.get_true_longitude_variables(ds) check_featues = [ '2d-regular-grid', '2d-static-grid', '3d-regular-grid', '3d-static-grid', 'mapped-grid', 'reduced-grid' ] # This one is tricky because there's a very subtle difference between # latitude as defined in Chapter 4 and "true" latitude as defined in # chapter 5. # For each geophysical variable that defines a grid, assert it is # associated with a true latitude or longitude coordinate. for variable in self._find_geophysical_vars(ds): # We use a set so we can do set-wise comparisons with coordinate # dimensions dimensions = set(ds.variables[variable].dimensions) # If it's not a grid, skip it if cfutil.guess_feature_type(ds, variable) not in check_featues: continue has_coords = TestCtx(BaseCheck.HIGH, self.section_titles['5.6']) # axis_map is a defaultdict(list) mapping the axis to a list of # coordinate names. For example: # {'X': ['lon'], 'Y':['lat'], 'Z':['lev']} # The mapping comes from the dimensions of the variable and the # contents of the `coordinates` attribute only. 
axis_map = cfutil.get_axis_map(ds, variable) msg = '{}\'s coordinate variable "{}" is not one of the variables identifying true '+\ 'latitude/longitude and its dimensions are not a subset of {}\'s dimensions' alt = '{} has no coordinate associated with a variable identified as true latitude/longitude; '+\ 'its coordinate variable should also share a subset of {}\'s dimensions' # Make sure we can find latitude and its dimensions are a subset _lat = None found_lat = False for lat in axis_map['Y']: _lat = lat is_subset_dims = set(ds.variables[lat].dimensions).issubset(dimensions) if is_subset_dims and lat in latitudes: found_lat = True break if _lat: has_coords.assert_true(found_lat, msg.format(variable, _lat, variable)) else: has_coords.assert_true(found_lat, alt.format(variable, variable)) # Make sure we can find longitude and its dimensions are a subset _lon = None found_lon = False for lon in axis_map['X']: _lon = lon is_subset_dims = set(ds.variables[lon].dimensions).issubset(dimensions) if is_subset_dims and lon in longitudes: found_lon = True break if _lon: has_coords.assert_true(found_lon, msg.format(variable, _lon, variable)) else: has_coords.assert_true(found_lon, alt.format(variable, variable)) ret_val.append(has_coords.to_result()) return ret_val
python
def check_grid_coordinates(self, ds): """ 5.6 When the coordinate variables for a horizontal grid are not longitude and latitude, it is required that the true latitude and longitude coordinates be supplied via the coordinates attribute. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results """ ret_val = [] latitudes = cfutil.get_true_latitude_variables(ds) longitudes = cfutil.get_true_longitude_variables(ds) check_featues = [ '2d-regular-grid', '2d-static-grid', '3d-regular-grid', '3d-static-grid', 'mapped-grid', 'reduced-grid' ] # This one is tricky because there's a very subtle difference between # latitude as defined in Chapter 4 and "true" latitude as defined in # chapter 5. # For each geophysical variable that defines a grid, assert it is # associated with a true latitude or longitude coordinate. for variable in self._find_geophysical_vars(ds): # We use a set so we can do set-wise comparisons with coordinate # dimensions dimensions = set(ds.variables[variable].dimensions) # If it's not a grid, skip it if cfutil.guess_feature_type(ds, variable) not in check_featues: continue has_coords = TestCtx(BaseCheck.HIGH, self.section_titles['5.6']) # axis_map is a defaultdict(list) mapping the axis to a list of # coordinate names. For example: # {'X': ['lon'], 'Y':['lat'], 'Z':['lev']} # The mapping comes from the dimensions of the variable and the # contents of the `coordinates` attribute only. 
axis_map = cfutil.get_axis_map(ds, variable) msg = '{}\'s coordinate variable "{}" is not one of the variables identifying true '+\ 'latitude/longitude and its dimensions are not a subset of {}\'s dimensions' alt = '{} has no coordinate associated with a variable identified as true latitude/longitude; '+\ 'its coordinate variable should also share a subset of {}\'s dimensions' # Make sure we can find latitude and its dimensions are a subset _lat = None found_lat = False for lat in axis_map['Y']: _lat = lat is_subset_dims = set(ds.variables[lat].dimensions).issubset(dimensions) if is_subset_dims and lat in latitudes: found_lat = True break if _lat: has_coords.assert_true(found_lat, msg.format(variable, _lat, variable)) else: has_coords.assert_true(found_lat, alt.format(variable, variable)) # Make sure we can find longitude and its dimensions are a subset _lon = None found_lon = False for lon in axis_map['X']: _lon = lon is_subset_dims = set(ds.variables[lon].dimensions).issubset(dimensions) if is_subset_dims and lon in longitudes: found_lon = True break if _lon: has_coords.assert_true(found_lon, msg.format(variable, _lon, variable)) else: has_coords.assert_true(found_lon, alt.format(variable, variable)) ret_val.append(has_coords.to_result()) return ret_val
[ "def", "check_grid_coordinates", "(", "self", ",", "ds", ")", ":", "ret_val", "=", "[", "]", "latitudes", "=", "cfutil", ".", "get_true_latitude_variables", "(", "ds", ")", "longitudes", "=", "cfutil", ".", "get_true_longitude_variables", "(", "ds", ")", "chec...
5.6 When the coordinate variables for a horizontal grid are not longitude and latitude, it is required that the true latitude and longitude coordinates be supplied via the coordinates attribute. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results
[ "5", ".", "6", "When", "the", "coordinate", "variables", "for", "a", "horizontal", "grid", "are", "not", "longitude", "and", "latitude", "it", "is", "required", "that", "the", "true", "latitude", "and", "longitude", "coordinates", "be", "supplied", "via", "t...
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L2202-L2286
train
31,441
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck.check_reduced_horizontal_grid
def check_reduced_horizontal_grid(self, ds): """ 5.3 A "reduced" longitude-latitude grid is one in which the points are arranged along constant latitude lines with the number of points on a latitude line decreasing toward the poles. Recommend that this type of gridded data be stored using the compression scheme described in Section 8.2, "Compression by Gathering". The compressed latitude and longitude auxiliary coordinate variables are identified by the coordinates attribute. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results """ ret_val = [] # Create a set of coordinate varaibles defining `compress` lats = set(cfutil.get_latitude_variables(ds)) lons = set(cfutil.get_longitude_variables(ds)) for name in self._find_geophysical_vars(ds): coords = getattr(ds.variables[name], 'coordinates', None) axis_map = cfutil.get_axis_map(ds, name) # If this variable has no coordinate that defines compression if 'C' not in axis_map: continue valid_rgrid = TestCtx(BaseCheck.HIGH, self.section_titles['5.3']) # Make sure reduced grid features define coordinates valid_rgrid.assert_true(isinstance(coords, basestring) and coords, "reduced grid feature {} must define coordinates attribute" "".format(name)) # We can't check anything else if there are no defined coordinates if not isinstance(coords, basestring) and coords: continue coord_set = set(coords.split()) # Make sure it's associated with valid lat and valid lon valid_rgrid.assert_true(len(coord_set.intersection(lons)) > 0, '{} must be associated with a valid longitude coordinate'.format(name)) valid_rgrid.assert_true(len(coord_set.intersection(lats)) > 0, '{} must be associated with a valid latitude coordinate'.format(name)) valid_rgrid.assert_true(len(axis_map['C']) == 1, '{} can not be associated with more than one compressed coordinates: ' '({})'.format(name, ', '.join(axis_map['C']))) for compressed_coord in axis_map['C']: coord = ds.variables[compressed_coord] compress = getattr(coord, 
'compress', None) valid_rgrid.assert_true(isinstance(compress, basestring) and compress, "compress attribute for compression coordinate {} must be a non-empty string" "".format(compressed_coord)) if not isinstance(compress, basestring): continue for dim in compress.split(): valid_rgrid.assert_true(dim in ds.dimensions, "dimension {} referenced by {}:compress must exist" "".format(dim, compressed_coord)) ret_val.append(valid_rgrid.to_result()) return ret_val
python
def check_reduced_horizontal_grid(self, ds): """ 5.3 A "reduced" longitude-latitude grid is one in which the points are arranged along constant latitude lines with the number of points on a latitude line decreasing toward the poles. Recommend that this type of gridded data be stored using the compression scheme described in Section 8.2, "Compression by Gathering". The compressed latitude and longitude auxiliary coordinate variables are identified by the coordinates attribute. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results """ ret_val = [] # Create a set of coordinate varaibles defining `compress` lats = set(cfutil.get_latitude_variables(ds)) lons = set(cfutil.get_longitude_variables(ds)) for name in self._find_geophysical_vars(ds): coords = getattr(ds.variables[name], 'coordinates', None) axis_map = cfutil.get_axis_map(ds, name) # If this variable has no coordinate that defines compression if 'C' not in axis_map: continue valid_rgrid = TestCtx(BaseCheck.HIGH, self.section_titles['5.3']) # Make sure reduced grid features define coordinates valid_rgrid.assert_true(isinstance(coords, basestring) and coords, "reduced grid feature {} must define coordinates attribute" "".format(name)) # We can't check anything else if there are no defined coordinates if not isinstance(coords, basestring) and coords: continue coord_set = set(coords.split()) # Make sure it's associated with valid lat and valid lon valid_rgrid.assert_true(len(coord_set.intersection(lons)) > 0, '{} must be associated with a valid longitude coordinate'.format(name)) valid_rgrid.assert_true(len(coord_set.intersection(lats)) > 0, '{} must be associated with a valid latitude coordinate'.format(name)) valid_rgrid.assert_true(len(axis_map['C']) == 1, '{} can not be associated with more than one compressed coordinates: ' '({})'.format(name, ', '.join(axis_map['C']))) for compressed_coord in axis_map['C']: coord = ds.variables[compressed_coord] compress = getattr(coord, 
'compress', None) valid_rgrid.assert_true(isinstance(compress, basestring) and compress, "compress attribute for compression coordinate {} must be a non-empty string" "".format(compressed_coord)) if not isinstance(compress, basestring): continue for dim in compress.split(): valid_rgrid.assert_true(dim in ds.dimensions, "dimension {} referenced by {}:compress must exist" "".format(dim, compressed_coord)) ret_val.append(valid_rgrid.to_result()) return ret_val
[ "def", "check_reduced_horizontal_grid", "(", "self", ",", "ds", ")", ":", "ret_val", "=", "[", "]", "# Create a set of coordinate varaibles defining `compress`", "lats", "=", "set", "(", "cfutil", ".", "get_latitude_variables", "(", "ds", ")", ")", "lons", "=", "s...
5.3 A "reduced" longitude-latitude grid is one in which the points are arranged along constant latitude lines with the number of points on a latitude line decreasing toward the poles. Recommend that this type of gridded data be stored using the compression scheme described in Section 8.2, "Compression by Gathering". The compressed latitude and longitude auxiliary coordinate variables are identified by the coordinates attribute. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results
[ "5", ".", "3", "A", "reduced", "longitude", "-", "latitude", "grid", "is", "one", "in", "which", "the", "points", "are", "arranged", "along", "constant", "latitude", "lines", "with", "the", "number", "of", "points", "on", "a", "latitude", "line", "decreasi...
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L2288-L2349
train
31,442
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck.check_grid_mapping
def check_grid_mapping(self, ds): """ 5.6 When the coordinate variables for a horizontal grid are not longitude and latitude, it is required that the true latitude and longitude coordinates be supplied via the coordinates attribute. If in addition it is desired to describe the mapping between the given coordinate variables and the true latitude and longitude coordinates, the attribute grid_mapping may be used to supply this description. This attribute is attached to data variables so that variables with different mappings may be present in a single file. The attribute takes a string value which is the name of another variable in the file that provides the description of the mapping via a collection of attached attributes. This variable is called a grid mapping variable and is of arbitrary type since it contains no data. Its purpose is to act as a container for the attributes that define the mapping. The one attribute that all grid mapping variables must have is grid_mapping_name which takes a string value that contains the mapping's name. The other attributes that define a specific mapping depend on the value of grid_mapping_name. The valid values of grid_mapping_name along with the attributes that provide specific map parameter values are described in Appendix F, Grid Mappings. When the coordinate variables for a horizontal grid are longitude and latitude, a grid mapping variable with grid_mapping_name of latitude_longitude may be used to specify the ellipsoid and prime meridian. In order to make use of a grid mapping to directly calculate latitude and longitude values it is necessary to associate the coordinate variables with the independent variables of the mapping. This is done by assigning a standard_name to the coordinate variable. The appropriate values of the standard_name depend on the grid mapping and are given in Appendix F, Grid Mappings. 
:param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results """ ret_val = [] grid_mapping_variables = cfutil.get_grid_mapping_variables(ds) # Check the grid_mapping attribute to be a non-empty string and that its reference exists for variable in ds.get_variables_by_attributes(grid_mapping=lambda x: x is not None): grid_mapping = getattr(variable, 'grid_mapping', None) defines_grid_mapping = TestCtx(BaseCheck.HIGH, self.section_titles["5.6"]) defines_grid_mapping.assert_true((isinstance(grid_mapping, basestring) and grid_mapping), "{}'s grid_mapping attribute must be a "+\ "space-separated non-empty string".format(variable.name)) if isinstance(grid_mapping, basestring): for grid_var_name in grid_mapping.split(): defines_grid_mapping.assert_true(grid_var_name in ds.variables, "grid mapping variable {} must exist in this dataset".format(variable.name)) ret_val.append(defines_grid_mapping.to_result()) # Check the grid mapping variables themselves for grid_var_name in grid_mapping_variables: valid_grid_mapping = TestCtx(BaseCheck.HIGH, self.section_titles["5.6"]) grid_var = ds.variables[grid_var_name] grid_mapping_name = getattr(grid_var, 'grid_mapping_name', None) # Grid mapping name must be in appendix F valid_grid_mapping.assert_true(grid_mapping_name in grid_mapping_dict, "{} is not a valid grid_mapping_name.".format(grid_mapping_name)+\ " See Appendix F for valid grid mappings") # The grid_mapping_dict has a values of: # - required attributes # - optional attributes (can't check) # - required standard_names defined # - at least one of these attributes must be defined # We can't do any of the other grid mapping checks if it's not a valid grid mapping name if grid_mapping_name not in grid_mapping_dict: ret_val.append(valid_grid_mapping.to_result()) continue grid_mapping = grid_mapping_dict[grid_mapping_name] required_attrs = grid_mapping[0] # Make sure all the required attributes are defined for req in required_attrs: 
valid_grid_mapping.assert_true(hasattr(grid_var, req), "{} is a required attribute for grid mapping {}".format(req, grid_mapping_name)) # Make sure that exactly one of the exclusive attributes exist if len(grid_mapping_dict) == 4: at_least_attr = grid_mapping_dict[3] number_found = 0 for attr in at_least_attr: if hasattr(grid_var, attr): number_found += 1 valid_grid_mapping.assert_true(number_found == 1, "grid mapping {}".format(grid_mapping_name) +\ "must define exactly one of these attributes: "+\ "{}".format(' or '.join(at_least_attr))) # Make sure that exactly one variable is defined for each of the required standard_names expected_std_names = grid_mapping[2] for expected_std_name in expected_std_names: found_vars = ds.get_variables_by_attributes(standard_name=expected_std_name) valid_grid_mapping.assert_true(len(found_vars) == 1, "grid mapping {} requires exactly".format(grid_mapping_name)+\ "one variable with standard_name "+\ "{} to be defined".format(expected_std_name)) ret_val.append(valid_grid_mapping.to_result()) return ret_val
python
def check_grid_mapping(self, ds): """ 5.6 When the coordinate variables for a horizontal grid are not longitude and latitude, it is required that the true latitude and longitude coordinates be supplied via the coordinates attribute. If in addition it is desired to describe the mapping between the given coordinate variables and the true latitude and longitude coordinates, the attribute grid_mapping may be used to supply this description. This attribute is attached to data variables so that variables with different mappings may be present in a single file. The attribute takes a string value which is the name of another variable in the file that provides the description of the mapping via a collection of attached attributes. This variable is called a grid mapping variable and is of arbitrary type since it contains no data. Its purpose is to act as a container for the attributes that define the mapping. The one attribute that all grid mapping variables must have is grid_mapping_name which takes a string value that contains the mapping's name. The other attributes that define a specific mapping depend on the value of grid_mapping_name. The valid values of grid_mapping_name along with the attributes that provide specific map parameter values are described in Appendix F, Grid Mappings. When the coordinate variables for a horizontal grid are longitude and latitude, a grid mapping variable with grid_mapping_name of latitude_longitude may be used to specify the ellipsoid and prime meridian. In order to make use of a grid mapping to directly calculate latitude and longitude values it is necessary to associate the coordinate variables with the independent variables of the mapping. This is done by assigning a standard_name to the coordinate variable. The appropriate values of the standard_name depend on the grid mapping and are given in Appendix F, Grid Mappings. 
:param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results """ ret_val = [] grid_mapping_variables = cfutil.get_grid_mapping_variables(ds) # Check the grid_mapping attribute to be a non-empty string and that its reference exists for variable in ds.get_variables_by_attributes(grid_mapping=lambda x: x is not None): grid_mapping = getattr(variable, 'grid_mapping', None) defines_grid_mapping = TestCtx(BaseCheck.HIGH, self.section_titles["5.6"]) defines_grid_mapping.assert_true((isinstance(grid_mapping, basestring) and grid_mapping), "{}'s grid_mapping attribute must be a "+\ "space-separated non-empty string".format(variable.name)) if isinstance(grid_mapping, basestring): for grid_var_name in grid_mapping.split(): defines_grid_mapping.assert_true(grid_var_name in ds.variables, "grid mapping variable {} must exist in this dataset".format(variable.name)) ret_val.append(defines_grid_mapping.to_result()) # Check the grid mapping variables themselves for grid_var_name in grid_mapping_variables: valid_grid_mapping = TestCtx(BaseCheck.HIGH, self.section_titles["5.6"]) grid_var = ds.variables[grid_var_name] grid_mapping_name = getattr(grid_var, 'grid_mapping_name', None) # Grid mapping name must be in appendix F valid_grid_mapping.assert_true(grid_mapping_name in grid_mapping_dict, "{} is not a valid grid_mapping_name.".format(grid_mapping_name)+\ " See Appendix F for valid grid mappings") # The grid_mapping_dict has a values of: # - required attributes # - optional attributes (can't check) # - required standard_names defined # - at least one of these attributes must be defined # We can't do any of the other grid mapping checks if it's not a valid grid mapping name if grid_mapping_name not in grid_mapping_dict: ret_val.append(valid_grid_mapping.to_result()) continue grid_mapping = grid_mapping_dict[grid_mapping_name] required_attrs = grid_mapping[0] # Make sure all the required attributes are defined for req in required_attrs: 
valid_grid_mapping.assert_true(hasattr(grid_var, req), "{} is a required attribute for grid mapping {}".format(req, grid_mapping_name)) # Make sure that exactly one of the exclusive attributes exist if len(grid_mapping_dict) == 4: at_least_attr = grid_mapping_dict[3] number_found = 0 for attr in at_least_attr: if hasattr(grid_var, attr): number_found += 1 valid_grid_mapping.assert_true(number_found == 1, "grid mapping {}".format(grid_mapping_name) +\ "must define exactly one of these attributes: "+\ "{}".format(' or '.join(at_least_attr))) # Make sure that exactly one variable is defined for each of the required standard_names expected_std_names = grid_mapping[2] for expected_std_name in expected_std_names: found_vars = ds.get_variables_by_attributes(standard_name=expected_std_name) valid_grid_mapping.assert_true(len(found_vars) == 1, "grid mapping {} requires exactly".format(grid_mapping_name)+\ "one variable with standard_name "+\ "{} to be defined".format(expected_std_name)) ret_val.append(valid_grid_mapping.to_result()) return ret_val
[ "def", "check_grid_mapping", "(", "self", ",", "ds", ")", ":", "ret_val", "=", "[", "]", "grid_mapping_variables", "=", "cfutil", ".", "get_grid_mapping_variables", "(", "ds", ")", "# Check the grid_mapping attribute to be a non-empty string and that its reference exists", ...
5.6 When the coordinate variables for a horizontal grid are not longitude and latitude, it is required that the true latitude and longitude coordinates be supplied via the coordinates attribute. If in addition it is desired to describe the mapping between the given coordinate variables and the true latitude and longitude coordinates, the attribute grid_mapping may be used to supply this description. This attribute is attached to data variables so that variables with different mappings may be present in a single file. The attribute takes a string value which is the name of another variable in the file that provides the description of the mapping via a collection of attached attributes. This variable is called a grid mapping variable and is of arbitrary type since it contains no data. Its purpose is to act as a container for the attributes that define the mapping. The one attribute that all grid mapping variables must have is grid_mapping_name which takes a string value that contains the mapping's name. The other attributes that define a specific mapping depend on the value of grid_mapping_name. The valid values of grid_mapping_name along with the attributes that provide specific map parameter values are described in Appendix F, Grid Mappings. When the coordinate variables for a horizontal grid are longitude and latitude, a grid mapping variable with grid_mapping_name of latitude_longitude may be used to specify the ellipsoid and prime meridian. In order to make use of a grid mapping to directly calculate latitude and longitude values it is necessary to associate the coordinate variables with the independent variables of the mapping. This is done by assigning a standard_name to the coordinate variable. The appropriate values of the standard_name depend on the grid mapping and are given in Appendix F, Grid Mappings. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results
[ "5", ".", "6", "When", "the", "coordinate", "variables", "for", "a", "horizontal", "grid", "are", "not", "longitude", "and", "latitude", "it", "is", "required", "that", "the", "true", "latitude", "and", "longitude", "coordinates", "be", "supplied", "via", "t...
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L2353-L2466
train
31,443
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck.check_geographic_region
def check_geographic_region(self, ds): """ 6.1.1 When data is representative of geographic regions which can be identified by names but which have complex boundaries that cannot practically be specified using longitude and latitude boundary coordinates, a labeled axis should be used to identify the regions. Recommend that the names be chosen from the list of standardized region names whenever possible. To indicate that the label values are standardized the variable that contains the labels must be given the standard_name attribute with the value region. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results """ ret_val = [] region_list = [ # TODO maybe move this (and other info like it) into a config file? 'africa', 'antarctica', 'arabian_sea', 'aral_sea', 'arctic_ocean', 'asia', 'atlantic_ocean', 'australia', 'baltic_sea', 'barents_opening', 'barents_sea', 'beaufort_sea', 'bellingshausen_sea', 'bering_sea', 'bering_strait', 'black_sea', 'canadian_archipelago', 'caribbean_sea', 'caspian_sea', 'central_america', 'chukchi_sea', 'contiguous_united_states', 'denmark_strait', 'drake_passage', 'east_china_sea', 'english_channel', 'eurasia', 'europe', 'faroe_scotland_channel', 'florida_bahamas_strait', 'fram_strait', 'global', 'global_land', 'global_ocean', 'great_lakes', 'greenland', 'gulf_of_alaska', 'gulf_of_mexico', 'hudson_bay', 'iceland_faroe_channel', 'indian_ocean', 'indonesian_throughflow', 'indo_pacific_ocean', 'irish_sea', 'lake_baykal', 'lake_chad', 'lake_malawi', 'lake_tanganyika', 'lake_victoria', 'mediterranean_sea', 'mozambique_channel', 'north_america', 'north_sea', 'norwegian_sea', 'pacific_equatorial_undercurrent', 'pacific_ocean', 'persian_gulf', 'red_sea', 'ross_sea', 'sea_of_japan', 'sea_of_okhotsk', 'south_america', 'south_china_sea', 'southern_ocean', 'taiwan_luzon_straits', 'weddell_sea', 'windward_passage', 'yellow_sea' ] for var in ds.get_variables_by_attributes(standard_name='region'): valid_region = 
TestCtx(BaseCheck.MEDIUM, self.section_titles["6.1"]) region = var[:] if np.ma.isMA(region): region = region.data valid_region.assert_true(''.join(region.astype(str)).lower() in region_list, "6.1.1 '{}' specified by '{}' is not a valid region".format( ''.join(region.astype(str)), var.name ) ) ret_val.append(valid_region.to_result()) return ret_val
python
def check_geographic_region(self, ds): """ 6.1.1 When data is representative of geographic regions which can be identified by names but which have complex boundaries that cannot practically be specified using longitude and latitude boundary coordinates, a labeled axis should be used to identify the regions. Recommend that the names be chosen from the list of standardized region names whenever possible. To indicate that the label values are standardized the variable that contains the labels must be given the standard_name attribute with the value region. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results """ ret_val = [] region_list = [ # TODO maybe move this (and other info like it) into a config file? 'africa', 'antarctica', 'arabian_sea', 'aral_sea', 'arctic_ocean', 'asia', 'atlantic_ocean', 'australia', 'baltic_sea', 'barents_opening', 'barents_sea', 'beaufort_sea', 'bellingshausen_sea', 'bering_sea', 'bering_strait', 'black_sea', 'canadian_archipelago', 'caribbean_sea', 'caspian_sea', 'central_america', 'chukchi_sea', 'contiguous_united_states', 'denmark_strait', 'drake_passage', 'east_china_sea', 'english_channel', 'eurasia', 'europe', 'faroe_scotland_channel', 'florida_bahamas_strait', 'fram_strait', 'global', 'global_land', 'global_ocean', 'great_lakes', 'greenland', 'gulf_of_alaska', 'gulf_of_mexico', 'hudson_bay', 'iceland_faroe_channel', 'indian_ocean', 'indonesian_throughflow', 'indo_pacific_ocean', 'irish_sea', 'lake_baykal', 'lake_chad', 'lake_malawi', 'lake_tanganyika', 'lake_victoria', 'mediterranean_sea', 'mozambique_channel', 'north_america', 'north_sea', 'norwegian_sea', 'pacific_equatorial_undercurrent', 'pacific_ocean', 'persian_gulf', 'red_sea', 'ross_sea', 'sea_of_japan', 'sea_of_okhotsk', 'south_america', 'south_china_sea', 'southern_ocean', 'taiwan_luzon_straits', 'weddell_sea', 'windward_passage', 'yellow_sea' ] for var in ds.get_variables_by_attributes(standard_name='region'): valid_region = 
TestCtx(BaseCheck.MEDIUM, self.section_titles["6.1"]) region = var[:] if np.ma.isMA(region): region = region.data valid_region.assert_true(''.join(region.astype(str)).lower() in region_list, "6.1.1 '{}' specified by '{}' is not a valid region".format( ''.join(region.astype(str)), var.name ) ) ret_val.append(valid_region.to_result()) return ret_val
[ "def", "check_geographic_region", "(", "self", ",", "ds", ")", ":", "ret_val", "=", "[", "]", "region_list", "=", "[", "# TODO maybe move this (and other info like it) into a config file?", "'africa'", ",", "'antarctica'", ",", "'arabian_sea'", ",", "'aral_sea'", ",", ...
6.1.1 When data is representative of geographic regions which can be identified by names but which have complex boundaries that cannot practically be specified using longitude and latitude boundary coordinates, a labeled axis should be used to identify the regions. Recommend that the names be chosen from the list of standardized region names whenever possible. To indicate that the label values are standardized the variable that contains the labels must be given the standard_name attribute with the value region. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results
[ "6", ".", "1", ".", "1", "When", "data", "is", "representative", "of", "geographic", "regions", "which", "can", "be", "identified", "by", "names", "but", "which", "have", "complex", "boundaries", "that", "cannot", "practically", "be", "specified", "using", "...
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L2474-L2571
train
31,444
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck.check_cell_boundaries
def check_cell_boundaries(self, ds): """ Checks the dimensions of cell boundary variables to ensure they are CF compliant. 7.1 To represent cells we add the attribute bounds to the appropriate coordinate variable(s). The value of bounds is the name of the variable that contains the vertices of the cell boundaries. We refer to this type of variable as a "boundary variable." A boundary variable will have one more dimension than its associated coordinate or auxiliary coordinate variable. The additional dimension should be the most rapidly varying one, and its size is the maximum number of cell vertices. Applications that process cell boundary data often times need to determine whether or not adjacent cells share an edge. In order to facilitate this type of processing the following restrictions are placed on the data in boundary variables: Bounds for 1-D coordinate variables For a coordinate variable such as lat(lat) with associated boundary variable latbnd(x,2), the interval endpoints must be ordered consistently with the associated coordinate, e.g., for an increasing coordinate, lat(1) > lat(0) implies latbnd(i,1) >= latbnd(i,0) for all i If adjacent intervals are contiguous, the shared endpoint must be represented indentically in each instance where it occurs in the boundary variable. For example, if the intervals that contain grid points lat(i) and lat(i+1) are contiguous, then latbnd(i+1,0) = latbnd(i,1). Bounds for 2-D coordinate variables with 4-sided cells In the case where the horizontal grid is described by two-dimensional auxiliary coordinate variables in latitude lat(n,m) and longitude lon(n,m), and the associated cells are four-sided, then the boundary variables are given in the form latbnd(n,m,4) and lonbnd(n,m,4), where the trailing index runs over the four vertices of the cells. 
Bounds for multi-dimensional coordinate variables with p-sided cells In all other cases, the bounds should be dimensioned (...,n,p), where (...,n) are the dimensions of the auxiliary coordinate variables, and p the number of vertices of the cells. The vertices must be traversed anticlockwise in the lon-lat plane as viewed from above. The starting vertex is not specified. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results """ # Note that test does not check monotonicity ret_val = [] reasoning = [] for variable_name, boundary_variable_name in cfutil.get_cell_boundary_map(ds).items(): variable = ds.variables[variable_name] valid = True reasoning = [] if boundary_variable_name not in ds.variables: valid = False reasoning.append("Boundary variable {} referenced by {} not ".format( boundary_variable_name, variable.name )+\ "found in dataset variables") else: boundary_variable = ds.variables[boundary_variable_name] # The number of dimensions in the bounds variable should always be # the number of dimensions in the referring variable + 1 if (boundary_variable.ndim < 2): valid = False reasoning.append('Boundary variable {} specified by {}'.format(boundary_variable.name, variable.name)+\ ' should have at least two dimensions to enclose the base '+\ 'case of a one dimensionsal variable') if (boundary_variable.ndim != variable.ndim + 1): valid = False reasoning.append('The number of dimensions of the variable %s is %s, but the ' 'number of dimensions of the boundary variable %s is %s. The boundary variable ' 'should have %s dimensions' % (variable.name, variable.ndim, boundary_variable.name, boundary_variable.ndim, variable.ndim + 1)) if (variable.dimensions[:] != boundary_variable.dimensions[:variable.ndim]): valid = False reasoning.append( u"Boundary variable coordinates (for {}) are in improper order: {}. 
Bounds-specific dimensions should be last" "".format(variable.name, boundary_variable.dimensions)) # ensure p vertices form a valid simplex given previous a...n # previous auxiliary coordinates if (ds.dimensions[boundary_variable.dimensions[-1]].size < len(boundary_variable.dimensions[:-1]) + 1): valid = False reasoning.append("Dimension {} of boundary variable (for {}) must have at least {} elements to form a simplex/closed cell with previous dimensions {}.".format( boundary_variable.name, variable.name, len(variable.dimensions) + 1, boundary_variable.dimensions[:-1]) ) result = Result(BaseCheck.MEDIUM, valid, self.section_titles["7.1"], reasoning) ret_val.append(result) return ret_val
python
def check_cell_boundaries(self, ds): """ Checks the dimensions of cell boundary variables to ensure they are CF compliant. 7.1 To represent cells we add the attribute bounds to the appropriate coordinate variable(s). The value of bounds is the name of the variable that contains the vertices of the cell boundaries. We refer to this type of variable as a "boundary variable." A boundary variable will have one more dimension than its associated coordinate or auxiliary coordinate variable. The additional dimension should be the most rapidly varying one, and its size is the maximum number of cell vertices. Applications that process cell boundary data often times need to determine whether or not adjacent cells share an edge. In order to facilitate this type of processing the following restrictions are placed on the data in boundary variables: Bounds for 1-D coordinate variables For a coordinate variable such as lat(lat) with associated boundary variable latbnd(x,2), the interval endpoints must be ordered consistently with the associated coordinate, e.g., for an increasing coordinate, lat(1) > lat(0) implies latbnd(i,1) >= latbnd(i,0) for all i If adjacent intervals are contiguous, the shared endpoint must be represented indentically in each instance where it occurs in the boundary variable. For example, if the intervals that contain grid points lat(i) and lat(i+1) are contiguous, then latbnd(i+1,0) = latbnd(i,1). Bounds for 2-D coordinate variables with 4-sided cells In the case where the horizontal grid is described by two-dimensional auxiliary coordinate variables in latitude lat(n,m) and longitude lon(n,m), and the associated cells are four-sided, then the boundary variables are given in the form latbnd(n,m,4) and lonbnd(n,m,4), where the trailing index runs over the four vertices of the cells. 
Bounds for multi-dimensional coordinate variables with p-sided cells In all other cases, the bounds should be dimensioned (...,n,p), where (...,n) are the dimensions of the auxiliary coordinate variables, and p the number of vertices of the cells. The vertices must be traversed anticlockwise in the lon-lat plane as viewed from above. The starting vertex is not specified. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results """ # Note that test does not check monotonicity ret_val = [] reasoning = [] for variable_name, boundary_variable_name in cfutil.get_cell_boundary_map(ds).items(): variable = ds.variables[variable_name] valid = True reasoning = [] if boundary_variable_name not in ds.variables: valid = False reasoning.append("Boundary variable {} referenced by {} not ".format( boundary_variable_name, variable.name )+\ "found in dataset variables") else: boundary_variable = ds.variables[boundary_variable_name] # The number of dimensions in the bounds variable should always be # the number of dimensions in the referring variable + 1 if (boundary_variable.ndim < 2): valid = False reasoning.append('Boundary variable {} specified by {}'.format(boundary_variable.name, variable.name)+\ ' should have at least two dimensions to enclose the base '+\ 'case of a one dimensionsal variable') if (boundary_variable.ndim != variable.ndim + 1): valid = False reasoning.append('The number of dimensions of the variable %s is %s, but the ' 'number of dimensions of the boundary variable %s is %s. The boundary variable ' 'should have %s dimensions' % (variable.name, variable.ndim, boundary_variable.name, boundary_variable.ndim, variable.ndim + 1)) if (variable.dimensions[:] != boundary_variable.dimensions[:variable.ndim]): valid = False reasoning.append( u"Boundary variable coordinates (for {}) are in improper order: {}. 
Bounds-specific dimensions should be last" "".format(variable.name, boundary_variable.dimensions)) # ensure p vertices form a valid simplex given previous a...n # previous auxiliary coordinates if (ds.dimensions[boundary_variable.dimensions[-1]].size < len(boundary_variable.dimensions[:-1]) + 1): valid = False reasoning.append("Dimension {} of boundary variable (for {}) must have at least {} elements to form a simplex/closed cell with previous dimensions {}.".format( boundary_variable.name, variable.name, len(variable.dimensions) + 1, boundary_variable.dimensions[:-1]) ) result = Result(BaseCheck.MEDIUM, valid, self.section_titles["7.1"], reasoning) ret_val.append(result) return ret_val
[ "def", "check_cell_boundaries", "(", "self", ",", "ds", ")", ":", "# Note that test does not check monotonicity", "ret_val", "=", "[", "]", "reasoning", "=", "[", "]", "for", "variable_name", ",", "boundary_variable_name", "in", "cfutil", ".", "get_cell_boundary_map",...
Checks the dimensions of cell boundary variables to ensure they are CF compliant. 7.1 To represent cells we add the attribute bounds to the appropriate coordinate variable(s). The value of bounds is the name of the variable that contains the vertices of the cell boundaries. We refer to this type of variable as a "boundary variable." A boundary variable will have one more dimension than its associated coordinate or auxiliary coordinate variable. The additional dimension should be the most rapidly varying one, and its size is the maximum number of cell vertices. Applications that process cell boundary data often times need to determine whether or not adjacent cells share an edge. In order to facilitate this type of processing the following restrictions are placed on the data in boundary variables: Bounds for 1-D coordinate variables For a coordinate variable such as lat(lat) with associated boundary variable latbnd(x,2), the interval endpoints must be ordered consistently with the associated coordinate, e.g., for an increasing coordinate, lat(1) > lat(0) implies latbnd(i,1) >= latbnd(i,0) for all i If adjacent intervals are contiguous, the shared endpoint must be represented indentically in each instance where it occurs in the boundary variable. For example, if the intervals that contain grid points lat(i) and lat(i+1) are contiguous, then latbnd(i+1,0) = latbnd(i,1). Bounds for 2-D coordinate variables with 4-sided cells In the case where the horizontal grid is described by two-dimensional auxiliary coordinate variables in latitude lat(n,m) and longitude lon(n,m), and the associated cells are four-sided, then the boundary variables are given in the form latbnd(n,m,4) and lonbnd(n,m,4), where the trailing index runs over the four vertices of the cells. 
Bounds for multi-dimensional coordinate variables with p-sided cells In all other cases, the bounds should be dimensioned (...,n,p), where (...,n) are the dimensions of the auxiliary coordinate variables, and p the number of vertices of the cells. The vertices must be traversed anticlockwise in the lon-lat plane as viewed from above. The starting vertex is not specified. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results
[ "Checks", "the", "dimensions", "of", "cell", "boundary", "variables", "to", "ensure", "they", "are", "CF", "compliant", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L2579-L2669
train
31,445
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck.check_packed_data
def check_packed_data(self, ds): """ 8.1 Simple packing may be achieved through the use of the optional NUG defined attributes scale_factor and add_offset. After the data values of a variable have been read, they are to be multiplied by the scale_factor, and have add_offset added to them. The units of a variable should be representative of the unpacked data. If the scale_factor and add_offset attributes are of the same data type as the associated variable, the unpacked data is assumed to be of the same data type as the packed data. However, if the scale_factor and add_offset attributes are of a different data type from the variable (containing the packed data) then the unpacked data should match the type of these attributes, which must both be of type float or both be of type double. An additional restriction in this case is that the variable containing the packed data must be of type byte, short or int. It is not advised to unpack an int into a float as there is a potential precision loss. When data to be packed contains missing values the attributes that indicate missing values (_FillValue, valid_min, valid_max, valid_range) must be of the same data type as the packed data. 
:param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results """ ret_val = [] for name, var in ds.variables.items(): add_offset = getattr(var, 'add_offset', None) scale_factor = getattr(var, 'scale_factor', None) if not (add_offset or scale_factor): continue valid = True reasoning = [] # if only one of these attributes is defined, assume they # are the same type (value doesn't matter here) if not add_offset: add_offset = scale_factor if not scale_factor: scale_factor = add_offset if type(add_offset) != type(scale_factor): valid = False reasoning.append("Attributes add_offset and scale_factor have different data type.") elif type(scale_factor) != var.dtype.type: # Check both attributes are type float or double if not isinstance(scale_factor, (float, np.floating)): valid = False reasoning.append("Attributes add_offset and scale_factor are not of type float or double.") else: # Check variable type is byte, short or int if var.dtype.type not in [np.int, np.int8, np.int16, np.int32, np.int64]: valid = False reasoning.append("Variable is not of type byte, short, or int.") result = Result(BaseCheck.MEDIUM, valid, self.section_titles['8.1'], reasoning) ret_val.append(result) reasoning = [] valid = True # test further with _FillValue , valid_min , valid_max , valid_range if hasattr(var, "_FillValue"): if var._FillValue.dtype.type != var.dtype.type: valid = False reasoning.append("Type of %s:_FillValue attribute (%s) does not match variable type (%s)" % (name, var._FillValue.dtype.type, var.dtype.type)) if hasattr(var, "valid_min"): if var.valid_min.dtype.type != var.dtype.type: valid = False reasoning.append("Type of %svalid_min attribute (%s) does not match variable type (%s)" % (name, var.valid_min.dtype.type, var.dtype.type)) if hasattr(var, "valid_max"): if var.valid_max.dtype.type != var.dtype.type: valid = False reasoning.append("Type of %s:valid_max attribute (%s) does not match variable type (%s)" % (name, var.valid_max.dtype.type, 
var.dtype.type)) if hasattr(var, "valid_range"): if var.valid_range.dtype.type != var.dtype.type: valid = False reasoning.append("Type of %s:valid_range attribute (%s) does not match variable type (%s)" % (name, var.valid_range.dtype.type, var.dtype.type)) result = Result(BaseCheck.MEDIUM, valid, self.section_titles['8.1'], reasoning) ret_val.append(result) return ret_val
python
def check_packed_data(self, ds): """ 8.1 Simple packing may be achieved through the use of the optional NUG defined attributes scale_factor and add_offset. After the data values of a variable have been read, they are to be multiplied by the scale_factor, and have add_offset added to them. The units of a variable should be representative of the unpacked data. If the scale_factor and add_offset attributes are of the same data type as the associated variable, the unpacked data is assumed to be of the same data type as the packed data. However, if the scale_factor and add_offset attributes are of a different data type from the variable (containing the packed data) then the unpacked data should match the type of these attributes, which must both be of type float or both be of type double. An additional restriction in this case is that the variable containing the packed data must be of type byte, short or int. It is not advised to unpack an int into a float as there is a potential precision loss. When data to be packed contains missing values the attributes that indicate missing values (_FillValue, valid_min, valid_max, valid_range) must be of the same data type as the packed data. 
:param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results """ ret_val = [] for name, var in ds.variables.items(): add_offset = getattr(var, 'add_offset', None) scale_factor = getattr(var, 'scale_factor', None) if not (add_offset or scale_factor): continue valid = True reasoning = [] # if only one of these attributes is defined, assume they # are the same type (value doesn't matter here) if not add_offset: add_offset = scale_factor if not scale_factor: scale_factor = add_offset if type(add_offset) != type(scale_factor): valid = False reasoning.append("Attributes add_offset and scale_factor have different data type.") elif type(scale_factor) != var.dtype.type: # Check both attributes are type float or double if not isinstance(scale_factor, (float, np.floating)): valid = False reasoning.append("Attributes add_offset and scale_factor are not of type float or double.") else: # Check variable type is byte, short or int if var.dtype.type not in [np.int, np.int8, np.int16, np.int32, np.int64]: valid = False reasoning.append("Variable is not of type byte, short, or int.") result = Result(BaseCheck.MEDIUM, valid, self.section_titles['8.1'], reasoning) ret_val.append(result) reasoning = [] valid = True # test further with _FillValue , valid_min , valid_max , valid_range if hasattr(var, "_FillValue"): if var._FillValue.dtype.type != var.dtype.type: valid = False reasoning.append("Type of %s:_FillValue attribute (%s) does not match variable type (%s)" % (name, var._FillValue.dtype.type, var.dtype.type)) if hasattr(var, "valid_min"): if var.valid_min.dtype.type != var.dtype.type: valid = False reasoning.append("Type of %svalid_min attribute (%s) does not match variable type (%s)" % (name, var.valid_min.dtype.type, var.dtype.type)) if hasattr(var, "valid_max"): if var.valid_max.dtype.type != var.dtype.type: valid = False reasoning.append("Type of %s:valid_max attribute (%s) does not match variable type (%s)" % (name, var.valid_max.dtype.type, 
var.dtype.type)) if hasattr(var, "valid_range"): if var.valid_range.dtype.type != var.dtype.type: valid = False reasoning.append("Type of %s:valid_range attribute (%s) does not match variable type (%s)" % (name, var.valid_range.dtype.type, var.dtype.type)) result = Result(BaseCheck.MEDIUM, valid, self.section_titles['8.1'], reasoning) ret_val.append(result) return ret_val
[ "def", "check_packed_data", "(", "self", ",", "ds", ")", ":", "ret_val", "=", "[", "]", "for", "name", ",", "var", "in", "ds", ".", "variables", ".", "items", "(", ")", ":", "add_offset", "=", "getattr", "(", "var", ",", "'add_offset'", ",", "None", ...
8.1 Simple packing may be achieved through the use of the optional NUG defined attributes scale_factor and add_offset. After the data values of a variable have been read, they are to be multiplied by the scale_factor, and have add_offset added to them. The units of a variable should be representative of the unpacked data. If the scale_factor and add_offset attributes are of the same data type as the associated variable, the unpacked data is assumed to be of the same data type as the packed data. However, if the scale_factor and add_offset attributes are of a different data type from the variable (containing the packed data) then the unpacked data should match the type of these attributes, which must both be of type float or both be of type double. An additional restriction in this case is that the variable containing the packed data must be of type byte, short or int. It is not advised to unpack an int into a float as there is a potential precision loss. When data to be packed contains missing values the attributes that indicate missing values (_FillValue, valid_min, valid_max, valid_range) must be of the same data type as the packed data. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results
[ "8", ".", "1", "Simple", "packing", "may", "be", "achieved", "through", "the", "use", "of", "the", "optional", "NUG", "defined", "attributes", "scale_factor", "and", "add_offset", ".", "After", "the", "data", "values", "of", "a", "variable", "have", "been", ...
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L3040-L3127
train
31,446
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck.check_compression_gathering
def check_compression_gathering(self, ds): """ At the current time the netCDF interface does not provide for packing data. However a simple packing may be achieved through the use of the optional NUG defined attributes scale_factor and add_offset . After the data values of a variable have been read, they are to be multiplied by the scale_factor , and have add_offset added to them. If both attributes are present, the data are scaled before the offset is added. When scaled data are written, the application should first subtract the offset and then divide by the scale factor. The units of a variable should be representative of the unpacked data. This standard is more restrictive than the NUG with respect to the use of the scale_factor and add_offset attributes; ambiguities and precision problems related to data type conversions are resolved by these restrictions. If the scale_factor and add_offset attributes are of the same data type as the associated variable, the unpacked data is assumed to be of the same data type as the packed data. However, if the scale_factor and add_offset attributes are of a different data type from the variable (containing the packed data) then the unpacked data should match the type of these attributes, which must both be of type float or both be of type double . An additional restriction in this case is that the variable containing the packed data must be of type byte , short or int . It is not advised to unpack an int into a float as there is a potential precision loss. When data to be packed contains missing values the attributes that indicate missing values ( _FillValue , valid_min , valid_max , valid_range ) must be of the same data type as the packed data. See Section 2.5.1, “Missing Data” for a discussion of how applications should treat variables that have attributes indicating both missing values and transformations defined by a scale and/or offset. 
:param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results """ ret_val = [] for compress_var in ds.get_variables_by_attributes(compress=lambda s: s is not None): valid = True reasoning = [] # puts the referenced variable being compressed into a set compress_set = set(compress_var.compress.split(' ')) if compress_var.ndim != 1: valid = False reasoning.append("Compression variable {} may only have one dimension".format(compress_var.name)) # ensure compression variable is a proper index, and thus is an # signed or unsigned integer type of some sort if compress_var.dtype.kind not in {'i', 'u'}: valid = False reasoning.append("Compression variable {} must be an integer type to form a proper array index".format(compress_var.name)) # make sure all the variables referred to are contained by the # variables. if not compress_set.issubset(ds.dimensions): not_in_dims = sorted(compress_set.difference(ds.dimensions)) valid = False reasoning.append("The following dimensions referenced by the compress attribute of variable {} do not exist: {}".format(compress_var.name, not_in_dims)) result = Result(BaseCheck.MEDIUM, valid, self.section_titles['8.2'], reasoning) ret_val.append(result) return ret_val
python
def check_compression_gathering(self, ds): """ At the current time the netCDF interface does not provide for packing data. However a simple packing may be achieved through the use of the optional NUG defined attributes scale_factor and add_offset . After the data values of a variable have been read, they are to be multiplied by the scale_factor , and have add_offset added to them. If both attributes are present, the data are scaled before the offset is added. When scaled data are written, the application should first subtract the offset and then divide by the scale factor. The units of a variable should be representative of the unpacked data. This standard is more restrictive than the NUG with respect to the use of the scale_factor and add_offset attributes; ambiguities and precision problems related to data type conversions are resolved by these restrictions. If the scale_factor and add_offset attributes are of the same data type as the associated variable, the unpacked data is assumed to be of the same data type as the packed data. However, if the scale_factor and add_offset attributes are of a different data type from the variable (containing the packed data) then the unpacked data should match the type of these attributes, which must both be of type float or both be of type double . An additional restriction in this case is that the variable containing the packed data must be of type byte , short or int . It is not advised to unpack an int into a float as there is a potential precision loss. When data to be packed contains missing values the attributes that indicate missing values ( _FillValue , valid_min , valid_max , valid_range ) must be of the same data type as the packed data. See Section 2.5.1, “Missing Data” for a discussion of how applications should treat variables that have attributes indicating both missing values and transformations defined by a scale and/or offset. 
:param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results """ ret_val = [] for compress_var in ds.get_variables_by_attributes(compress=lambda s: s is not None): valid = True reasoning = [] # puts the referenced variable being compressed into a set compress_set = set(compress_var.compress.split(' ')) if compress_var.ndim != 1: valid = False reasoning.append("Compression variable {} may only have one dimension".format(compress_var.name)) # ensure compression variable is a proper index, and thus is an # signed or unsigned integer type of some sort if compress_var.dtype.kind not in {'i', 'u'}: valid = False reasoning.append("Compression variable {} must be an integer type to form a proper array index".format(compress_var.name)) # make sure all the variables referred to are contained by the # variables. if not compress_set.issubset(ds.dimensions): not_in_dims = sorted(compress_set.difference(ds.dimensions)) valid = False reasoning.append("The following dimensions referenced by the compress attribute of variable {} do not exist: {}".format(compress_var.name, not_in_dims)) result = Result(BaseCheck.MEDIUM, valid, self.section_titles['8.2'], reasoning) ret_val.append(result) return ret_val
[ "def", "check_compression_gathering", "(", "self", ",", "ds", ")", ":", "ret_val", "=", "[", "]", "for", "compress_var", "in", "ds", ".", "get_variables_by_attributes", "(", "compress", "=", "lambda", "s", ":", "s", "is", "not", "None", ")", ":", "valid", ...
At the current time the netCDF interface does not provide for packing data. However a simple packing may be achieved through the use of the optional NUG defined attributes scale_factor and add_offset . After the data values of a variable have been read, they are to be multiplied by the scale_factor , and have add_offset added to them. If both attributes are present, the data are scaled before the offset is added. When scaled data are written, the application should first subtract the offset and then divide by the scale factor. The units of a variable should be representative of the unpacked data. This standard is more restrictive than the NUG with respect to the use of the scale_factor and add_offset attributes; ambiguities and precision problems related to data type conversions are resolved by these restrictions. If the scale_factor and add_offset attributes are of the same data type as the associated variable, the unpacked data is assumed to be of the same data type as the packed data. However, if the scale_factor and add_offset attributes are of a different data type from the variable (containing the packed data) then the unpacked data should match the type of these attributes, which must both be of type float or both be of type double . An additional restriction in this case is that the variable containing the packed data must be of type byte , short or int . It is not advised to unpack an int into a float as there is a potential precision loss. When data to be packed contains missing values the attributes that indicate missing values ( _FillValue , valid_min , valid_max , valid_range ) must be of the same data type as the packed data. See Section 2.5.1, “Missing Data” for a discussion of how applications should treat variables that have attributes indicating both missing values and transformations defined by a scale and/or offset. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results
[ "At", "the", "current", "time", "the", "netCDF", "interface", "does", "not", "provide", "for", "packing", "data", ".", "However", "a", "simple", "packing", "may", "be", "achieved", "through", "the", "use", "of", "the", "optional", "NUG", "defined", "attribut...
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L3129-L3194
train
31,447
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck.check_all_features_are_same_type
def check_all_features_are_same_type(self, ds): """ Check that the feature types in a dataset are all the same. 9.1 The features contained within a collection must always be of the same type; and all the collections in a CF file must be of the same feature type. point, timeSeries, trajectory, profile, timeSeriesProfile, trajectoryProfile. The space-time coordinates that are indicated for each feature are mandatory. However a featureType may also include other space-time coordinates which are not mandatory (notably the z coordinate). :param netCDF4.Dataset ds: An open netCDF dataset :rtype: compliance_checker.base.Result """ all_the_same = TestCtx(BaseCheck.HIGH, self.section_titles['9.1']) feature_types_found = defaultdict(list) for name in self._find_geophysical_vars(ds): feature = cfutil.guess_feature_type(ds, name) # If we can't figure out the feature type, don't penalize, just # make a note of it in the messages if feature is not None: feature_types_found[feature].append(name) else: all_the_same.messages.append("Unidentifiable feature for variable {}" "".format(name)) feature_description = ', '.join(['{} ({})'.format(ftr, ', '.join(vrs)) for ftr, vrs in feature_types_found.items()]) all_the_same.assert_true(len(feature_types_found) < 2, "Different feature types discovered in this dataset: {}" "".format(feature_description)) return all_the_same.to_result()
python
def check_all_features_are_same_type(self, ds): """ Check that the feature types in a dataset are all the same. 9.1 The features contained within a collection must always be of the same type; and all the collections in a CF file must be of the same feature type. point, timeSeries, trajectory, profile, timeSeriesProfile, trajectoryProfile. The space-time coordinates that are indicated for each feature are mandatory. However a featureType may also include other space-time coordinates which are not mandatory (notably the z coordinate). :param netCDF4.Dataset ds: An open netCDF dataset :rtype: compliance_checker.base.Result """ all_the_same = TestCtx(BaseCheck.HIGH, self.section_titles['9.1']) feature_types_found = defaultdict(list) for name in self._find_geophysical_vars(ds): feature = cfutil.guess_feature_type(ds, name) # If we can't figure out the feature type, don't penalize, just # make a note of it in the messages if feature is not None: feature_types_found[feature].append(name) else: all_the_same.messages.append("Unidentifiable feature for variable {}" "".format(name)) feature_description = ', '.join(['{} ({})'.format(ftr, ', '.join(vrs)) for ftr, vrs in feature_types_found.items()]) all_the_same.assert_true(len(feature_types_found) < 2, "Different feature types discovered in this dataset: {}" "".format(feature_description)) return all_the_same.to_result()
[ "def", "check_all_features_are_same_type", "(", "self", ",", "ds", ")", ":", "all_the_same", "=", "TestCtx", "(", "BaseCheck", ".", "HIGH", ",", "self", ".", "section_titles", "[", "'9.1'", "]", ")", "feature_types_found", "=", "defaultdict", "(", "list", ")",...
Check that the feature types in a dataset are all the same. 9.1 The features contained within a collection must always be of the same type; and all the collections in a CF file must be of the same feature type. point, timeSeries, trajectory, profile, timeSeriesProfile, trajectoryProfile. The space-time coordinates that are indicated for each feature are mandatory. However a featureType may also include other space-time coordinates which are not mandatory (notably the z coordinate). :param netCDF4.Dataset ds: An open netCDF dataset :rtype: compliance_checker.base.Result
[ "Check", "that", "the", "feature", "types", "in", "a", "dataset", "are", "all", "the", "same", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L3202-L3235
train
31,448
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck.check_cf_role
def check_cf_role(self, ds): """ Check variables defining cf_role for legal cf_role values. §9.5 The only acceptable values of cf_role for Discrete Geometry CF data sets are timeseries_id, profile_id, and trajectory_id :param netCDF4.Dataset ds: An open netCDF dataset :rtype: compliance_checker.base.Result """ valid_roles = ['timeseries_id', 'profile_id', 'trajectory_id'] variable_count = 0 for variable in ds.get_variables_by_attributes(cf_role=lambda x: x is not None): variable_count += 1 name = variable.name valid_cf_role = TestCtx(BaseCheck.HIGH, self.section_titles['9.5']) cf_role = variable.cf_role valid_cf_role.assert_true(cf_role in valid_roles, "{} is not a valid cf_role value. It must be one of {}" "".format(name, ', '.join(valid_roles))) if variable_count > 0: m = "§9.5 The only acceptable values of cf_role for Discrete Geometry CF"+\ " data sets are timeseries_id, profile_id, and trajectory_id" valid_cf_role.assert_true(variable_count < 3, m) return valid_cf_role.to_result()
python
def check_cf_role(self, ds): """ Check variables defining cf_role for legal cf_role values. §9.5 The only acceptable values of cf_role for Discrete Geometry CF data sets are timeseries_id, profile_id, and trajectory_id :param netCDF4.Dataset ds: An open netCDF dataset :rtype: compliance_checker.base.Result """ valid_roles = ['timeseries_id', 'profile_id', 'trajectory_id'] variable_count = 0 for variable in ds.get_variables_by_attributes(cf_role=lambda x: x is not None): variable_count += 1 name = variable.name valid_cf_role = TestCtx(BaseCheck.HIGH, self.section_titles['9.5']) cf_role = variable.cf_role valid_cf_role.assert_true(cf_role in valid_roles, "{} is not a valid cf_role value. It must be one of {}" "".format(name, ', '.join(valid_roles))) if variable_count > 0: m = "§9.5 The only acceptable values of cf_role for Discrete Geometry CF"+\ " data sets are timeseries_id, profile_id, and trajectory_id" valid_cf_role.assert_true(variable_count < 3, m) return valid_cf_role.to_result()
[ "def", "check_cf_role", "(", "self", ",", "ds", ")", ":", "valid_roles", "=", "[", "'timeseries_id'", ",", "'profile_id'", ",", "'trajectory_id'", "]", "variable_count", "=", "0", "for", "variable", "in", "ds", ".", "get_variables_by_attributes", "(", "cf_role",...
Check variables defining cf_role for legal cf_role values. §9.5 The only acceptable values of cf_role for Discrete Geometry CF data sets are timeseries_id, profile_id, and trajectory_id :param netCDF4.Dataset ds: An open netCDF dataset :rtype: compliance_checker.base.Result
[ "Check", "variables", "defining", "cf_role", "for", "legal", "cf_role", "values", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L3260-L3284
train
31,449
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck.check_variable_features
def check_variable_features(self, ds): ''' Checks the variable feature types match the dataset featureType attribute :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results ''' ret_val = [] feature_list = ['point', 'timeSeries', 'trajectory', 'profile', 'timeSeriesProfile', 'trajectoryProfile'] # Don't bother checking if it's not a legal featureType feature_type = getattr(ds, 'featureType', None) if feature_type not in feature_list: return [] feature_type_map = { 'point': [ 'point' ], 'timeSeries': [ 'timeseries', 'multi-timeseries-orthogonal', 'multi-timeseries-incomplete', ], 'trajectory': [ 'cf-trajectory', 'single-trajectory', ], 'profile': [ 'profile-orthogonal', 'profile-incomplete' ], 'timeSeriesProfile': [ 'timeseries-profile-single-station', 'timeseries-profile-multi-station', 'timeseries-profile-single-ortho-time', 'timeseries-profile-multi-ortho-time', 'timeseries-profile-ortho-depth', 'timeseries-profile-incomplete' ], 'trajectoryProfile': [ 'trajectory-profile-orthogonal', 'trajectory-profile-incomplete' ] } for name in self._find_geophysical_vars(ds): variable_feature = cfutil.guess_feature_type(ds, name) # If we can't figure it out, don't check it. if variable_feature is None: continue matching_feature = TestCtx(BaseCheck.MEDIUM, self.section_titles['9.1']) matching_feature.assert_true(variable_feature in feature_type_map[feature_type], '{} is not a {}, it is detected as a {}' ''.format(name, feature_type, variable_feature)) ret_val.append(matching_feature.to_result()) return ret_val
python
def check_variable_features(self, ds): ''' Checks the variable feature types match the dataset featureType attribute :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results ''' ret_val = [] feature_list = ['point', 'timeSeries', 'trajectory', 'profile', 'timeSeriesProfile', 'trajectoryProfile'] # Don't bother checking if it's not a legal featureType feature_type = getattr(ds, 'featureType', None) if feature_type not in feature_list: return [] feature_type_map = { 'point': [ 'point' ], 'timeSeries': [ 'timeseries', 'multi-timeseries-orthogonal', 'multi-timeseries-incomplete', ], 'trajectory': [ 'cf-trajectory', 'single-trajectory', ], 'profile': [ 'profile-orthogonal', 'profile-incomplete' ], 'timeSeriesProfile': [ 'timeseries-profile-single-station', 'timeseries-profile-multi-station', 'timeseries-profile-single-ortho-time', 'timeseries-profile-multi-ortho-time', 'timeseries-profile-ortho-depth', 'timeseries-profile-incomplete' ], 'trajectoryProfile': [ 'trajectory-profile-orthogonal', 'trajectory-profile-incomplete' ] } for name in self._find_geophysical_vars(ds): variable_feature = cfutil.guess_feature_type(ds, name) # If we can't figure it out, don't check it. if variable_feature is None: continue matching_feature = TestCtx(BaseCheck.MEDIUM, self.section_titles['9.1']) matching_feature.assert_true(variable_feature in feature_type_map[feature_type], '{} is not a {}, it is detected as a {}' ''.format(name, feature_type, variable_feature)) ret_val.append(matching_feature.to_result()) return ret_val
[ "def", "check_variable_features", "(", "self", ",", "ds", ")", ":", "ret_val", "=", "[", "]", "feature_list", "=", "[", "'point'", ",", "'timeSeries'", ",", "'trajectory'", ",", "'profile'", ",", "'timeSeriesProfile'", ",", "'trajectoryProfile'", "]", "# Don't b...
Checks the variable feature types match the dataset featureType attribute :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results
[ "Checks", "the", "variable", "feature", "types", "match", "the", "dataset", "featureType", "attribute" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L3286-L3343
train
31,450
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck.check_hints
def check_hints(self, ds): ''' Checks for potentially mislabeled metadata and makes suggestions for how to correct :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results ''' ret_val = [] ret_val.extend(self._check_hint_bounds(ds)) return ret_val
python
def check_hints(self, ds): ''' Checks for potentially mislabeled metadata and makes suggestions for how to correct :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results ''' ret_val = [] ret_val.extend(self._check_hint_bounds(ds)) return ret_val
[ "def", "check_hints", "(", "self", ",", "ds", ")", ":", "ret_val", "=", "[", "]", "ret_val", ".", "extend", "(", "self", ".", "_check_hint_bounds", "(", "ds", ")", ")", "return", "ret_val" ]
Checks for potentially mislabeled metadata and makes suggestions for how to correct :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results
[ "Checks", "for", "potentially", "mislabeled", "metadata", "and", "makes", "suggestions", "for", "how", "to", "correct" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L3345-L3357
train
31,451
ioos/compliance-checker
compliance_checker/cf/cf.py
CFBaseCheck._check_hint_bounds
def _check_hint_bounds(self, ds): ''' Checks for variables ending with _bounds, if they are not cell methods, make the recommendation :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results ''' ret_val = [] boundary_variables = cfutil.get_cell_boundary_variables(ds) for name in ds.variables: if name.endswith('_bounds') and name not in boundary_variables: msg = ('{} might be a cell boundary variable but there are no variables that define it ' 'as a boundary using the `bounds` attribute.'.format(name)) result = Result(BaseCheck.LOW, True, self.section_titles['7.1'], [msg]) ret_val.append(result) return ret_val
python
def _check_hint_bounds(self, ds): ''' Checks for variables ending with _bounds, if they are not cell methods, make the recommendation :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results ''' ret_val = [] boundary_variables = cfutil.get_cell_boundary_variables(ds) for name in ds.variables: if name.endswith('_bounds') and name not in boundary_variables: msg = ('{} might be a cell boundary variable but there are no variables that define it ' 'as a boundary using the `bounds` attribute.'.format(name)) result = Result(BaseCheck.LOW, True, self.section_titles['7.1'], [msg]) ret_val.append(result) return ret_val
[ "def", "_check_hint_bounds", "(", "self", ",", "ds", ")", ":", "ret_val", "=", "[", "]", "boundary_variables", "=", "cfutil", ".", "get_cell_boundary_variables", "(", "ds", ")", "for", "name", "in", "ds", ".", "variables", ":", "if", "name", ".", "endswith...
Checks for variables ending with _bounds, if they are not cell methods, make the recommendation :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results
[ "Checks", "for", "variables", "ending", "with", "_bounds", "if", "they", "are", "not", "cell", "methods", "make", "the", "recommendation" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/cf.py#L3359-L3380
train
31,452
ioos/compliance-checker
compliance_checker/protocols/netcdf.py
is_netcdf
def is_netcdf(url): ''' Returns True if the URL points to a valid local netCDF file :param str url: Location of file on the file system ''' # Try an obvious exclusion of remote resources if url.startswith('http'): return False # If it's a known extension, give it a shot if url.endswith('nc'): return True # Brute force with open(url, 'rb') as f: magic_number = f.read(4) if len(magic_number) < 4: return False if is_classic_netcdf(magic_number): return True elif is_hdf5(magic_number): return True return False
python
def is_netcdf(url): ''' Returns True if the URL points to a valid local netCDF file :param str url: Location of file on the file system ''' # Try an obvious exclusion of remote resources if url.startswith('http'): return False # If it's a known extension, give it a shot if url.endswith('nc'): return True # Brute force with open(url, 'rb') as f: magic_number = f.read(4) if len(magic_number) < 4: return False if is_classic_netcdf(magic_number): return True elif is_hdf5(magic_number): return True return False
[ "def", "is_netcdf", "(", "url", ")", ":", "# Try an obvious exclusion of remote resources", "if", "url", ".", "startswith", "(", "'http'", ")", ":", "return", "False", "# If it's a known extension, give it a shot", "if", "url", ".", "endswith", "(", "'nc'", ")", ":"...
Returns True if the URL points to a valid local netCDF file :param str url: Location of file on the file system
[ "Returns", "True", "if", "the", "URL", "points", "to", "a", "valid", "local", "netCDF", "file" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/protocols/netcdf.py#L9-L33
train
31,453
ioos/compliance-checker
compliance_checker/cf/util.py
get_safe
def get_safe(dict_instance, keypath, default=None): """ Returns a value with in a nested dict structure from a dot separated path expression such as "system.server.host" or a list of key entries @retval Value if found or None """ try: obj = dict_instance keylist = keypath if type(keypath) is list else keypath.split('.') for key in keylist: obj = obj[key] return obj except Exception: return default
python
def get_safe(dict_instance, keypath, default=None): """ Returns a value with in a nested dict structure from a dot separated path expression such as "system.server.host" or a list of key entries @retval Value if found or None """ try: obj = dict_instance keylist = keypath if type(keypath) is list else keypath.split('.') for key in keylist: obj = obj[key] return obj except Exception: return default
[ "def", "get_safe", "(", "dict_instance", ",", "keypath", ",", "default", "=", "None", ")", ":", "try", ":", "obj", "=", "dict_instance", "keylist", "=", "keypath", "if", "type", "(", "keypath", ")", "is", "list", "else", "keypath", ".", "split", "(", "...
Returns a value with in a nested dict structure from a dot separated path expression such as "system.server.host" or a list of key entries @retval Value if found or None
[ "Returns", "a", "value", "with", "in", "a", "nested", "dict", "structure", "from", "a", "dot", "separated", "path", "expression", "such", "as", "system", ".", "server", ".", "host", "or", "a", "list", "of", "key", "entries" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/util.py#L161-L174
train
31,454
ioos/compliance-checker
compliance_checker/cf/util.py
download_cf_standard_name_table
def download_cf_standard_name_table(version, location=None): ''' Downloads the specified CF standard name table version and saves it to file :param str version: CF standard name table version number (i.e 34) :param str location: Path/filename to write downloaded xml file to ''' if location is None: # This case occurs when updating the packaged version from command line location = resource_filename('compliance_checker', 'data/cf-standard-name-table.xml') url = "http://cfconventions.org/Data/cf-standard-names/{0}/src/cf-standard-name-table.xml".format(version) r = requests.get(url, allow_redirects=True) if r.status_code == 200: print("Downloading cf-standard-names table version {0} from: {1}".format(version, url), file=sys.stderr) with open(location, 'wb') as f: f.write(r.content) else: r.raise_for_status() return
python
def download_cf_standard_name_table(version, location=None): ''' Downloads the specified CF standard name table version and saves it to file :param str version: CF standard name table version number (i.e 34) :param str location: Path/filename to write downloaded xml file to ''' if location is None: # This case occurs when updating the packaged version from command line location = resource_filename('compliance_checker', 'data/cf-standard-name-table.xml') url = "http://cfconventions.org/Data/cf-standard-names/{0}/src/cf-standard-name-table.xml".format(version) r = requests.get(url, allow_redirects=True) if r.status_code == 200: print("Downloading cf-standard-names table version {0} from: {1}".format(version, url), file=sys.stderr) with open(location, 'wb') as f: f.write(r.content) else: r.raise_for_status() return
[ "def", "download_cf_standard_name_table", "(", "version", ",", "location", "=", "None", ")", ":", "if", "location", "is", "None", ":", "# This case occurs when updating the packaged version from command line", "location", "=", "resource_filename", "(", "'compliance_checker'",...
Downloads the specified CF standard name table version and saves it to file :param str version: CF standard name table version number (i.e 34) :param str location: Path/filename to write downloaded xml file to
[ "Downloads", "the", "specified", "CF", "standard", "name", "table", "version", "and", "saves", "it", "to", "file" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/util.py#L320-L339
train
31,455
ioos/compliance-checker
compliance_checker/cf/util.py
find_coord_vars
def find_coord_vars(ncds): """ Finds all coordinate variables in a dataset. A variable with the same name as a dimension is called a coordinate variable. """ coord_vars = [] for d in ncds.dimensions: if d in ncds.variables and ncds.variables[d].dimensions == (d,): coord_vars.append(ncds.variables[d]) return coord_vars
python
def find_coord_vars(ncds): """ Finds all coordinate variables in a dataset. A variable with the same name as a dimension is called a coordinate variable. """ coord_vars = [] for d in ncds.dimensions: if d in ncds.variables and ncds.variables[d].dimensions == (d,): coord_vars.append(ncds.variables[d]) return coord_vars
[ "def", "find_coord_vars", "(", "ncds", ")", ":", "coord_vars", "=", "[", "]", "for", "d", "in", "ncds", ".", "dimensions", ":", "if", "d", "in", "ncds", ".", "variables", "and", "ncds", ".", "variables", "[", "d", "]", ".", "dimensions", "==", "(", ...
Finds all coordinate variables in a dataset. A variable with the same name as a dimension is called a coordinate variable.
[ "Finds", "all", "coordinate", "variables", "in", "a", "dataset", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/util.py#L406-L418
train
31,456
ioos/compliance-checker
compliance_checker/cf/util.py
is_time_variable
def is_time_variable(varname, var): """ Identifies if a variable is represents time """ satisfied = varname.lower() == 'time' satisfied |= getattr(var, 'standard_name', '') == 'time' satisfied |= getattr(var, 'axis', '') == 'T' satisfied |= units_convertible('seconds since 1900-01-01', getattr(var, 'units', '')) return satisfied
python
def is_time_variable(varname, var): """ Identifies if a variable is represents time """ satisfied = varname.lower() == 'time' satisfied |= getattr(var, 'standard_name', '') == 'time' satisfied |= getattr(var, 'axis', '') == 'T' satisfied |= units_convertible('seconds since 1900-01-01', getattr(var, 'units', '')) return satisfied
[ "def", "is_time_variable", "(", "varname", ",", "var", ")", ":", "satisfied", "=", "varname", ".", "lower", "(", ")", "==", "'time'", "satisfied", "|=", "getattr", "(", "var", ",", "'standard_name'", ",", "''", ")", "==", "'time'", "satisfied", "|=", "ge...
Identifies if a variable is represents time
[ "Identifies", "if", "a", "variable", "is", "represents", "time" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/util.py#L421-L429
train
31,457
ioos/compliance-checker
compliance_checker/cf/util.py
is_vertical_coordinate
def is_vertical_coordinate(var_name, var): """ Determines if a variable is a vertical coordinate variable 4.3 A vertical coordinate will be identifiable by: units of pressure; or the presence of the positive attribute with a value of up or down (case insensitive). Optionally, the vertical type may be indicated additionally by providing the standard_name attribute with an appropriate value, and/or the axis attribute with the value Z. """ # Known name satisfied = var_name.lower() in _possiblez satisfied |= getattr(var, 'standard_name', '') in _possiblez # Is the axis set to Z? satisfied |= getattr(var, 'axis', '').lower() == 'z' is_pressure = units_convertible(getattr(var, 'units', '1'), 'dbar') # Pressure defined or positive defined satisfied |= is_pressure if not is_pressure: satisfied |= getattr(var, 'positive', '').lower() in ('up', 'down') return satisfied
python
def is_vertical_coordinate(var_name, var): """ Determines if a variable is a vertical coordinate variable 4.3 A vertical coordinate will be identifiable by: units of pressure; or the presence of the positive attribute with a value of up or down (case insensitive). Optionally, the vertical type may be indicated additionally by providing the standard_name attribute with an appropriate value, and/or the axis attribute with the value Z. """ # Known name satisfied = var_name.lower() in _possiblez satisfied |= getattr(var, 'standard_name', '') in _possiblez # Is the axis set to Z? satisfied |= getattr(var, 'axis', '').lower() == 'z' is_pressure = units_convertible(getattr(var, 'units', '1'), 'dbar') # Pressure defined or positive defined satisfied |= is_pressure if not is_pressure: satisfied |= getattr(var, 'positive', '').lower() in ('up', 'down') return satisfied
[ "def", "is_vertical_coordinate", "(", "var_name", ",", "var", ")", ":", "# Known name", "satisfied", "=", "var_name", ".", "lower", "(", ")", "in", "_possiblez", "satisfied", "|=", "getattr", "(", "var", ",", "'standard_name'", ",", "''", ")", "in", "_possib...
Determines if a variable is a vertical coordinate variable 4.3 A vertical coordinate will be identifiable by: units of pressure; or the presence of the positive attribute with a value of up or down (case insensitive). Optionally, the vertical type may be indicated additionally by providing the standard_name attribute with an appropriate value, and/or the axis attribute with the value Z.
[ "Determines", "if", "a", "variable", "is", "a", "vertical", "coordinate", "variable" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/util.py#L432-L451
train
31,458
ioos/compliance-checker
compliance_checker/acdd.py
ACDDBaseCheck.get_applicable_variables
def get_applicable_variables(self, ds): ''' Returns a list of variable names that are applicable to ACDD Metadata Checks for variables. This includes geophysical and coordinate variables only. :param netCDF4.Dataset ds: An open netCDF dataset ''' if self._applicable_variables is None: self.applicable_variables = cfutil.get_geophysical_variables(ds) varname = cfutil.get_time_variable(ds) # avoid duplicates by checking if already present if varname and (varname not in self.applicable_variables): self.applicable_variables.append(varname) varname = cfutil.get_lon_variable(ds) if varname and (varname not in self.applicable_variables): self.applicable_variables.append(varname) varname = cfutil.get_lat_variable(ds) if varname and (varname not in self.applicable_variables): self.applicable_variables.append(varname) varname = cfutil.get_z_variable(ds) if varname and (varname not in self.applicable_variables): self.applicable_variables.append(varname) return self.applicable_variables
python
def get_applicable_variables(self, ds): ''' Returns a list of variable names that are applicable to ACDD Metadata Checks for variables. This includes geophysical and coordinate variables only. :param netCDF4.Dataset ds: An open netCDF dataset ''' if self._applicable_variables is None: self.applicable_variables = cfutil.get_geophysical_variables(ds) varname = cfutil.get_time_variable(ds) # avoid duplicates by checking if already present if varname and (varname not in self.applicable_variables): self.applicable_variables.append(varname) varname = cfutil.get_lon_variable(ds) if varname and (varname not in self.applicable_variables): self.applicable_variables.append(varname) varname = cfutil.get_lat_variable(ds) if varname and (varname not in self.applicable_variables): self.applicable_variables.append(varname) varname = cfutil.get_z_variable(ds) if varname and (varname not in self.applicable_variables): self.applicable_variables.append(varname) return self.applicable_variables
[ "def", "get_applicable_variables", "(", "self", ",", "ds", ")", ":", "if", "self", ".", "_applicable_variables", "is", "None", ":", "self", ".", "applicable_variables", "=", "cfutil", ".", "get_geophysical_variables", "(", "ds", ")", "varname", "=", "cfutil", ...
Returns a list of variable names that are applicable to ACDD Metadata Checks for variables. This includes geophysical and coordinate variables only. :param netCDF4.Dataset ds: An open netCDF dataset
[ "Returns", "a", "list", "of", "variable", "names", "that", "are", "applicable", "to", "ACDD", "Metadata", "Checks", "for", "variables", ".", "This", "includes", "geophysical", "and", "coordinate", "variables", "only", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/acdd.py#L117-L141
train
31,459
ioos/compliance-checker
compliance_checker/acdd.py
ACDDBaseCheck.check_var_long_name
def check_var_long_name(self, ds): ''' Checks each applicable variable for the long_name attribute :param netCDF4.Dataset ds: An open netCDF dataset ''' results = [] # ACDD Variable Metadata applies to all coordinate variables and # geophysical variables only. for variable in self.get_applicable_variables(ds): msgs = [] long_name = getattr(ds.variables[variable], 'long_name', None) check = long_name is not None if not check: msgs.append("long_name") results.append(Result(BaseCheck.HIGH, check, self._var_header.format(variable), msgs)) return results
python
def check_var_long_name(self, ds): ''' Checks each applicable variable for the long_name attribute :param netCDF4.Dataset ds: An open netCDF dataset ''' results = [] # ACDD Variable Metadata applies to all coordinate variables and # geophysical variables only. for variable in self.get_applicable_variables(ds): msgs = [] long_name = getattr(ds.variables[variable], 'long_name', None) check = long_name is not None if not check: msgs.append("long_name") results.append(Result(BaseCheck.HIGH, check, self._var_header.format(variable), msgs)) return results
[ "def", "check_var_long_name", "(", "self", ",", "ds", ")", ":", "results", "=", "[", "]", "# ACDD Variable Metadata applies to all coordinate variables and", "# geophysical variables only.", "for", "variable", "in", "self", ".", "get_applicable_variables", "(", "ds", ")",...
Checks each applicable variable for the long_name attribute :param netCDF4.Dataset ds: An open netCDF dataset
[ "Checks", "each", "applicable", "variable", "for", "the", "long_name", "attribute" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/acdd.py#L143-L162
train
31,460
ioos/compliance-checker
compliance_checker/acdd.py
ACDDBaseCheck.check_var_standard_name
def check_var_standard_name(self, ds): ''' Checks each applicable variable for the standard_name attribute :param netCDF4.Dataset ds: An open netCDF dataset ''' results = [] for variable in self.get_applicable_variables(ds): msgs = [] std_name = getattr(ds.variables[variable], 'standard_name', None) check = std_name is not None if not check: msgs.append("standard_name") results.append(Result(BaseCheck.HIGH, check, self._var_header.format(variable), msgs)) return results
python
def check_var_standard_name(self, ds): ''' Checks each applicable variable for the standard_name attribute :param netCDF4.Dataset ds: An open netCDF dataset ''' results = [] for variable in self.get_applicable_variables(ds): msgs = [] std_name = getattr(ds.variables[variable], 'standard_name', None) check = std_name is not None if not check: msgs.append("standard_name") results.append(Result(BaseCheck.HIGH, check, self._var_header.format(variable), msgs)) return results
[ "def", "check_var_standard_name", "(", "self", ",", "ds", ")", ":", "results", "=", "[", "]", "for", "variable", "in", "self", ".", "get_applicable_variables", "(", "ds", ")", ":", "msgs", "=", "[", "]", "std_name", "=", "getattr", "(", "ds", ".", "var...
Checks each applicable variable for the standard_name attribute :param netCDF4.Dataset ds: An open netCDF dataset
[ "Checks", "each", "applicable", "variable", "for", "the", "standard_name", "attribute" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/acdd.py#L164-L179
train
31,461
ioos/compliance-checker
compliance_checker/acdd.py
ACDDBaseCheck.check_var_units
def check_var_units(self, ds): ''' Checks each applicable variable for the units attribute :param netCDF4.Dataset ds: An open netCDF dataset ''' results = [] for variable in self.get_applicable_variables(ds): msgs = [] # Check units and dims for variable unit_check = hasattr(ds.variables[variable], 'units') no_dim_check = (getattr(ds.variables[variable], 'dimensions') == tuple()) # Check if we have no dimensions. If no dims, skip test if no_dim_check: continue # Check if we have no units if not unit_check: msgs.append("units") results.append(Result(BaseCheck.HIGH, unit_check, self._var_header.format(variable), msgs)) return results
python
def check_var_units(self, ds): ''' Checks each applicable variable for the units attribute :param netCDF4.Dataset ds: An open netCDF dataset ''' results = [] for variable in self.get_applicable_variables(ds): msgs = [] # Check units and dims for variable unit_check = hasattr(ds.variables[variable], 'units') no_dim_check = (getattr(ds.variables[variable], 'dimensions') == tuple()) # Check if we have no dimensions. If no dims, skip test if no_dim_check: continue # Check if we have no units if not unit_check: msgs.append("units") results.append(Result(BaseCheck.HIGH, unit_check, self._var_header.format(variable), msgs)) return results
[ "def", "check_var_units", "(", "self", ",", "ds", ")", ":", "results", "=", "[", "]", "for", "variable", "in", "self", ".", "get_applicable_variables", "(", "ds", ")", ":", "msgs", "=", "[", "]", "# Check units and dims for variable", "unit_check", "=", "has...
Checks each applicable variable for the units attribute :param netCDF4.Dataset ds: An open netCDF dataset
[ "Checks", "each", "applicable", "variable", "for", "the", "units", "attribute" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/acdd.py#L181-L201
train
31,462
ioos/compliance-checker
compliance_checker/acdd.py
ACDDBaseCheck.verify_geospatial_bounds
def verify_geospatial_bounds(self, ds): """Checks that the geospatial bounds is well formed OGC WKT""" var = getattr(ds, 'geospatial_bounds', None) check = var is not None if not check: return ratable_result(False, "Global Attributes", # grouped with Globals ["geospatial_bounds not present"]) try: # TODO: verify that WKT is valid given CRS (defaults to EPSG:4326 # in ACDD. from_wkt(ds.geospatial_bounds) except AttributeError: return ratable_result(False, "Global Attributes", # grouped with Globals ['Could not parse WKT, possible bad value for WKT']) # parsed OK else: return ratable_result(True, "Global Attributes", tuple())
python
def verify_geospatial_bounds(self, ds): """Checks that the geospatial bounds is well formed OGC WKT""" var = getattr(ds, 'geospatial_bounds', None) check = var is not None if not check: return ratable_result(False, "Global Attributes", # grouped with Globals ["geospatial_bounds not present"]) try: # TODO: verify that WKT is valid given CRS (defaults to EPSG:4326 # in ACDD. from_wkt(ds.geospatial_bounds) except AttributeError: return ratable_result(False, "Global Attributes", # grouped with Globals ['Could not parse WKT, possible bad value for WKT']) # parsed OK else: return ratable_result(True, "Global Attributes", tuple())
[ "def", "verify_geospatial_bounds", "(", "self", ",", "ds", ")", ":", "var", "=", "getattr", "(", "ds", ",", "'geospatial_bounds'", ",", "None", ")", "check", "=", "var", "is", "not", "None", "if", "not", "check", ":", "return", "ratable_result", "(", "Fa...
Checks that the geospatial bounds is well formed OGC WKT
[ "Checks", "that", "the", "geospatial", "bounds", "is", "well", "formed", "OGC", "WKT" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/acdd.py#L376-L395
train
31,463
ioos/compliance-checker
compliance_checker/acdd.py
ACDDBaseCheck._check_total_z_extents
def _check_total_z_extents(self, ds, z_variable): ''' Check the entire array of Z for minimum and maximum and compare that to the vertical extents defined in the global attributes :param netCDF4.Dataset ds: An open netCDF dataset :param str z_variable: Name of the variable representing the Z-Axis ''' msgs = [] total = 2 try: vert_min = float(ds.geospatial_vertical_min) except ValueError: msgs.append('geospatial_vertical_min cannot be cast to float') try: vert_max = float(ds.geospatial_vertical_max) except ValueError: msgs.append('geospatial_vertical_max cannot be cast to float') if len(msgs) > 0: return Result(BaseCheck.MEDIUM, (0, total), 'geospatial_vertical_extents_match', msgs) zvalue = ds.variables[z_variable][:] # If the array has fill values, which is allowed in the case of point # features if hasattr(zvalue, 'mask'): zvalue = zvalue[~zvalue.mask] if zvalue.size == 0: msgs.append("Cannot compare geospatial vertical extents " "against min/max of data, as non-masked data " "length is zero") return Result(BaseCheck.MEDIUM, (0, total), 'geospatial_vertical_extents_match', msgs) else: zmin = zvalue.min() zmax = zvalue.max() if not np.isclose(vert_min, zmin): msgs.append("geospatial_vertical_min != min(%s) values, %s != %s" % ( z_variable, vert_min, zmin )) if not np.isclose(vert_max, zmax): msgs.append("geospatial_vertical_max != max(%s) values, %s != %s" % ( z_variable, vert_min, zmax )) return Result(BaseCheck.MEDIUM, (total - len(msgs), total), 'geospatial_vertical_extents_match', msgs)
python
def _check_total_z_extents(self, ds, z_variable): ''' Check the entire array of Z for minimum and maximum and compare that to the vertical extents defined in the global attributes :param netCDF4.Dataset ds: An open netCDF dataset :param str z_variable: Name of the variable representing the Z-Axis ''' msgs = [] total = 2 try: vert_min = float(ds.geospatial_vertical_min) except ValueError: msgs.append('geospatial_vertical_min cannot be cast to float') try: vert_max = float(ds.geospatial_vertical_max) except ValueError: msgs.append('geospatial_vertical_max cannot be cast to float') if len(msgs) > 0: return Result(BaseCheck.MEDIUM, (0, total), 'geospatial_vertical_extents_match', msgs) zvalue = ds.variables[z_variable][:] # If the array has fill values, which is allowed in the case of point # features if hasattr(zvalue, 'mask'): zvalue = zvalue[~zvalue.mask] if zvalue.size == 0: msgs.append("Cannot compare geospatial vertical extents " "against min/max of data, as non-masked data " "length is zero") return Result(BaseCheck.MEDIUM, (0, total), 'geospatial_vertical_extents_match', msgs) else: zmin = zvalue.min() zmax = zvalue.max() if not np.isclose(vert_min, zmin): msgs.append("geospatial_vertical_min != min(%s) values, %s != %s" % ( z_variable, vert_min, zmin )) if not np.isclose(vert_max, zmax): msgs.append("geospatial_vertical_max != max(%s) values, %s != %s" % ( z_variable, vert_min, zmax )) return Result(BaseCheck.MEDIUM, (total - len(msgs), total), 'geospatial_vertical_extents_match', msgs)
[ "def", "_check_total_z_extents", "(", "self", ",", "ds", ",", "z_variable", ")", ":", "msgs", "=", "[", "]", "total", "=", "2", "try", ":", "vert_min", "=", "float", "(", "ds", ".", "geospatial_vertical_min", ")", "except", "ValueError", ":", "msgs", "."...
Check the entire array of Z for minimum and maximum and compare that to the vertical extents defined in the global attributes :param netCDF4.Dataset ds: An open netCDF dataset :param str z_variable: Name of the variable representing the Z-Axis
[ "Check", "the", "entire", "array", "of", "Z", "for", "minimum", "and", "maximum", "and", "compare", "that", "to", "the", "vertical", "extents", "defined", "in", "the", "global", "attributes" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/acdd.py#L397-L451
train
31,464
ioos/compliance-checker
compliance_checker/acdd.py
ACDDBaseCheck._check_scalar_vertical_extents
def _check_scalar_vertical_extents(self, ds, z_variable): ''' Check the scalar value of Z compared to the vertical extents which should also be equivalent :param netCDF4.Dataset ds: An open netCDF dataset :param str z_variable: Name of the variable representing the Z-Axis ''' vert_min = ds.geospatial_vertical_min vert_max = ds.geospatial_vertical_max msgs = [] total = 2 zvalue = ds.variables[z_variable][:].item() if not np.isclose(vert_min, vert_max): msgs.append("geospatial_vertical_min != geospatial_vertical_max for scalar depth values, %s != %s" % ( vert_min, vert_max )) if not np.isclose(vert_max, zvalue): msgs.append("geospatial_vertical_max != %s values, %s != %s" % ( z_variable, vert_max, zvalue )) return Result(BaseCheck.MEDIUM, (total - len(msgs), total), 'geospatial_vertical_extents_match', msgs)
python
def _check_scalar_vertical_extents(self, ds, z_variable): ''' Check the scalar value of Z compared to the vertical extents which should also be equivalent :param netCDF4.Dataset ds: An open netCDF dataset :param str z_variable: Name of the variable representing the Z-Axis ''' vert_min = ds.geospatial_vertical_min vert_max = ds.geospatial_vertical_max msgs = [] total = 2 zvalue = ds.variables[z_variable][:].item() if not np.isclose(vert_min, vert_max): msgs.append("geospatial_vertical_min != geospatial_vertical_max for scalar depth values, %s != %s" % ( vert_min, vert_max )) if not np.isclose(vert_max, zvalue): msgs.append("geospatial_vertical_max != %s values, %s != %s" % ( z_variable, vert_max, zvalue )) return Result(BaseCheck.MEDIUM, (total - len(msgs), total), 'geospatial_vertical_extents_match', msgs)
[ "def", "_check_scalar_vertical_extents", "(", "self", ",", "ds", ",", "z_variable", ")", ":", "vert_min", "=", "ds", ".", "geospatial_vertical_min", "vert_max", "=", "ds", ".", "geospatial_vertical_max", "msgs", "=", "[", "]", "total", "=", "2", "zvalue", "=",...
Check the scalar value of Z compared to the vertical extents which should also be equivalent :param netCDF4.Dataset ds: An open netCDF dataset :param str z_variable: Name of the variable representing the Z-Axis
[ "Check", "the", "scalar", "value", "of", "Z", "compared", "to", "the", "vertical", "extents", "which", "should", "also", "be", "equivalent" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/acdd.py#L453-L483
train
31,465
ioos/compliance-checker
compliance_checker/acdd.py
ACDDBaseCheck.verify_convention_version
def verify_convention_version(self, ds): """ Verify that the version in the Conventions field is correct """ try: for convention in getattr(ds, "Conventions", '').replace(' ', '').split(','): if convention == 'ACDD-' + self._cc_spec_version: return ratable_result((2, 2), None, []) # name=None so grouped with Globals # if no/wrong ACDD convention, return appropriate result # Result will have name "Global Attributes" to group with globals m = ["Conventions does not contain 'ACDD-{}'".format(self._cc_spec_version)] return ratable_result((1, 2), "Global Attributes", m) except AttributeError: # NetCDF attribute not found m = ["No Conventions attribute present; must contain ACDD-{}".format(self._cc_spec_version)] # Result will have name "Global Attributes" to group with globals return ratable_result((0, 2), "Global Attributes", m)
python
def verify_convention_version(self, ds): """ Verify that the version in the Conventions field is correct """ try: for convention in getattr(ds, "Conventions", '').replace(' ', '').split(','): if convention == 'ACDD-' + self._cc_spec_version: return ratable_result((2, 2), None, []) # name=None so grouped with Globals # if no/wrong ACDD convention, return appropriate result # Result will have name "Global Attributes" to group with globals m = ["Conventions does not contain 'ACDD-{}'".format(self._cc_spec_version)] return ratable_result((1, 2), "Global Attributes", m) except AttributeError: # NetCDF attribute not found m = ["No Conventions attribute present; must contain ACDD-{}".format(self._cc_spec_version)] # Result will have name "Global Attributes" to group with globals return ratable_result((0, 2), "Global Attributes", m)
[ "def", "verify_convention_version", "(", "self", ",", "ds", ")", ":", "try", ":", "for", "convention", "in", "getattr", "(", "ds", ",", "\"Conventions\"", ",", "''", ")", ".", "replace", "(", "' '", ",", "''", ")", ".", "split", "(", "','", ")", ":",...
Verify that the version in the Conventions field is correct
[ "Verify", "that", "the", "version", "in", "the", "Conventions", "field", "is", "correct" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/acdd.py#L566-L582
train
31,466
ioos/compliance-checker
compliance_checker/acdd.py
ACDD1_3Check.check_metadata_link
def check_metadata_link(self, ds): ''' Checks if metadata link is formed in a rational manner :param netCDF4.Dataset ds: An open netCDF dataset ''' if not hasattr(ds, u'metadata_link'): return msgs = [] meta_link = getattr(ds, 'metadata_link') if 'http' not in meta_link: msgs.append('Metadata URL should include http:// or https://') valid_link = (len(msgs) == 0) return Result(BaseCheck.LOW, valid_link, 'metadata_link_valid', msgs)
python
def check_metadata_link(self, ds): ''' Checks if metadata link is formed in a rational manner :param netCDF4.Dataset ds: An open netCDF dataset ''' if not hasattr(ds, u'metadata_link'): return msgs = [] meta_link = getattr(ds, 'metadata_link') if 'http' not in meta_link: msgs.append('Metadata URL should include http:// or https://') valid_link = (len(msgs) == 0) return Result(BaseCheck.LOW, valid_link, 'metadata_link_valid', msgs)
[ "def", "check_metadata_link", "(", "self", ",", "ds", ")", ":", "if", "not", "hasattr", "(", "ds", ",", "u'metadata_link'", ")", ":", "return", "msgs", "=", "[", "]", "meta_link", "=", "getattr", "(", "ds", ",", "'metadata_link'", ")", "if", "'http'", ...
Checks if metadata link is formed in a rational manner :param netCDF4.Dataset ds: An open netCDF dataset
[ "Checks", "if", "metadata", "link", "is", "formed", "in", "a", "rational", "manner" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/acdd.py#L679-L692
train
31,467
ioos/compliance-checker
compliance_checker/acdd.py
ACDD1_3Check.check_id_has_no_blanks
def check_id_has_no_blanks(self, ds): ''' Check if there are blanks in the id field :param netCDF4.Dataset ds: An open netCDF dataset ''' if not hasattr(ds, u'id'): return if ' ' in getattr(ds, u'id'): return Result(BaseCheck.MEDIUM, False, 'no_blanks_in_id', msgs=[u'There should be no blanks in the id field']) else: return Result(BaseCheck.MEDIUM, True, 'no_blanks_in_id', msgs=[])
python
def check_id_has_no_blanks(self, ds): ''' Check if there are blanks in the id field :param netCDF4.Dataset ds: An open netCDF dataset ''' if not hasattr(ds, u'id'): return if ' ' in getattr(ds, u'id'): return Result(BaseCheck.MEDIUM, False, 'no_blanks_in_id', msgs=[u'There should be no blanks in the id field']) else: return Result(BaseCheck.MEDIUM, True, 'no_blanks_in_id', msgs=[])
[ "def", "check_id_has_no_blanks", "(", "self", ",", "ds", ")", ":", "if", "not", "hasattr", "(", "ds", ",", "u'id'", ")", ":", "return", "if", "' '", "in", "getattr", "(", "ds", ",", "u'id'", ")", ":", "return", "Result", "(", "BaseCheck", ".", "MEDIU...
Check if there are blanks in the id field :param netCDF4.Dataset ds: An open netCDF dataset
[ "Check", "if", "there", "are", "blanks", "in", "the", "id", "field" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/acdd.py#L694-L706
train
31,468
ioos/compliance-checker
compliance_checker/acdd.py
ACDD1_3Check.check_var_coverage_content_type
def check_var_coverage_content_type(self, ds): ''' Check coverage content type against valid ISO-19115-1 codes :param netCDF4.Dataset ds: An open netCDF dataset ''' results = [] for variable in cfutil.get_geophysical_variables(ds): msgs = [] ctype = getattr(ds.variables[variable], 'coverage_content_type', None) check = ctype is not None if not check: msgs.append("coverage_content_type") results.append(Result(BaseCheck.HIGH, check, self._var_header.format(variable), msgs)) continue # ISO 19115-1 codes valid_ctypes = { 'image', 'thematicClassification', 'physicalMeasurement', 'auxiliaryInformation', 'qualityInformation', 'referenceInformation', 'modelResult', 'coordinate' } if ctype not in valid_ctypes: msgs.append("coverage_content_type in \"%s\"" % (variable, sorted(valid_ctypes))) results.append(Result(BaseCheck.HIGH, check, # append to list self._var_header.format(variable), msgs)) return results
python
def check_var_coverage_content_type(self, ds): ''' Check coverage content type against valid ISO-19115-1 codes :param netCDF4.Dataset ds: An open netCDF dataset ''' results = [] for variable in cfutil.get_geophysical_variables(ds): msgs = [] ctype = getattr(ds.variables[variable], 'coverage_content_type', None) check = ctype is not None if not check: msgs.append("coverage_content_type") results.append(Result(BaseCheck.HIGH, check, self._var_header.format(variable), msgs)) continue # ISO 19115-1 codes valid_ctypes = { 'image', 'thematicClassification', 'physicalMeasurement', 'auxiliaryInformation', 'qualityInformation', 'referenceInformation', 'modelResult', 'coordinate' } if ctype not in valid_ctypes: msgs.append("coverage_content_type in \"%s\"" % (variable, sorted(valid_ctypes))) results.append(Result(BaseCheck.HIGH, check, # append to list self._var_header.format(variable), msgs)) return results
[ "def", "check_var_coverage_content_type", "(", "self", ",", "ds", ")", ":", "results", "=", "[", "]", "for", "variable", "in", "cfutil", ".", "get_geophysical_variables", "(", "ds", ")", ":", "msgs", "=", "[", "]", "ctype", "=", "getattr", "(", "ds", "."...
Check coverage content type against valid ISO-19115-1 codes :param netCDF4.Dataset ds: An open netCDF dataset
[ "Check", "coverage", "content", "type", "against", "valid", "ISO", "-", "19115", "-", "1", "codes" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/acdd.py#L708-L743
train
31,469
ioos/compliance-checker
compliance_checker/cf/appendix_d.py
no_missing_terms
def no_missing_terms(formula_name, term_set): """ Returns true if the set is not missing terms corresponding to the entries in Appendix D, False otherwise. The set of terms should be exactly equal, and not contain more or less terms than expected. """ reqd_terms = dimless_vertical_coordinates[formula_name] def has_all_terms(reqd_termset): return len(reqd_termset ^ term_set) == 0 if isinstance(reqd_terms, set): return has_all_terms(reqd_terms) # if it's not a set, it's likely some other form of iterable with multiple # possible definitions i.e. a/ap are interchangeable in else: return any(has_all_terms(req) for req in reqd_terms)
python
def no_missing_terms(formula_name, term_set): """ Returns true if the set is not missing terms corresponding to the entries in Appendix D, False otherwise. The set of terms should be exactly equal, and not contain more or less terms than expected. """ reqd_terms = dimless_vertical_coordinates[formula_name] def has_all_terms(reqd_termset): return len(reqd_termset ^ term_set) == 0 if isinstance(reqd_terms, set): return has_all_terms(reqd_terms) # if it's not a set, it's likely some other form of iterable with multiple # possible definitions i.e. a/ap are interchangeable in else: return any(has_all_terms(req) for req in reqd_terms)
[ "def", "no_missing_terms", "(", "formula_name", ",", "term_set", ")", ":", "reqd_terms", "=", "dimless_vertical_coordinates", "[", "formula_name", "]", "def", "has_all_terms", "(", "reqd_termset", ")", ":", "return", "len", "(", "reqd_termset", "^", "term_set", ")...
Returns true if the set is not missing terms corresponding to the entries in Appendix D, False otherwise. The set of terms should be exactly equal, and not contain more or less terms than expected.
[ "Returns", "true", "if", "the", "set", "is", "not", "missing", "terms", "corresponding", "to", "the", "entries", "in", "Appendix", "D", "False", "otherwise", ".", "The", "set", "of", "terms", "should", "be", "exactly", "equal", "and", "not", "contain", "mo...
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/cf/appendix_d.py#L47-L62
train
31,470
ioos/compliance-checker
compliance_checker/base.py
attr_check
def attr_check(kvp, ds, priority, ret_val, gname=None): """ Handles attribute checks for simple presence of an attribute, presence of one of several attributes, and passing a validation function. Returns a status along with an error message in the event of a failure. Mutates ret_val parameter :param tuple(str, func) or str l: the attribute being checked :param netCDF4 dataset ds : dataset being checked :param int priority : priority level of check :param list ret_val : result to be returned :param str or None gname : group name assigned to a group of attribute Results """ msgs = [] name, other = kvp if other is None: res = std_check(ds, name) if not res: msgs = ["%s not present" % name] else: try: # see if this attribute is a string, try stripping # whitespace, and return an error if empty att_strip = getattr(ds, name).strip() if not att_strip: res = False msgs = ["%s is empty or completely whitespace" % name] # if not a string/has no strip method we should be OK except AttributeError: pass # gname arg allows the global attrs to be grouped together ret_val.append(Result( priority, value=res, name=gname if gname else name, msgs=msgs )) elif hasattr(other, '__iter__'): # redundant, we could easily do this with a hasattr # check instead res = std_check_in(ds, name, other) if res == 0: msgs.append("%s not present" % name) elif res == 1: msgs.append("%s present, but not in expected value list (%s)" % (name, other)) ret_val.append( Result( priority, (res, 2), gname if gname else name, # groups Globals if supplied msgs ) ) # if we have an XPath expression, call it on the document elif type(other) is etree.XPath: # TODO: store tree instead of creating it each time? 
res = xpath_check(ds._root, other) if not res: msgs = ["XPath for {} not found".format(name)] ret_val.append( Result( priority, res, gname if gname else name, msgs ) ) # if the attribute is a function, call it # right now only supports single attribute # important note: current magic approach uses all functions # starting with "check". Avoid naming check functions # starting with check if you want to pass them in with # a tuple to avoid them being checked more than once elif hasattr(other, '__call__'): # check that the attribute is actually present. # This reduces boilerplate in functions by not needing # to check whether the attribute is present every time # and instead focuses on the core functionality of the # test res = other(ds) # call the method on the dataset if not res: msgs = ["%s not present" % name] ret_val.append( Result( priority, res, gname if gname else name, msgs ) ) else: ret_val.append(res(priority)) # unsupported second type in second else: raise TypeError("Second arg in tuple has unsupported type: {}".format(type(other))) return ret_val
python
def attr_check(kvp, ds, priority, ret_val, gname=None): """ Handles attribute checks for simple presence of an attribute, presence of one of several attributes, and passing a validation function. Returns a status along with an error message in the event of a failure. Mutates ret_val parameter :param tuple(str, func) or str l: the attribute being checked :param netCDF4 dataset ds : dataset being checked :param int priority : priority level of check :param list ret_val : result to be returned :param str or None gname : group name assigned to a group of attribute Results """ msgs = [] name, other = kvp if other is None: res = std_check(ds, name) if not res: msgs = ["%s not present" % name] else: try: # see if this attribute is a string, try stripping # whitespace, and return an error if empty att_strip = getattr(ds, name).strip() if not att_strip: res = False msgs = ["%s is empty or completely whitespace" % name] # if not a string/has no strip method we should be OK except AttributeError: pass # gname arg allows the global attrs to be grouped together ret_val.append(Result( priority, value=res, name=gname if gname else name, msgs=msgs )) elif hasattr(other, '__iter__'): # redundant, we could easily do this with a hasattr # check instead res = std_check_in(ds, name, other) if res == 0: msgs.append("%s not present" % name) elif res == 1: msgs.append("%s present, but not in expected value list (%s)" % (name, other)) ret_val.append( Result( priority, (res, 2), gname if gname else name, # groups Globals if supplied msgs ) ) # if we have an XPath expression, call it on the document elif type(other) is etree.XPath: # TODO: store tree instead of creating it each time? 
res = xpath_check(ds._root, other) if not res: msgs = ["XPath for {} not found".format(name)] ret_val.append( Result( priority, res, gname if gname else name, msgs ) ) # if the attribute is a function, call it # right now only supports single attribute # important note: current magic approach uses all functions # starting with "check". Avoid naming check functions # starting with check if you want to pass them in with # a tuple to avoid them being checked more than once elif hasattr(other, '__call__'): # check that the attribute is actually present. # This reduces boilerplate in functions by not needing # to check whether the attribute is present every time # and instead focuses on the core functionality of the # test res = other(ds) # call the method on the dataset if not res: msgs = ["%s not present" % name] ret_val.append( Result( priority, res, gname if gname else name, msgs ) ) else: ret_val.append(res(priority)) # unsupported second type in second else: raise TypeError("Second arg in tuple has unsupported type: {}".format(type(other))) return ret_val
[ "def", "attr_check", "(", "kvp", ",", "ds", ",", "priority", ",", "ret_val", ",", "gname", "=", "None", ")", ":", "msgs", "=", "[", "]", "name", ",", "other", "=", "kvp", "if", "other", "is", "None", ":", "res", "=", "std_check", "(", "ds", ",", ...
Handles attribute checks for simple presence of an attribute, presence of one of several attributes, and passing a validation function. Returns a status along with an error message in the event of a failure. Mutates ret_val parameter :param tuple(str, func) or str l: the attribute being checked :param netCDF4 dataset ds : dataset being checked :param int priority : priority level of check :param list ret_val : result to be returned :param str or None gname : group name assigned to a group of attribute Results
[ "Handles", "attribute", "checks", "for", "simple", "presence", "of", "an", "attribute", "presence", "of", "one", "of", "several", "attributes", "and", "passing", "a", "validation", "function", ".", "Returns", "a", "status", "along", "with", "an", "error", "mes...
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/base.py#L235-L336
train
31,471
ioos/compliance-checker
compliance_checker/base.py
fix_return_value
def fix_return_value(v, method_name, method=None, checker=None): """ Transforms scalar return values into Result. """ # remove common check prefix method_name = (method_name or method.__func__.__name__).replace("check_","") if v is None or not isinstance(v, Result): v = Result(value=v, name=method_name) v.name = v.name or method_name v.checker = checker v.check_method = method return v
python
def fix_return_value(v, method_name, method=None, checker=None): """ Transforms scalar return values into Result. """ # remove common check prefix method_name = (method_name or method.__func__.__name__).replace("check_","") if v is None or not isinstance(v, Result): v = Result(value=v, name=method_name) v.name = v.name or method_name v.checker = checker v.check_method = method return v
[ "def", "fix_return_value", "(", "v", ",", "method_name", ",", "method", "=", "None", ",", "checker", "=", "None", ")", ":", "# remove common check prefix", "method_name", "=", "(", "method_name", "or", "method", ".", "__func__", ".", "__name__", ")", ".", "r...
Transforms scalar return values into Result.
[ "Transforms", "scalar", "return", "values", "into", "Result", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/base.py#L360-L374
train
31,472
ioos/compliance-checker
compliance_checker/base.py
ratable_result
def ratable_result(value, name, msgs): """Returns a partial function with a Result that has not been weighted.""" return lambda w: Result(w, value, name, msgs)
python
def ratable_result(value, name, msgs): """Returns a partial function with a Result that has not been weighted.""" return lambda w: Result(w, value, name, msgs)
[ "def", "ratable_result", "(", "value", ",", "name", ",", "msgs", ")", ":", "return", "lambda", "w", ":", "Result", "(", "w", ",", "value", ",", "name", ",", "msgs", ")" ]
Returns a partial function with a Result that has not been weighted.
[ "Returns", "a", "partial", "function", "with", "a", "Result", "that", "has", "not", "been", "weighted", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/base.py#L377-L379
train
31,473
ioos/compliance-checker
compliance_checker/base.py
score_group
def score_group(group_name=None): ''' Warning this is deprecated as of Compliance Checker v3.2! Please do not using scoring groups and update your plugins if necessary ''' warnings.warn('Score_group is deprecated as of Compliance Checker v3.2.') def _inner(func): def _dec(s, ds): ret_val = func(s, ds) """ if group_name != None and not isinstance(ret_val[0], tuple): return tuple([(group_name, ret_val[0])] + list(ret_val[1:])) """ # multiple returns if not isinstance(ret_val, list): ret_val = [ret_val] def dogroup(r): cur_grouping = r.name if isinstance(cur_grouping, tuple): cur_grouping = list(cur_grouping) elif not isinstance(cur_grouping, list): cur_grouping = [cur_grouping] cur_grouping.insert(0, group_name) return Result(r.weight, r.value, tuple(cur_grouping), r.msgs) ret_val = [fix_return_value(x, func.__name__, func, s) for x in ret_val] ret_val = list(map(dogroup, ret_val)) return ret_val return wraps(func)(_dec) return _inner
python
def score_group(group_name=None): ''' Warning this is deprecated as of Compliance Checker v3.2! Please do not using scoring groups and update your plugins if necessary ''' warnings.warn('Score_group is deprecated as of Compliance Checker v3.2.') def _inner(func): def _dec(s, ds): ret_val = func(s, ds) """ if group_name != None and not isinstance(ret_val[0], tuple): return tuple([(group_name, ret_val[0])] + list(ret_val[1:])) """ # multiple returns if not isinstance(ret_val, list): ret_val = [ret_val] def dogroup(r): cur_grouping = r.name if isinstance(cur_grouping, tuple): cur_grouping = list(cur_grouping) elif not isinstance(cur_grouping, list): cur_grouping = [cur_grouping] cur_grouping.insert(0, group_name) return Result(r.weight, r.value, tuple(cur_grouping), r.msgs) ret_val = [fix_return_value(x, func.__name__, func, s) for x in ret_val] ret_val = list(map(dogroup, ret_val)) return ret_val return wraps(func)(_dec) return _inner
[ "def", "score_group", "(", "group_name", "=", "None", ")", ":", "warnings", ".", "warn", "(", "'Score_group is deprecated as of Compliance Checker v3.2.'", ")", "def", "_inner", "(", "func", ")", ":", "def", "_dec", "(", "s", ",", "ds", ")", ":", "ret_val", ...
Warning this is deprecated as of Compliance Checker v3.2! Please do not using scoring groups and update your plugins if necessary
[ "Warning", "this", "is", "deprecated", "as", "of", "Compliance", "Checker", "v3", ".", "2!" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/base.py#L382-L419
train
31,474
ioos/compliance-checker
compliance_checker/base.py
Result.serialize
def serialize(self): ''' Returns a serializable dictionary that represents the result object ''' return { 'name' : self.name, 'weight' : self.weight, 'value' : self.value, 'msgs' : self.msgs, 'children' : [i.serialize() for i in self.children] }
python
def serialize(self): ''' Returns a serializable dictionary that represents the result object ''' return { 'name' : self.name, 'weight' : self.weight, 'value' : self.value, 'msgs' : self.msgs, 'children' : [i.serialize() for i in self.children] }
[ "def", "serialize", "(", "self", ")", ":", "return", "{", "'name'", ":", "self", ".", "name", ",", "'weight'", ":", "self", ".", "weight", ",", "'value'", ":", "self", ".", "value", ",", "'msgs'", ":", "self", ".", "msgs", ",", "'children'", ":", "...
Returns a serializable dictionary that represents the result object
[ "Returns", "a", "serializable", "dictionary", "that", "represents", "the", "result", "object" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/base.py#L160-L170
train
31,475
ioos/compliance-checker
compliance_checker/suite.py
CheckSuite._get_generator_plugins
def _get_generator_plugins(cls): """ Return a list of classes from external plugins that are used to generate checker classes """ if not hasattr(cls, 'suite_generators'): gens = working_set.iter_entry_points('compliance_checker.generators') cls.suite_generators = [x.resolve() for x in gens] return cls.suite_generators
python
def _get_generator_plugins(cls): """ Return a list of classes from external plugins that are used to generate checker classes """ if not hasattr(cls, 'suite_generators'): gens = working_set.iter_entry_points('compliance_checker.generators') cls.suite_generators = [x.resolve() for x in gens] return cls.suite_generators
[ "def", "_get_generator_plugins", "(", "cls", ")", ":", "if", "not", "hasattr", "(", "cls", ",", "'suite_generators'", ")", ":", "gens", "=", "working_set", ".", "iter_entry_points", "(", "'compliance_checker.generators'", ")", "cls", ".", "suite_generators", "=", ...
Return a list of classes from external plugins that are used to generate checker classes
[ "Return", "a", "list", "of", "classes", "from", "external", "plugins", "that", "are", "used", "to", "generate", "checker", "classes" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/suite.py#L49-L59
train
31,476
ioos/compliance-checker
compliance_checker/suite.py
CheckSuite.load_generated_checkers
def load_generated_checkers(cls, args): """ Load checker classes from generator plugins """ for gen in cls._get_generator_plugins(): checkers = gen.get_checkers(args) cls.checkers.update(checkers)
python
def load_generated_checkers(cls, args): """ Load checker classes from generator plugins """ for gen in cls._get_generator_plugins(): checkers = gen.get_checkers(args) cls.checkers.update(checkers)
[ "def", "load_generated_checkers", "(", "cls", ",", "args", ")", ":", "for", "gen", "in", "cls", ".", "_get_generator_plugins", "(", ")", ":", "checkers", "=", "gen", ".", "get_checkers", "(", "args", ")", "cls", ".", "checkers", ".", "update", "(", "chec...
Load checker classes from generator plugins
[ "Load", "checker", "classes", "from", "generator", "plugins" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/suite.py#L71-L78
train
31,477
ioos/compliance-checker
compliance_checker/suite.py
CheckSuite.load_all_available_checkers
def load_all_available_checkers(cls): """ Helper method to retrieve all sub checker classes derived from various base classes. """ for x in working_set.iter_entry_points('compliance_checker.suites'): try: xl = x.resolve() cls.checkers[':'.join((xl._cc_spec, xl._cc_spec_version))] = xl # TODO: remove this once all checkers move over to the new # _cc_spec, _cc_spec_version except AttributeError: # if there are versioned classes, it will get overwritten by the # latest version later. If there are not, it will be assigned # the checker as the main class # TODO: nix name attribute in plugins. Keeping in for now # to provide backwards compatibility cls.checkers[getattr(xl, 'name', None) or xl._cc_spec] = xl except Exception as e: print("Could not load", x, ":", e, file=sys.stderr) # find the latest version of versioned checkers and set that as the # default checker for compliance checker if no version is specified ver_checkers = sorted([c.split(':', 1) for c in cls.checkers if ':' in c]) for spec, versions in itertools.groupby(ver_checkers, itemgetter(0)): version_nums = [v[-1] for v in versions] try: latest_version = str(max(StrictVersion(v) for v in version_nums)) # if the version can't be parsed as a StrictVersion, parse # according to character collation except ValueError: latest_version = max(version_nums) cls.checkers[spec] = cls.checkers[spec + ':latest'] = \ cls.checkers[':'.join((spec, latest_version))]
python
def load_all_available_checkers(cls): """ Helper method to retrieve all sub checker classes derived from various base classes. """ for x in working_set.iter_entry_points('compliance_checker.suites'): try: xl = x.resolve() cls.checkers[':'.join((xl._cc_spec, xl._cc_spec_version))] = xl # TODO: remove this once all checkers move over to the new # _cc_spec, _cc_spec_version except AttributeError: # if there are versioned classes, it will get overwritten by the # latest version later. If there are not, it will be assigned # the checker as the main class # TODO: nix name attribute in plugins. Keeping in for now # to provide backwards compatibility cls.checkers[getattr(xl, 'name', None) or xl._cc_spec] = xl except Exception as e: print("Could not load", x, ":", e, file=sys.stderr) # find the latest version of versioned checkers and set that as the # default checker for compliance checker if no version is specified ver_checkers = sorted([c.split(':', 1) for c in cls.checkers if ':' in c]) for spec, versions in itertools.groupby(ver_checkers, itemgetter(0)): version_nums = [v[-1] for v in versions] try: latest_version = str(max(StrictVersion(v) for v in version_nums)) # if the version can't be parsed as a StrictVersion, parse # according to character collation except ValueError: latest_version = max(version_nums) cls.checkers[spec] = cls.checkers[spec + ':latest'] = \ cls.checkers[':'.join((spec, latest_version))]
[ "def", "load_all_available_checkers", "(", "cls", ")", ":", "for", "x", "in", "working_set", ".", "iter_entry_points", "(", "'compliance_checker.suites'", ")", ":", "try", ":", "xl", "=", "x", ".", "resolve", "(", ")", "cls", ".", "checkers", "[", "':'", "...
Helper method to retrieve all sub checker classes derived from various base classes.
[ "Helper", "method", "to", "retrieve", "all", "sub", "checker", "classes", "derived", "from", "various", "base", "classes", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/suite.py#L81-L116
train
31,478
ioos/compliance-checker
compliance_checker/suite.py
CheckSuite._get_checks
def _get_checks(self, checkclass, skip_checks): """ Helper method to retreive check methods from a Checker class. Excludes any checks in `skip_checks`. The name of the methods in the Checker class should start with "check_" for this method to find them. """ meths = inspect.getmembers(checkclass, inspect.ismethod) # return all check methods not among the skipped checks returned_checks = [] for fn_name, fn_obj in meths: if (fn_name.startswith("check_") and skip_checks[fn_name] != BaseCheck.HIGH): returned_checks.append((fn_obj, skip_checks[fn_name])) return returned_checks
python
def _get_checks(self, checkclass, skip_checks): """ Helper method to retreive check methods from a Checker class. Excludes any checks in `skip_checks`. The name of the methods in the Checker class should start with "check_" for this method to find them. """ meths = inspect.getmembers(checkclass, inspect.ismethod) # return all check methods not among the skipped checks returned_checks = [] for fn_name, fn_obj in meths: if (fn_name.startswith("check_") and skip_checks[fn_name] != BaseCheck.HIGH): returned_checks.append((fn_obj, skip_checks[fn_name])) return returned_checks
[ "def", "_get_checks", "(", "self", ",", "checkclass", ",", "skip_checks", ")", ":", "meths", "=", "inspect", ".", "getmembers", "(", "checkclass", ",", "inspect", ".", "ismethod", ")", "# return all check methods not among the skipped checks", "returned_checks", "=", ...
Helper method to retreive check methods from a Checker class. Excludes any checks in `skip_checks`. The name of the methods in the Checker class should start with "check_" for this method to find them.
[ "Helper", "method", "to", "retreive", "check", "methods", "from", "a", "Checker", "class", ".", "Excludes", "any", "checks", "in", "skip_checks", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/suite.py#L118-L134
train
31,479
ioos/compliance-checker
compliance_checker/suite.py
CheckSuite._run_check
def _run_check(self, check_method, ds, max_level): """ Runs a check and appends a result to the values list. @param bound method check_method: a given check method @param netCDF4 dataset ds @param int max_level: check level @return list: list of Result objects """ val = check_method(ds) if isinstance(val, list): check_val = [] for v in val: res = fix_return_value(v, check_method.__func__.__name__, check_method, check_method.__self__) if max_level is None or res.weight > max_level: check_val.append(res) return check_val else: check_val = fix_return_value(val, check_method.__func__.__name__, check_method, check_method.__self__) if max_level is None or check_val.weight > max_level: return [check_val] else: return []
python
def _run_check(self, check_method, ds, max_level): """ Runs a check and appends a result to the values list. @param bound method check_method: a given check method @param netCDF4 dataset ds @param int max_level: check level @return list: list of Result objects """ val = check_method(ds) if isinstance(val, list): check_val = [] for v in val: res = fix_return_value(v, check_method.__func__.__name__, check_method, check_method.__self__) if max_level is None or res.weight > max_level: check_val.append(res) return check_val else: check_val = fix_return_value(val, check_method.__func__.__name__, check_method, check_method.__self__) if max_level is None or check_val.weight > max_level: return [check_val] else: return []
[ "def", "_run_check", "(", "self", ",", "check_method", ",", "ds", ",", "max_level", ")", ":", "val", "=", "check_method", "(", "ds", ")", "if", "isinstance", "(", "val", ",", "list", ")", ":", "check_val", "=", "[", "]", "for", "v", "in", "val", ":...
Runs a check and appends a result to the values list. @param bound method check_method: a given check method @param netCDF4 dataset ds @param int max_level: check level @return list: list of Result objects
[ "Runs", "a", "check", "and", "appends", "a", "result", "to", "the", "values", "list", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/suite.py#L136-L160
train
31,480
ioos/compliance-checker
compliance_checker/suite.py
CheckSuite._get_check_versioned_name
def _get_check_versioned_name(self, check_name): """ The compliance checker allows the user to specify a check without a version number but we want the report to specify the version number. Returns the check name with the version number it checked """ if ':' not in check_name or ':latest' in check_name: check_name = ':'.join((check_name.split(':')[0], self.checkers[check_name]._cc_spec_version)) return check_name
python
def _get_check_versioned_name(self, check_name): """ The compliance checker allows the user to specify a check without a version number but we want the report to specify the version number. Returns the check name with the version number it checked """ if ':' not in check_name or ':latest' in check_name: check_name = ':'.join((check_name.split(':')[0], self.checkers[check_name]._cc_spec_version)) return check_name
[ "def", "_get_check_versioned_name", "(", "self", ",", "check_name", ")", ":", "if", "':'", "not", "in", "check_name", "or", "':latest'", "in", "check_name", ":", "check_name", "=", "':'", ".", "join", "(", "(", "check_name", ".", "split", "(", "':'", ")", ...
The compliance checker allows the user to specify a check without a version number but we want the report to specify the version number. Returns the check name with the version number it checked
[ "The", "compliance", "checker", "allows", "the", "user", "to", "specify", "a", "check", "without", "a", "version", "number", "but", "we", "want", "the", "report", "to", "specify", "the", "version", "number", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/suite.py#L162-L173
train
31,481
ioos/compliance-checker
compliance_checker/suite.py
CheckSuite.run
def run(self, ds, skip_checks, *checker_names): """ Runs this CheckSuite on the dataset with all the passed Checker instances. Returns a dictionary mapping checker names to a 2-tuple of their grouped scores and errors/exceptions while running checks. """ ret_val = {} checkers = self._get_valid_checkers(ds, checker_names) if skip_checks is not None: skip_check_dict = CheckSuite._process_skip_checks(skip_checks) else: skip_check_dict = defaultdict(lambda: None) if len(checkers) == 0: print("No valid checkers found for tests '{}'".format(",".join(checker_names))) for checker_name, checker_class in checkers: checker = checker_class() # instantiate a Checker object checker.setup(ds) # setup method to prep checks = self._get_checks(checker, skip_check_dict) vals = [] errs = {} # check method name -> (exc, traceback) for c, max_level in checks: try: vals.extend(self._run_check(c, ds, max_level)) except Exception as e: errs[c.__func__.__name__] = (e, sys.exc_info()[2]) # score the results we got back groups = self.scores(vals) ret_val[checker_name] = groups, errs return ret_val
python
def run(self, ds, skip_checks, *checker_names): """ Runs this CheckSuite on the dataset with all the passed Checker instances. Returns a dictionary mapping checker names to a 2-tuple of their grouped scores and errors/exceptions while running checks. """ ret_val = {} checkers = self._get_valid_checkers(ds, checker_names) if skip_checks is not None: skip_check_dict = CheckSuite._process_skip_checks(skip_checks) else: skip_check_dict = defaultdict(lambda: None) if len(checkers) == 0: print("No valid checkers found for tests '{}'".format(",".join(checker_names))) for checker_name, checker_class in checkers: checker = checker_class() # instantiate a Checker object checker.setup(ds) # setup method to prep checks = self._get_checks(checker, skip_check_dict) vals = [] errs = {} # check method name -> (exc, traceback) for c, max_level in checks: try: vals.extend(self._run_check(c, ds, max_level)) except Exception as e: errs[c.__func__.__name__] = (e, sys.exc_info()[2]) # score the results we got back groups = self.scores(vals) ret_val[checker_name] = groups, errs return ret_val
[ "def", "run", "(", "self", ",", "ds", ",", "skip_checks", ",", "*", "checker_names", ")", ":", "ret_val", "=", "{", "}", "checkers", "=", "self", ".", "_get_valid_checkers", "(", "ds", ",", "checker_names", ")", "if", "skip_checks", "is", "not", "None", ...
Runs this CheckSuite on the dataset with all the passed Checker instances. Returns a dictionary mapping checker names to a 2-tuple of their grouped scores and errors/exceptions while running checks.
[ "Runs", "this", "CheckSuite", "on", "the", "dataset", "with", "all", "the", "passed", "Checker", "instances", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/suite.py#L247-L285
train
31,482
ioos/compliance-checker
compliance_checker/suite.py
CheckSuite.dict_output
def dict_output(self, check_name, groups, source_name, limit): ''' Builds the results into a JSON structure and writes it to the file buffer. @param check_name The test which was run @param groups List of results from compliance checker @param output_filename Path to file to save output @param source_name Source of the dataset, used for title @param limit Integer value for limiting output ''' aggregates = self.build_structure(check_name, groups, source_name, limit) return self.serialize(aggregates)
python
def dict_output(self, check_name, groups, source_name, limit): ''' Builds the results into a JSON structure and writes it to the file buffer. @param check_name The test which was run @param groups List of results from compliance checker @param output_filename Path to file to save output @param source_name Source of the dataset, used for title @param limit Integer value for limiting output ''' aggregates = self.build_structure(check_name, groups, source_name, limit) return self.serialize(aggregates)
[ "def", "dict_output", "(", "self", ",", "check_name", ",", "groups", ",", "source_name", ",", "limit", ")", ":", "aggregates", "=", "self", ".", "build_structure", "(", "check_name", ",", "groups", ",", "source_name", ",", "limit", ")", "return", "self", "...
Builds the results into a JSON structure and writes it to the file buffer. @param check_name The test which was run @param groups List of results from compliance checker @param output_filename Path to file to save output @param source_name Source of the dataset, used for title @param limit Integer value for limiting output
[ "Builds", "the", "results", "into", "a", "JSON", "structure", "and", "writes", "it", "to", "the", "file", "buffer", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/suite.py#L380-L391
train
31,483
ioos/compliance-checker
compliance_checker/suite.py
CheckSuite.serialize
def serialize(self, o): ''' Returns a safe serializable object that can be serialized into JSON. @param o Python object to serialize ''' if isinstance(o, (list, tuple)): return [self.serialize(i) for i in o] if isinstance(o, dict): return {k: self.serialize(v) for k, v in o.items()} if isinstance(o, datetime): return o.isoformat() if isinstance(o, Result): return self.serialize(o.serialize()) return o
python
def serialize(self, o): ''' Returns a safe serializable object that can be serialized into JSON. @param o Python object to serialize ''' if isinstance(o, (list, tuple)): return [self.serialize(i) for i in o] if isinstance(o, dict): return {k: self.serialize(v) for k, v in o.items()} if isinstance(o, datetime): return o.isoformat() if isinstance(o, Result): return self.serialize(o.serialize()) return o
[ "def", "serialize", "(", "self", ",", "o", ")", ":", "if", "isinstance", "(", "o", ",", "(", "list", ",", "tuple", ")", ")", ":", "return", "[", "self", ".", "serialize", "(", "i", ")", "for", "i", "in", "o", "]", "if", "isinstance", "(", "o", ...
Returns a safe serializable object that can be serialized into JSON. @param o Python object to serialize
[ "Returns", "a", "safe", "serializable", "object", "that", "can", "be", "serialized", "into", "JSON", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/suite.py#L393-L407
train
31,484
ioos/compliance-checker
compliance_checker/suite.py
CheckSuite.checker_html_output
def checker_html_output(self, check_name, groups, source_name, limit): ''' Renders the HTML output for a single test using Jinja2 and returns it as a string. @param check_name The test which was run @param groups List of results from compliance checker @param source_name Source of the dataset, used for title @param limit Integer value for limiting output ''' from jinja2 import Environment, PackageLoader self.j2 = Environment(loader=PackageLoader(self.templates_root, 'data/templates')) template = self.j2.get_template('ccheck.html.j2') template_vars = self.build_structure(check_name, groups, source_name, limit) return template.render(**template_vars)
python
def checker_html_output(self, check_name, groups, source_name, limit): ''' Renders the HTML output for a single test using Jinja2 and returns it as a string. @param check_name The test which was run @param groups List of results from compliance checker @param source_name Source of the dataset, used for title @param limit Integer value for limiting output ''' from jinja2 import Environment, PackageLoader self.j2 = Environment(loader=PackageLoader(self.templates_root, 'data/templates')) template = self.j2.get_template('ccheck.html.j2') template_vars = self.build_structure(check_name, groups, source_name, limit) return template.render(**template_vars)
[ "def", "checker_html_output", "(", "self", ",", "check_name", ",", "groups", ",", "source_name", ",", "limit", ")", ":", "from", "jinja2", "import", "Environment", ",", "PackageLoader", "self", ".", "j2", "=", "Environment", "(", "loader", "=", "PackageLoader"...
Renders the HTML output for a single test using Jinja2 and returns it as a string. @param check_name The test which was run @param groups List of results from compliance checker @param source_name Source of the dataset, used for title @param limit Integer value for limiting output
[ "Renders", "the", "HTML", "output", "for", "a", "single", "test", "using", "Jinja2", "and", "returns", "it", "as", "a", "string", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/suite.py#L409-L424
train
31,485
ioos/compliance-checker
compliance_checker/suite.py
CheckSuite.html_output
def html_output(self, checkers_html): ''' Renders the HTML output for multiple tests and returns it as a string. @param checkers_html List of HTML for single tests as returned by checker_html_output ''' # Note: This relies on checker_html_output having been called so that # self.j2 is initialised template = self.j2.get_template('ccheck_wrapper.html.j2') return template.render(checkers=checkers_html)
python
def html_output(self, checkers_html): ''' Renders the HTML output for multiple tests and returns it as a string. @param checkers_html List of HTML for single tests as returned by checker_html_output ''' # Note: This relies on checker_html_output having been called so that # self.j2 is initialised template = self.j2.get_template('ccheck_wrapper.html.j2') return template.render(checkers=checkers_html)
[ "def", "html_output", "(", "self", ",", "checkers_html", ")", ":", "# Note: This relies on checker_html_output having been called so that", "# self.j2 is initialised", "template", "=", "self", ".", "j2", ".", "get_template", "(", "'ccheck_wrapper.html.j2'", ")", "return", "...
Renders the HTML output for multiple tests and returns it as a string. @param checkers_html List of HTML for single tests as returned by checker_html_output
[ "Renders", "the", "HTML", "output", "for", "multiple", "tests", "and", "returns", "it", "as", "a", "string", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/suite.py#L426-L436
train
31,486
ioos/compliance-checker
compliance_checker/suite.py
CheckSuite.standard_output
def standard_output(self, ds, limit, check_name, groups): """ Generates the Terminal Output for Standard cases Returns the dataset needed for the verbose output, as well as the failure flags. """ score_list, points, out_of = self.get_points(groups, limit) issue_count = out_of - points # Let's add the version number to the check name if it's missing check_name = self._get_check_versioned_name(check_name) check_url = self._get_check_url(check_name) width = 2 * self.col_width print('\n') print("-" * width) print('{:^{width}}'.format("IOOS Compliance Checker Report", width=width)) print('{:^{width}}'.format(check_name, width=width)) print('{:^{width}}'.format(check_url, width=width)) print("-" * width) if issue_count > 0: print('{:^{width}}'.format("Corrective Actions", width=width)) plural = '' if issue_count == 1 else 's' print("{} has {} potential issue{}".format(os.path.basename(ds), issue_count, plural)) return [groups, points, out_of]
python
def standard_output(self, ds, limit, check_name, groups): """ Generates the Terminal Output for Standard cases Returns the dataset needed for the verbose output, as well as the failure flags. """ score_list, points, out_of = self.get_points(groups, limit) issue_count = out_of - points # Let's add the version number to the check name if it's missing check_name = self._get_check_versioned_name(check_name) check_url = self._get_check_url(check_name) width = 2 * self.col_width print('\n') print("-" * width) print('{:^{width}}'.format("IOOS Compliance Checker Report", width=width)) print('{:^{width}}'.format(check_name, width=width)) print('{:^{width}}'.format(check_url, width=width)) print("-" * width) if issue_count > 0: print('{:^{width}}'.format("Corrective Actions", width=width)) plural = '' if issue_count == 1 else 's' print("{} has {} potential issue{}".format(os.path.basename(ds), issue_count, plural)) return [groups, points, out_of]
[ "def", "standard_output", "(", "self", ",", "ds", ",", "limit", ",", "check_name", ",", "groups", ")", ":", "score_list", ",", "points", ",", "out_of", "=", "self", ".", "get_points", "(", "groups", ",", "limit", ")", "issue_count", "=", "out_of", "-", ...
Generates the Terminal Output for Standard cases Returns the dataset needed for the verbose output, as well as the failure flags.
[ "Generates", "the", "Terminal", "Output", "for", "Standard", "cases" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/suite.py#L455-L480
train
31,487
ioos/compliance-checker
compliance_checker/suite.py
CheckSuite.standard_output_generation
def standard_output_generation(self, groups, limit, points, out_of, check): ''' Generates the Terminal Output ''' if points < out_of: self.reasoning_routine(groups, check, priority_flag=limit) else: print("All tests passed!")
python
def standard_output_generation(self, groups, limit, points, out_of, check): ''' Generates the Terminal Output ''' if points < out_of: self.reasoning_routine(groups, check, priority_flag=limit) else: print("All tests passed!")
[ "def", "standard_output_generation", "(", "self", ",", "groups", ",", "limit", ",", "points", ",", "out_of", ",", "check", ")", ":", "if", "points", "<", "out_of", ":", "self", ".", "reasoning_routine", "(", "groups", ",", "check", ",", "priority_flag", "=...
Generates the Terminal Output
[ "Generates", "the", "Terminal", "Output" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/suite.py#L482-L489
train
31,488
ioos/compliance-checker
compliance_checker/suite.py
CheckSuite.reasoning_routine
def reasoning_routine(self, groups, check, priority_flag=3, _top_level=True): """ print routine performed @param list groups: the Result groups @param str check: checker name @param int priority_flag: indicates the weight of the groups @param bool _top_level: indicates the level of the group so as to print out the appropriate header string """ sort_fn = lambda x: x.weight groups_sorted = sorted(groups, key=sort_fn, reverse=True) # create dict of the groups -> {level: [reasons]} result = {key: [v for v in valuesiter if v.value[0] != v.value[1]] for key, valuesiter in itertools.groupby(groups_sorted, key=sort_fn)} priorities = self.checkers[check]._cc_display_headers def process_table(res, check): """Recursively calls reasoning_routine to parse out child reasons from the parent reasons. @param Result res: Result object @param str check: checker name""" issue = res.name if not res.children: reasons = res.msgs else: child_reasons = self.reasoning_routine(res.children, check, _top_level=False) # there shouldn't be messages if there are children # is this a valid assumption? reasons = child_reasons return issue, reasons # iterate in reverse to the min priority requested; # the higher the limit, the more lenient the output proc_strs = "" for level in range(3, priority_flag - 1, -1): level_name = priorities.get(level, level) # print headers proc_strs = [] # skip any levels that aren't in the result if level not in result: continue # skip any empty result levels if len(result[level]) > 0: # only print priority headers at top level, i.e. 
non-child # datasets if _top_level: width = 2 * self.col_width print("\n") print('{:^{width}}'.format(level_name, width=width)) print("-" * width) data_issues = [process_table(res, check) for res in result[level]] has_printed = False for issue, reasons in data_issues: # if this isn't the first printed issue, add a newline # separating this and the previous level if has_printed: print("") # join alphabetized reasons together reason_str = "\n".join('* {}'.format(r) for r in sorted(reasons, key=lambda x: x[0])) proc_str = "{}\n{}".format(issue, reason_str) print(proc_str) proc_strs.append(proc_str) has_printed = True return "\n".join(proc_strs)
python
def reasoning_routine(self, groups, check, priority_flag=3, _top_level=True): """ print routine performed @param list groups: the Result groups @param str check: checker name @param int priority_flag: indicates the weight of the groups @param bool _top_level: indicates the level of the group so as to print out the appropriate header string """ sort_fn = lambda x: x.weight groups_sorted = sorted(groups, key=sort_fn, reverse=True) # create dict of the groups -> {level: [reasons]} result = {key: [v for v in valuesiter if v.value[0] != v.value[1]] for key, valuesiter in itertools.groupby(groups_sorted, key=sort_fn)} priorities = self.checkers[check]._cc_display_headers def process_table(res, check): """Recursively calls reasoning_routine to parse out child reasons from the parent reasons. @param Result res: Result object @param str check: checker name""" issue = res.name if not res.children: reasons = res.msgs else: child_reasons = self.reasoning_routine(res.children, check, _top_level=False) # there shouldn't be messages if there are children # is this a valid assumption? reasons = child_reasons return issue, reasons # iterate in reverse to the min priority requested; # the higher the limit, the more lenient the output proc_strs = "" for level in range(3, priority_flag - 1, -1): level_name = priorities.get(level, level) # print headers proc_strs = [] # skip any levels that aren't in the result if level not in result: continue # skip any empty result levels if len(result[level]) > 0: # only print priority headers at top level, i.e. 
non-child # datasets if _top_level: width = 2 * self.col_width print("\n") print('{:^{width}}'.format(level_name, width=width)) print("-" * width) data_issues = [process_table(res, check) for res in result[level]] has_printed = False for issue, reasons in data_issues: # if this isn't the first printed issue, add a newline # separating this and the previous level if has_printed: print("") # join alphabetized reasons together reason_str = "\n".join('* {}'.format(r) for r in sorted(reasons, key=lambda x: x[0])) proc_str = "{}\n{}".format(issue, reason_str) print(proc_str) proc_strs.append(proc_str) has_printed = True return "\n".join(proc_strs)
[ "def", "reasoning_routine", "(", "self", ",", "groups", ",", "check", ",", "priority_flag", "=", "3", ",", "_top_level", "=", "True", ")", ":", "sort_fn", "=", "lambda", "x", ":", "x", ".", "weight", "groups_sorted", "=", "sorted", "(", "groups", ",", ...
print routine performed @param list groups: the Result groups @param str check: checker name @param int priority_flag: indicates the weight of the groups @param bool _top_level: indicates the level of the group so as to print out the appropriate header string
[ "print", "routine", "performed" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/suite.py#L491-L565
train
31,489
ioos/compliance-checker
compliance_checker/suite.py
CheckSuite.process_doc
def process_doc(self, doc): """ Attempt to parse an xml string conforming to either an SOS or SensorML dataset and return the results """ xml_doc = ET.fromstring(doc) if xml_doc.tag == "{http://www.opengis.net/sos/1.0}Capabilities": ds = SensorObservationService(None, xml=doc) # SensorObservationService does not store the etree doc root, # so maybe use monkey patching here for now? ds._root = xml_doc elif xml_doc.tag == "{http://www.opengis.net/sensorML/1.0.1}SensorML": ds = SensorML(xml_doc) else: raise ValueError("Unrecognized XML root element: {}".format(xml_doc.tag)) return ds
python
def process_doc(self, doc): """ Attempt to parse an xml string conforming to either an SOS or SensorML dataset and return the results """ xml_doc = ET.fromstring(doc) if xml_doc.tag == "{http://www.opengis.net/sos/1.0}Capabilities": ds = SensorObservationService(None, xml=doc) # SensorObservationService does not store the etree doc root, # so maybe use monkey patching here for now? ds._root = xml_doc elif xml_doc.tag == "{http://www.opengis.net/sensorML/1.0.1}SensorML": ds = SensorML(xml_doc) else: raise ValueError("Unrecognized XML root element: {}".format(xml_doc.tag)) return ds
[ "def", "process_doc", "(", "self", ",", "doc", ")", ":", "xml_doc", "=", "ET", ".", "fromstring", "(", "doc", ")", "if", "xml_doc", ".", "tag", "==", "\"{http://www.opengis.net/sos/1.0}Capabilities\"", ":", "ds", "=", "SensorObservationService", "(", "None", "...
Attempt to parse an xml string conforming to either an SOS or SensorML dataset and return the results
[ "Attempt", "to", "parse", "an", "xml", "string", "conforming", "to", "either", "an", "SOS", "or", "SensorML", "dataset", "and", "return", "the", "results" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/suite.py#L568-L584
train
31,490
ioos/compliance-checker
compliance_checker/suite.py
CheckSuite.generate_dataset
def generate_dataset(self, cdl_path): ''' Use ncgen to generate a netCDF file from a .cdl file Returns the path to the generated netcdf file :param str cdl_path: Absolute path to cdl file that is used to generate netCDF file ''' if '.cdl' in cdl_path: # it's possible the filename doesn't have the .cdl extension ds_str = cdl_path.replace('.cdl', '.nc') else: ds_str = cdl_path + '.nc' subprocess.call(['ncgen', '-o', ds_str, cdl_path]) return ds_str
python
def generate_dataset(self, cdl_path): ''' Use ncgen to generate a netCDF file from a .cdl file Returns the path to the generated netcdf file :param str cdl_path: Absolute path to cdl file that is used to generate netCDF file ''' if '.cdl' in cdl_path: # it's possible the filename doesn't have the .cdl extension ds_str = cdl_path.replace('.cdl', '.nc') else: ds_str = cdl_path + '.nc' subprocess.call(['ncgen', '-o', ds_str, cdl_path]) return ds_str
[ "def", "generate_dataset", "(", "self", ",", "cdl_path", ")", ":", "if", "'.cdl'", "in", "cdl_path", ":", "# it's possible the filename doesn't have the .cdl extension", "ds_str", "=", "cdl_path", ".", "replace", "(", "'.cdl'", ",", "'.nc'", ")", "else", ":", "ds_...
Use ncgen to generate a netCDF file from a .cdl file Returns the path to the generated netcdf file :param str cdl_path: Absolute path to cdl file that is used to generate netCDF file
[ "Use", "ncgen", "to", "generate", "a", "netCDF", "file", "from", "a", ".", "cdl", "file", "Returns", "the", "path", "to", "the", "generated", "netcdf", "file" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/suite.py#L586-L598
train
31,491
ioos/compliance-checker
compliance_checker/suite.py
CheckSuite.load_dataset
def load_dataset(self, ds_str): """ Returns an instantiated instance of either a netCDF file or an SOS mapped DS object. :param str ds_str: URL of the resource to load """ # If it's a remote URL load it as a remote resource, otherwise treat it # as a local resource. pr = urlparse(ds_str) if pr.netloc: return self.load_remote_dataset(ds_str) return self.load_local_dataset(ds_str)
python
def load_dataset(self, ds_str): """ Returns an instantiated instance of either a netCDF file or an SOS mapped DS object. :param str ds_str: URL of the resource to load """ # If it's a remote URL load it as a remote resource, otherwise treat it # as a local resource. pr = urlparse(ds_str) if pr.netloc: return self.load_remote_dataset(ds_str) return self.load_local_dataset(ds_str)
[ "def", "load_dataset", "(", "self", ",", "ds_str", ")", ":", "# If it's a remote URL load it as a remote resource, otherwise treat it", "# as a local resource.", "pr", "=", "urlparse", "(", "ds_str", ")", "if", "pr", ".", "netloc", ":", "return", "self", ".", "load_re...
Returns an instantiated instance of either a netCDF file or an SOS mapped DS object. :param str ds_str: URL of the resource to load
[ "Returns", "an", "instantiated", "instance", "of", "either", "a", "netCDF", "file", "or", "an", "SOS", "mapped", "DS", "object", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/suite.py#L600-L612
train
31,492
ioos/compliance-checker
compliance_checker/suite.py
CheckSuite.load_remote_dataset
def load_remote_dataset(self, ds_str): ''' Returns a dataset instance for the remote resource, either OPeNDAP or SOS :param str ds_str: URL to the remote resource ''' if opendap.is_opendap(ds_str): return Dataset(ds_str) else: # Check if the HTTP response is XML, if it is, it's likely SOS so # we'll attempt to parse the response as SOS response = requests.get(ds_str, allow_redirects=True) if 'text/xml' in response.headers['content-type']: return self.process_doc(response.content) raise ValueError("Unknown service with content-type: {}".format(response.headers['content-type']))
python
def load_remote_dataset(self, ds_str): ''' Returns a dataset instance for the remote resource, either OPeNDAP or SOS :param str ds_str: URL to the remote resource ''' if opendap.is_opendap(ds_str): return Dataset(ds_str) else: # Check if the HTTP response is XML, if it is, it's likely SOS so # we'll attempt to parse the response as SOS response = requests.get(ds_str, allow_redirects=True) if 'text/xml' in response.headers['content-type']: return self.process_doc(response.content) raise ValueError("Unknown service with content-type: {}".format(response.headers['content-type']))
[ "def", "load_remote_dataset", "(", "self", ",", "ds_str", ")", ":", "if", "opendap", ".", "is_opendap", "(", "ds_str", ")", ":", "return", "Dataset", "(", "ds_str", ")", "else", ":", "# Check if the HTTP response is XML, if it is, it's likely SOS so", "# we'll attempt...
Returns a dataset instance for the remote resource, either OPeNDAP or SOS :param str ds_str: URL to the remote resource
[ "Returns", "a", "dataset", "instance", "for", "the", "remote", "resource", "either", "OPeNDAP", "or", "SOS" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/suite.py#L614-L630
train
31,493
ioos/compliance-checker
compliance_checker/suite.py
CheckSuite.load_local_dataset
def load_local_dataset(self, ds_str): ''' Returns a dataset instance for the local resource :param ds_str: Path to the resource ''' if cdl.is_cdl(ds_str): ds_str = self.generate_dataset(ds_str) if netcdf.is_netcdf(ds_str): return MemoizedDataset(ds_str) # Assume this is just a Generic File if it exists if os.path.isfile(ds_str): return GenericFile(ds_str) raise ValueError("File is an unknown format")
python
def load_local_dataset(self, ds_str): ''' Returns a dataset instance for the local resource :param ds_str: Path to the resource ''' if cdl.is_cdl(ds_str): ds_str = self.generate_dataset(ds_str) if netcdf.is_netcdf(ds_str): return MemoizedDataset(ds_str) # Assume this is just a Generic File if it exists if os.path.isfile(ds_str): return GenericFile(ds_str) raise ValueError("File is an unknown format")
[ "def", "load_local_dataset", "(", "self", ",", "ds_str", ")", ":", "if", "cdl", ".", "is_cdl", "(", "ds_str", ")", ":", "ds_str", "=", "self", ".", "generate_dataset", "(", "ds_str", ")", "if", "netcdf", ".", "is_netcdf", "(", "ds_str", ")", ":", "retu...
Returns a dataset instance for the local resource :param ds_str: Path to the resource
[ "Returns", "a", "dataset", "instance", "for", "the", "local", "resource" ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/suite.py#L632-L648
train
31,494
ioos/compliance-checker
compliance_checker/suite.py
CheckSuite._group_raw
def _group_raw(self, raw_scores, cur=None, level=1): """ Internal recursive method to group raw scores into a cascading score summary. Only top level items are tallied for scores. @param list raw_scores: list of raw scores (Result objects) """ # BEGIN INTERNAL FUNCS ######################################## def trim_groups(r): if isinstance(r.name, tuple) or isinstance(r.name, list): new_name = r.name[1:] else: new_name = [] return Result(r.weight, r.value, new_name, r.msgs) # CHECK FOR TERMINAL CONDITION: all raw_scores.name are single length # @TODO could have a problem here with scalar name, but probably still works terminal = [len(x.name) for x in raw_scores] if terminal == [0] * len(raw_scores): return [] def group_func(r): """ Takes a Result object and slices off the first element of its name if its's a tuple. Otherwise, does nothing to the name. Returns the Result's name and weight in a tuple to be used for sorting in that order in a groupby function. @param Result r @return tuple (str, int) """ if isinstance(r.name, tuple) or isinstance(r.name, list): if len(r.name) == 0: retval = '' else: retval = r.name[0:1][0] else: retval = r.name return retval, r.weight # END INTERNAL FUNCS ########################################## # NOTE until this point, *ALL* Results in raw_scores are # individual Result objects. 
# sort then group by name, then by priority weighting grouped = itertools.groupby(sorted(raw_scores, key=group_func), key=group_func) # NOTE: post-grouping, grouped looks something like # [(('Global Attributes', 1), <itertools._grouper at 0x7f10982b5390>), # (('Global Attributes', 3), <itertools._grouper at 0x7f10982b5438>), # (('Not a Global Attr', 1), <itertools._grouper at 0x7f10982b5470>)] # (('Some Variable', 2), <itertools._grouper at 0x7f10982b5400>), ret_val = [] for k, v in grouped: # iterate through the grouped tuples k = k[0] # slice ("name", weight_val) --> "name" v = list(v) # from itertools._grouper to list cv = self._group_raw(list(map(trim_groups, v)), k, level + 1) if len(cv): # if this node has children, max weight of children + sum of all the scores max_weight = max([x.weight for x in cv]) sum_scores = tuple(map(sum, list(zip(*([x.value for x in cv]))))) msgs = [] else: max_weight = max([x.weight for x in v]) sum_scores = tuple(map(sum, list(zip(*([self._translate_value(x.value) for x in v]))))) msgs = sum([x.msgs for x in v], []) ret_val.append(Result(name=k, weight=max_weight, value=sum_scores, children=cv, msgs=msgs)) return ret_val
python
def _group_raw(self, raw_scores, cur=None, level=1): """ Internal recursive method to group raw scores into a cascading score summary. Only top level items are tallied for scores. @param list raw_scores: list of raw scores (Result objects) """ # BEGIN INTERNAL FUNCS ######################################## def trim_groups(r): if isinstance(r.name, tuple) or isinstance(r.name, list): new_name = r.name[1:] else: new_name = [] return Result(r.weight, r.value, new_name, r.msgs) # CHECK FOR TERMINAL CONDITION: all raw_scores.name are single length # @TODO could have a problem here with scalar name, but probably still works terminal = [len(x.name) for x in raw_scores] if terminal == [0] * len(raw_scores): return [] def group_func(r): """ Takes a Result object and slices off the first element of its name if its's a tuple. Otherwise, does nothing to the name. Returns the Result's name and weight in a tuple to be used for sorting in that order in a groupby function. @param Result r @return tuple (str, int) """ if isinstance(r.name, tuple) or isinstance(r.name, list): if len(r.name) == 0: retval = '' else: retval = r.name[0:1][0] else: retval = r.name return retval, r.weight # END INTERNAL FUNCS ########################################## # NOTE until this point, *ALL* Results in raw_scores are # individual Result objects. 
# sort then group by name, then by priority weighting grouped = itertools.groupby(sorted(raw_scores, key=group_func), key=group_func) # NOTE: post-grouping, grouped looks something like # [(('Global Attributes', 1), <itertools._grouper at 0x7f10982b5390>), # (('Global Attributes', 3), <itertools._grouper at 0x7f10982b5438>), # (('Not a Global Attr', 1), <itertools._grouper at 0x7f10982b5470>)] # (('Some Variable', 2), <itertools._grouper at 0x7f10982b5400>), ret_val = [] for k, v in grouped: # iterate through the grouped tuples k = k[0] # slice ("name", weight_val) --> "name" v = list(v) # from itertools._grouper to list cv = self._group_raw(list(map(trim_groups, v)), k, level + 1) if len(cv): # if this node has children, max weight of children + sum of all the scores max_weight = max([x.weight for x in cv]) sum_scores = tuple(map(sum, list(zip(*([x.value for x in cv]))))) msgs = [] else: max_weight = max([x.weight for x in v]) sum_scores = tuple(map(sum, list(zip(*([self._translate_value(x.value) for x in v]))))) msgs = sum([x.msgs for x in v], []) ret_val.append(Result(name=k, weight=max_weight, value=sum_scores, children=cv, msgs=msgs)) return ret_val
[ "def", "_group_raw", "(", "self", ",", "raw_scores", ",", "cur", "=", "None", ",", "level", "=", "1", ")", ":", "# BEGIN INTERNAL FUNCS ########################################", "def", "trim_groups", "(", "r", ")", ":", "if", "isinstance", "(", "r", ".", "nam...
Internal recursive method to group raw scores into a cascading score summary. Only top level items are tallied for scores. @param list raw_scores: list of raw scores (Result objects)
[ "Internal", "recursive", "method", "to", "group", "raw", "scores", "into", "a", "cascading", "score", "summary", ".", "Only", "top", "level", "items", "are", "tallied", "for", "scores", "." ]
ee89c27b0daade58812489a2da3aa3b6859eafd9
https://github.com/ioos/compliance-checker/blob/ee89c27b0daade58812489a2da3aa3b6859eafd9/compliance_checker/suite.py#L658-L733
train
31,495
Preston-Landers/concurrent-log-handler
src/concurrent_log_handler/queue.py
get_all_logger_names
def get_all_logger_names(include_root=False): """Return ``list`` of names of all loggers than have been accessed. Warning: this is sensitive to internal structures in the standard logging module. """ # noinspection PyUnresolvedReferences rv = list(logging.Logger.manager.loggerDict.keys()) if include_root: rv.insert(0, '') return rv
python
def get_all_logger_names(include_root=False): """Return ``list`` of names of all loggers than have been accessed. Warning: this is sensitive to internal structures in the standard logging module. """ # noinspection PyUnresolvedReferences rv = list(logging.Logger.manager.loggerDict.keys()) if include_root: rv.insert(0, '') return rv
[ "def", "get_all_logger_names", "(", "include_root", "=", "False", ")", ":", "# noinspection PyUnresolvedReferences", "rv", "=", "list", "(", "logging", ".", "Logger", ".", "manager", ".", "loggerDict", ".", "keys", "(", ")", ")", "if", "include_root", ":", "rv...
Return ``list`` of names of all loggers than have been accessed. Warning: this is sensitive to internal structures in the standard logging module.
[ "Return", "list", "of", "names", "of", "all", "loggers", "than", "have", "been", "accessed", "." ]
8e0b8e28c2b12e854853d723b3c28346a3218914
https://github.com/Preston-Landers/concurrent-log-handler/blob/8e0b8e28c2b12e854853d723b3c28346a3218914/src/concurrent_log_handler/queue.py#L83-L92
train
31,496
Preston-Landers/concurrent-log-handler
src/concurrent_log_handler/queue.py
queuify_logger
def queuify_logger(logger, queue_handler, queue_listener): """Replace logger's handlers with a queue handler while adding existing handlers to a queue listener. This is useful when you want to use a default logging config but then optionally add a logger's handlers to a queue during runtime. Args: logger (mixed): Logger instance or string name of logger to queue-ify handlers. queue_handler (QueueHandler): Instance of a ``QueueHandler``. queue_listener (QueueListener): Instance of a ``QueueListener``. """ if isinstance(logger, str): logger = logging.getLogger(logger) # Get handlers that aren't being listened for. handlers = [handler for handler in logger.handlers if handler not in queue_listener.handlers] if handlers: # The default QueueListener stores handlers as a tuple. queue_listener.handlers = \ tuple(list(queue_listener.handlers) + handlers) # Remove logger's handlers and replace with single queue handler. del logger.handlers[:] logger.addHandler(queue_handler)
python
def queuify_logger(logger, queue_handler, queue_listener): """Replace logger's handlers with a queue handler while adding existing handlers to a queue listener. This is useful when you want to use a default logging config but then optionally add a logger's handlers to a queue during runtime. Args: logger (mixed): Logger instance or string name of logger to queue-ify handlers. queue_handler (QueueHandler): Instance of a ``QueueHandler``. queue_listener (QueueListener): Instance of a ``QueueListener``. """ if isinstance(logger, str): logger = logging.getLogger(logger) # Get handlers that aren't being listened for. handlers = [handler for handler in logger.handlers if handler not in queue_listener.handlers] if handlers: # The default QueueListener stores handlers as a tuple. queue_listener.handlers = \ tuple(list(queue_listener.handlers) + handlers) # Remove logger's handlers and replace with single queue handler. del logger.handlers[:] logger.addHandler(queue_handler)
[ "def", "queuify_logger", "(", "logger", ",", "queue_handler", ",", "queue_listener", ")", ":", "if", "isinstance", "(", "logger", ",", "str", ")", ":", "logger", "=", "logging", ".", "getLogger", "(", "logger", ")", "# Get handlers that aren't being listened for."...
Replace logger's handlers with a queue handler while adding existing handlers to a queue listener. This is useful when you want to use a default logging config but then optionally add a logger's handlers to a queue during runtime. Args: logger (mixed): Logger instance or string name of logger to queue-ify handlers. queue_handler (QueueHandler): Instance of a ``QueueHandler``. queue_listener (QueueListener): Instance of a ``QueueListener``.
[ "Replace", "logger", "s", "handlers", "with", "a", "queue", "handler", "while", "adding", "existing", "handlers", "to", "a", "queue", "listener", "." ]
8e0b8e28c2b12e854853d723b3c28346a3218914
https://github.com/Preston-Landers/concurrent-log-handler/blob/8e0b8e28c2b12e854853d723b3c28346a3218914/src/concurrent_log_handler/queue.py#L95-L123
train
31,497
Preston-Landers/concurrent-log-handler
src/concurrent_log_handler/__init__.py
ConcurrentRotatingFileHandler._alter_umask
def _alter_umask(self): """Temporarily alter umask to custom setting, if applicable""" if self.umask is None: yield # nothing to do else: prev_umask = os.umask(self.umask) try: yield finally: os.umask(prev_umask)
python
def _alter_umask(self): """Temporarily alter umask to custom setting, if applicable""" if self.umask is None: yield # nothing to do else: prev_umask = os.umask(self.umask) try: yield finally: os.umask(prev_umask)
[ "def", "_alter_umask", "(", "self", ")", ":", "if", "self", ".", "umask", "is", "None", ":", "yield", "# nothing to do", "else", ":", "prev_umask", "=", "os", ".", "umask", "(", "self", ".", "umask", ")", "try", ":", "yield", "finally", ":", "os", "....
Temporarily alter umask to custom setting, if applicable
[ "Temporarily", "alter", "umask", "to", "custom", "setting", "if", "applicable" ]
8e0b8e28c2b12e854853d723b3c28346a3218914
https://github.com/Preston-Landers/concurrent-log-handler/blob/8e0b8e28c2b12e854853d723b3c28346a3218914/src/concurrent_log_handler/__init__.py#L262-L271
train
31,498
panosl/django-currencies
currencies/management/commands/currencies.py
Command.get_imports
def get_imports(self, option): """ See if we have been passed a set of currencies or a setting variable or look for settings CURRENCIES or SHOP_CURRENCIES. """ if option: if len(option) == 1 and option[0].isupper() and len(option[0]) > 3: return getattr(settings, option[0]) else: codes = [e for e in option if e.isupper() and len(e) == 3] if len(codes) != len(option): raise ImproperlyConfigured("Invalid currency codes found: %s" % codes) return codes for attr in ('CURRENCIES', 'SHOP_CURRENCIES'): try: return getattr(settings, attr) except AttributeError: continue return option
python
def get_imports(self, option): """ See if we have been passed a set of currencies or a setting variable or look for settings CURRENCIES or SHOP_CURRENCIES. """ if option: if len(option) == 1 and option[0].isupper() and len(option[0]) > 3: return getattr(settings, option[0]) else: codes = [e for e in option if e.isupper() and len(e) == 3] if len(codes) != len(option): raise ImproperlyConfigured("Invalid currency codes found: %s" % codes) return codes for attr in ('CURRENCIES', 'SHOP_CURRENCIES'): try: return getattr(settings, attr) except AttributeError: continue return option
[ "def", "get_imports", "(", "self", ",", "option", ")", ":", "if", "option", ":", "if", "len", "(", "option", ")", "==", "1", "and", "option", "[", "0", "]", ".", "isupper", "(", ")", "and", "len", "(", "option", "[", "0", "]", ")", ">", "3", ...
See if we have been passed a set of currencies or a setting variable or look for settings CURRENCIES or SHOP_CURRENCIES.
[ "See", "if", "we", "have", "been", "passed", "a", "set", "of", "currencies", "or", "a", "setting", "variable", "or", "look", "for", "settings", "CURRENCIES", "or", "SHOP_CURRENCIES", "." ]
8d4c6c202ad7c4cc06263ab2c1b1f969bbe99acd
https://github.com/panosl/django-currencies/blob/8d4c6c202ad7c4cc06263ab2c1b1f969bbe99acd/currencies/management/commands/currencies.py#L47-L65
train
31,499