_id stringlengths 2 7 | title stringlengths 3 140 | partition stringclasses 3
values | text stringlengths 73 34.1k | language stringclasses 1
value | meta_information dict |
|---|---|---|---|---|---|
q175500 | InvCatalogRef.release | test | public void release() {
// Drop the state read from the referenced catalog so it can be garbage
// collected and re-read on the next access.
datasets = new java.util.ArrayList<>();
proxy = null;
useProxy = false;
init = false;
} | java | {
"resource": ""
} |
q175501 | StructureDataDeep.copy | test | static public StructureDataDeep copy(StructureData sdata, StructureMembers members) {
// Deep-copy a StructureData into a self-contained big-endian byte buffer,
// detaching it from whatever backing storage it came from.
ArrayStructureBB abb = copyToArrayBB(sdata, members, ByteOrder.BIG_ENDIAN);
return new StructureDataDeep(abb);
} | java | {
"resource": ""
} |
q175502 | StructureDataDeep.copyToArrayBB | test | static public ArrayStructureBB copyToArrayBB(ArrayStructure as, ByteOrder bo, boolean canonical) throws IOException {
// Copy an ArrayStructure into a canonically-packed ArrayStructureBB.
// If 'canonical' is false and the input is already an ArrayStructureBB with a
// compatible byte order, it is returned as-is (no copy).
if (!canonical && as.getClass().equals(ArrayStructureBB.class)) { // no subclasses, LOOK detect already canonical later
ArrayStructureBB abb = (ArrayStructureBB) as;
ByteBuffer bb = abb.getByteBuffer();
if (bo == null || bo.equals(bb.order()))
return abb;
}
// make a fresh copy of the members so offsets can be recomputed without
// mutating the source's StructureMembers
StructureMembers smo = as.getStructureMembers();
StructureMembers sm = new StructureMembers(smo);
ArrayStructureBB abb = new ArrayStructureBB(sm, as.getShape());
ArrayStructureBB.setOffsets(sm); // this makes the packing canonical
if (bo != null) {
ByteBuffer bb = abb.getByteBuffer();
bb.order(bo);
}
// copy record by record; iterator is closed by try-with-resources
try (StructureDataIterator iter = as.getStructureDataIterator()) {
while (iter.hasNext())
copyToArrayBB(iter.next(), abb);
}
return abb;
} | java | {
"resource": ""
} |
q175503 | StructureDataDeep.copyToArrayBB | test | static public ArrayStructureBB copyToArrayBB(Structure s, ArrayStructure as, ByteOrder bo) throws IOException {
// Copy the data of an ArrayStructure into a canonically-packed
// ArrayStructureBB, using members derived from the given Structure.
StructureMembers sm = s.makeStructureMembers();
ArrayStructureBB abb = new ArrayStructureBB(sm, as.getShape());
ArrayStructureBB.setOffsets(sm);
// a null byte order keeps the buffer's default ordering
if (bo != null) {
ByteBuffer bb = abb.getByteBuffer();
bb.order(bo);
}
try (StructureDataIterator iter = as.getStructureDataIterator()) {
while (iter.hasNext())
copyToArrayBB(iter.next(), abb);
}
return abb;
} | java | {
"resource": ""
} |
q175504 | StructureDataDeep.copyToArrayBB | test | static public ArrayStructureBB copyToArrayBB(StructureData sdata) {
// Convenience overload: copy one StructureData using a fresh copy of its own
// members and big-endian packing.
return copyToArrayBB(sdata, new StructureMembers(sdata.getStructureMembers()), ByteOrder.BIG_ENDIAN);
} | java | {
"resource": ""
} |
q175505 | StructureDataDeep.copyToArrayBB | test | static public ArrayStructureBB copyToArrayBB(StructureData sdata, StructureMembers sm, ByteOrder bo) {
// Copy a single StructureData into a new single-element (shape {1})
// ArrayStructureBB with canonical member offsets and the given byte order.
int size = sm.getStructureSize();
ByteBuffer bb = ByteBuffer.allocate(size); // default is big endian
bb.order(bo);
ArrayStructureBB abb = new ArrayStructureBB(sm, new int[]{1}, bb, 0);
ArrayStructureBB.setOffsets(sm);
copyToArrayBB(sdata, abb);
return abb;
} | java | {
"resource": ""
} |
q175506 | NwsMetDevTables.getForecastTimeIntervalSizeInHours | test | @Override
public double getForecastTimeIntervalSizeInHours(Grib2Pds pds) {
// Return the forecast time-interval length in hours, overriding the default
// with 12.0 when a time interval uses the "missing" time range unit (255).
Grib2Pds.PdsInterval pdsIntv = (Grib2Pds.PdsInterval) pds;
// override here only if timeRangeUnit = 255
// BUG FIX: the original loop overwrote needOverride on every iteration, so
// only the LAST interval was inspected; check whether ANY interval has
// timeRangeUnit == 255, per the comment above.
boolean needOverride = false;
for (Grib2Pds.TimeInterval ti : pdsIntv.getTimeIntervals()) {
if (ti.timeRangeUnit == 255) {
needOverride = true;
break;
}
}
if (!needOverride)
return super.getForecastTimeIntervalSizeInHours(pds);
return 12.0;
} | java | {
"resource": ""
} |
q175507 | CoordinateAxis1D.section | test | public CoordinateAxis1D section(Range r) throws InvalidRangeException {
Section section = new Section().appendRange(r);
CoordinateAxis1D result = (CoordinateAxis1D) section(section);
int len = r.length();
// deal with the midpoints, bounds
if (isNumeric()) {
double[] new_mids = new double[len];
for (int idx = 0; idx < len; idx++) {
int old_idx = r.element(idx);
new_mids[idx] = coords[old_idx];
}
result.coords = new_mids;
if (isInterval) {
double[] new_bound1 = new double[len];
double[] new_bound2 = new double[len];
double[] new_edge = new double[len + 1];
for (int idx = 0; idx < len; idx++) {
int old_idx = r.element(idx);
new_bound1[idx] = bound1[old_idx];
new_bound2[idx] = bound2[old_idx];
new_edge[idx] = bound1[old_idx];
new_edge[idx + 1] = bound2[old_idx]; // all but last are overwritten
}
result.bound1 = new_bound1;
result.bound2 = new_bound2;
result.edge = new_edge;
} else {
double[] new_edge = new double[len + 1];
for (int idx = 0; idx < len; idx++) {
int old_idx = r.element(idx);
new_edge[idx] = edge[old_idx];
new_edge[idx + 1] = edge[old_idx + 1]; // all but last are overwritten
}
result.edge = new_edge;
}
}
if (names != null) {
String[] new_names = new String[len];
for (int idx = 0; idx < len; idx++) {
int old_idx = r.element(idx);
new_names[idx] = names[old_idx];
}
result.names = new_names;
}
result.wasCalcRegular = false;
result.calcIsRegular();
return result;
} | java | {
"resource": ""
} |
q175508 | TableAnalyzer.factory | test | static public TableAnalyzer factory(TableConfigurer tc, FeatureType wantFeatureType, NetcdfDataset ds) throws IOException {
// Build a TableAnalyzer for this dataset, record convention advice in
// userAdvice, and run the table analysis for the wanted feature type.
// Create a TableAnalyzer with this TableConfigurer (may be null)
TableAnalyzer analyzer = new TableAnalyzer(ds, tc);
if (tc != null) {
if (tc.getConvName() == null)
analyzer.userAdvice.format(" No 'Conventions' global attribute.%n");
else
analyzer.userAdvice.format(" Conventions global attribute = %s %n", tc.getConvName());
// add the convention name used
if (tc.getConvUsed() != null) {
analyzer.setConventionUsed(tc.getConvUsed());
if (!tc.getConvUsed().equals(tc.getConvName()))
// CONSISTENCY FIX: use Formatter argument substitution instead of
// string concatenation, matching every other format call here
analyzer.userAdvice.format(" TableConfigurer used = %s.%n", tc.getConvUsed());
}
} else {
analyzer.userAdvice.format(" No TableConfigurer found, using default analysis.%n");
}
// construct the nested table object
analyzer.analyze(wantFeatureType);
return analyzer;
} | java | {
"resource": ""
} |
q175509 | TableAnalyzer.getFirstFeatureType | test | public FeatureType getFirstFeatureType() {
// Scan the leaf tables in order and return the feature type of the first
// one that has coordinates; null if none qualifies.
for (NestedTable table : leaves) {
if (!table.hasCoords())
continue;
return table.getFeatureType();
}
return null;
} | java | {
"resource": ""
} |
q175510 | TableAnalyzer.analyze | test | private void analyze(FeatureType wantFeatureType) throws IOException {
// Build the table configuration (either from the TableConfigurer or by
// default analysis), then collect the leaf tables into 'leaves'.
// for netcdf-3 files, convert record dimension to structure
// LOOK may be problems when served via opendap
// NOTE(review): unboxing assumes sendIospMessage never returns null here — confirm
boolean structAdded = (Boolean) ds.sendIospMessage(NetcdfFile.IOSP_MESSAGE_ADD_RECORD_STRUCTURE);
if (tc == null) {
makeTablesDefault(structAdded);
makeNestedTables();
} else {
configResult = tc.getConfig(wantFeatureType, ds, errlog);
if (configResult != null)
addTableRecurse( configResult); // kinda stupid
else { // use default
makeTablesDefault(structAdded);
makeNestedTables();
}
}
// find the leaves
for (TableConfig config : tableSet) {
if (config.children == null) { // its a leaf
NestedTable flatTable = new NestedTable(ds, config, errlog);
leaves.add(flatTable);
}
}
if (PointDatasetStandardFactory.showTables)
getDetailInfo( new Formatter( System.out));
} | java | {
"resource": ""
} |
q175511 | McIDASGridServiceProvider.sync | test | public boolean sync() {
// Re-read the grid index and rebuild the netcdf file objects.
// Returns false if the reader cannot (re)initialize or an IO error occurs.
try {
if (!mcGridReader.init()) {
return false;
}
GridIndex index = mcGridReader.getGridIndex();
// reconstruct the ncfile objects
ncfile.empty();
open(index, null);
return true;
} catch (IOException ioe) {
// best-effort: a failed sync leaves the previous state reported as stale
return false;
}
} | java | {
"resource": ""
} |
q175512 | GradsUtil.getGaussianLatitudes | test | public static double[] getGaussianLatitudes(String type, int start,
int num)
throws IllegalArgumentException {
// Return 'num' gaussian latitudes for the named grid type, starting at the
// one-based index 'start'.
// @throws IllegalArgumentException for an unknown type or an out-of-range request
double[] baseArray;
start--; // it's one based
if (type.equalsIgnoreCase(GAUST62)) {
baseArray = gltst62;
} else if (type.equalsIgnoreCase(GAUSR15)) {
baseArray = glts15;
} else if (type.equalsIgnoreCase(GAUSR20)) {
baseArray = glts20;
} else if (type.equalsIgnoreCase(GAUSR30)) {
baseArray = glts30;
} else if (type.equalsIgnoreCase(GAUSR40)) {
baseArray = glats;
} else {
throw new IllegalArgumentException("Unsupported type: " + type);
}
// ROBUSTNESS: reject a negative range explicitly instead of letting the
// copy below throw ArrayIndexOutOfBoundsException
if (start < 0 || num < 0) {
throw new IllegalArgumentException("start must be >= 1 and num >= 0");
}
if (start + num > baseArray.length) {
throw new IllegalArgumentException("Maximum " + baseArray.length
+ " latitudes exceeded");
}
double[] retVals = new double[num];
// IDIOM: bulk copy instead of an element-by-element loop
System.arraycopy(baseArray, start, retVals, 0, num);
return retVals;
} | java | {
"resource": ""
} |
q175513 | Swap.swapLong | test | static public long swapLong(byte[] b, int offset) {
// Assemble a little-endian 64-bit value: b[offset] is the least
// significant byte, b[offset + 7] the most significant.
long result = 0;
for (int i = 7; i >= 0; i--) {
result = (result << 8) | (b[offset + i] & 0xffL);
}
return result;
} | java | {
"resource": ""
} |
q175514 | Swap.swapFloat | test | static public float swapFloat(byte[] b, int offset) {
// Read 4 bytes little-endian and reinterpret the bit pattern as a float.
int bits = 0;
for (int i = 3; i >= 0; i--) {
bits = (bits << 8) | (b[offset + i] & 0xff);
}
return Float.intBitsToFloat(bits);
} | java | {
"resource": ""
} |
q175515 | Swap.swapChar | test | static public char swapChar(byte[] b, int offset) {
// Read 2 bytes little-endian: b[offset] is the low byte, b[offset + 1] the high.
int value = (b[offset] & 0xff) | ((b[offset + 1] & 0xff) << 8);
return (char) value;
} | java | {
"resource": ""
} |
q175516 | CSMConvention.findCoordinateAxes | test | protected void findCoordinateAxes(NetcdfDataset ds) {
// Honor the CF 'coordinates' attribute as an alias for _CoordinateAxes,
// then delegate to the default axis discovery.
// coordinates is an alias for _CoordinateAxes
for (VarProcess vp : varList) {
if (vp.coordAxes == null) { // dont override if already set
String coordsString = ds.findAttValueIgnoreCase(vp.v, CF.COORDINATES, null);
if (coordsString != null) {
// NOTE(review): the guard checks coordAxes but this assigns
// coordinates — presumably intentional (different fields); confirm
vp.coordinates = coordsString;
}
}
}
super.findCoordinateAxes(ds);
} | java | {
"resource": ""
} |
q175517 | CSMConvention.addParameter2 | test | protected boolean addParameter2(CoordinateTransform rs, String paramName, NetcdfFile ds, AttributeContainer v, String attName, boolean readData) {
// Add a transform parameter whose value comes from another variable named by
// attribute 'attName'. If readData is true the variable's data (as doubles)
// becomes the parameter value, otherwise the variable name itself does.
// Returns false (with a parseInfo message) if the attribute or variable is missing.
String varName;
if (null == (varName = v.findAttValueIgnoreCase(attName, null))) {
parseInfo.format("CSMConvention No Attribute named %s%n", attName);
return false;
}
varName = varName.trim();
Variable dataVar;
if (null == (dataVar = ds.findVariable(varName))) {
parseInfo.format("CSMConvention No Variable named %s%n", varName);
return false;
}
if (readData) {
Array data;
try {
data = dataVar.read();
} catch (IOException e) {
// read failure is reported, not rethrown: parameter is simply skipped
parseInfo.format("CSMConvention failed on read of %s err= %s%n", varName, e.getMessage());
return false;
}
double[] vals = (double []) data.get1DJavaArray(DataType.DOUBLE);
rs.addParameter(new Parameter(paramName, vals));
} else
rs.addParameter(new Parameter(paramName, varName));
return true;
} | java | {
"resource": ""
} |
q175518 | Group.commonParent | test | public Group commonParent(Group other) {
// Find the closest group that is an ancestor (or self) of both this and other.
// NOTE(review): assumes both groups belong to the same tree; if they do not,
// the walk can reach a null parent and NPE inside isParent — confirm callers
// only pass groups from the same file.
if (isParent(other)) return this;
if (other.isParent(this)) return other;
while (!other.isParent(this))
other = other.getParentGroup();
return other;
} | java | {
"resource": ""
} |
q175519 | Group.isParent | test | public boolean isParent(Group other) {
// Walk 'other' up its parent chain; true if this group is an ancestor of
// other (or other itself).
Group g = other;
while (g != this && g.getParentGroup() != null) {
g = g.getParentGroup();
}
return g == this;
} | java | {
"resource": ""
} |
q175520 | Group.setParentGroup | test | public void setParentGroup(Group parent) {
// Set this group's parent; a null parent means the file's root group.
if (immutable) throw new IllegalStateException("Cant modify");
super.setParentGroup(parent == null ? ncfile.getRootGroup() : parent);
} | java | {
"resource": ""
} |
q175521 | Group.addDimension | test | public void addDimension(Dimension dim) {
// Add a shared dimension to this group; its name must be unique here.
// @throws IllegalStateException if the group is immutable
// @throws IllegalArgumentException if dim is unshared or the name collides
if (immutable) throw new IllegalStateException("Cant modify");
if (!dim.isShared()) {
throw new IllegalArgumentException("Dimensions added to a group must be shared.");
}
if (findDimensionLocal(dim.getShortName()) != null)
throw new IllegalArgumentException("Dimension name (" + dim.getShortName() + ") must be unique within Group " + getShortName());
dimensions.add(dim);
dim.setGroup(this);
} | java | {
"resource": ""
} |
q175522 | Group.addDimensionIfNotExists | test | public boolean addDimensionIfNotExists(Dimension dim) {
// Like addDimension, but a name collision returns false instead of throwing.
// Returns true only if the dimension was actually added.
if (immutable) throw new IllegalStateException("Cant modify");
if (!dim.isShared()) {
throw new IllegalArgumentException("Dimensions added to a group must be shared.");
}
if (findDimensionLocal(dim.getShortName()) != null)
return false;
dimensions.add(dim);
dim.setGroup(this);
return true;
} | java | {
"resource": ""
} |
q175523 | Group.addGroup | test | public void addGroup(Group g) {
// Add a nested group; its name must be unique within this group.
if (immutable) throw new IllegalStateException("Cant modify");
if (findGroup(g.getShortName()) != null)
throw new IllegalArgumentException("Group name (" + g.getShortName() + ") must be unique within Group " + getShortName());
groups.add(g);
g.setParentGroup(this); // groups are a tree - only one parent
} | java | {
"resource": ""
} |
q175524 | Group.addEnumeration | test | public void addEnumeration(EnumTypedef e) {
// Add an enum typedef to this group; a null argument is silently ignored.
if (immutable) throw new IllegalStateException("Cant modify");
if (e == null) return;
e.setParentGroup(this);
enumTypedefs.add(e);
} | java | {
"resource": ""
} |
q175525 | Group.addVariable | test | public void addVariable(Variable v) {
// Add a variable to this group; its name must be unique here.
// A null argument is silently ignored.
if (immutable) throw new IllegalStateException("Cant modify");
if (v == null) return;
if (findVariable(v.getShortName()) != null) {
//Variable other = findVariable(v.getShortName()); // debug
throw new IllegalArgumentException("Variable name (" + v.getShortName() + ") must be unique within Group " + getShortName());
}
variables.add(v);
v.setParentGroup(this); // variable can only be in one group
} | java | {
"resource": ""
} |
q175526 | Group.removeDimension | test | public boolean removeDimension(String dimName) {
// Remove the first dimension with the given short name.
// Returns true if one was found and removed.
if (immutable) throw new IllegalStateException("Cant modify");
java.util.Iterator<Dimension> it = dimensions.iterator();
while (it.hasNext()) {
if (dimName.equals(it.next().getShortName())) {
it.remove();
return true;
}
}
return false;
} | java | {
"resource": ""
} |
q175527 | Group.makeRelativeGroup | test | public Group makeRelativeGroup(NetcdfFile ncf, String path, boolean ignorelast) {
// Resolve (creating as needed) the group named by a '/'-separated path,
// relative to this group or, when the path is absolute, to the root group.
// When ignorelast is true the final path piece is skipped (e.g. a variable name).
path = path.trim();
// NOTE(review): replace runs one pass, so "///" collapses to "//" not "/" — confirm inputs
path = path.replace("//", "/");
boolean isabsolute = (path.charAt(0) == '/');
if (isabsolute)
path = path.substring(1);
// iteratively create path
String pieces[] = path.split("/");
if (ignorelast) pieces[pieces.length - 1] = null;
Group current = (isabsolute ? ncfile.getRootGroup() : this);
for (String name : pieces) {
if (name == null) continue;
String clearname = NetcdfFile.makeNameUnescaped(name); //??
Group next = current.findGroup(clearname);
if (next == null) {
// create missing intermediate groups on the fly
next = new Group(ncf, current, clearname);
current.addGroup(next);
}
current = next;
}
return current;
} | java | {
"resource": ""
} |
q175528 | DDS.convertDDSAliasFieldsToDASAliasFields | test | private String convertDDSAliasFieldsToDASAliasFields(String attribute) throws MalformedAliasException
{
// Map a DDS alias field into DAS space: if the alias does not name a
// top-level DDS variable, prefix it with the special "loose ends" table
// that holds the DDS's own top-level attributes.
String prefix = "";
Vector aNames = tokenizeAliasField(attribute);
// We know that the first token should be a dot, we look at the
// second token to see if it references a variable in the DDS.
String topName = (String) aNames.get(1);
boolean foundIt = false;
Enumeration e = getVariables();
while (e.hasMoreElements()) {
BaseType bt = (BaseType) e.nextElement();
String normName = normalize(bt.getEncodedName());
if (topName.equals(normName))
foundIt = true;
}
if (!foundIt) {
// The Attribute referenced is at the top level of the DDS itself.
// The Attributes at the top level of the DDS get repackaged into
// a special AttributeTable, this makes the Aliases that point to
// any of these Attribute resolve correctly.
prefix = "." + getLooseEndsTableName();
}
return (prefix + attribute);
} | java | {
"resource": ""
} |
q175529 | DDS.printDAS | test | public void printDAS(PrintWriter pw)
{
// Print the DAS derived from this DDS; on failure, print the error message
// to the same writer instead of throwing.
DAS myDAS = null;
try {
myDAS = this.getDAS();
myDAS.print(pw);
} catch (DASException dasE) {
pw.println("\n\nCould not get a DAS object to print!\n" +
"DDS.getDAS() threw an Exception. Message: \n" +
dasE.getMessage());
}
} | java | {
"resource": ""
} |
q175530 | DDS.getVariable | test | public BaseType getVariable(String name) throws NoSuchVariableException
{
// Look up a variable by (possibly dotted) name and return the innermost match.
// NOTE(review): presumably search() throws NoSuchVariableException when the
// name is absent; otherwise pop() on an empty stack would throw
// EmptyStackException instead — verify search's contract.
Stack s = new Stack();
s = search(name, s);
return (BaseType) s.pop();
} | java | {
"resource": ""
} |
q175531 | GeotiffWriter.writeGrid | test | public void writeGrid(GridDataset dataset, GridDatatype grid, Array data, boolean greyScale) throws IOException {
// Write one grid slice as a GeoTIFF page. Requires regular, evenly spaced
// 1D x/y axes; flips the data vertically so rows run top-to-bottom.
GridCoordSystem gcs = grid.getCoordinateSystem();
if (!gcs.isRegularSpatial()) {
throw new IllegalArgumentException("Must have 1D x and y axes for " + grid.getFullName());
}
CoordinateAxis1D xaxis = (CoordinateAxis1D) gcs.getXHorizAxis();
CoordinateAxis1D yaxis = (CoordinateAxis1D) gcs.getYHorizAxis();
// units may need to be scaled to meters
double scaler = (xaxis.getUnitsString().equalsIgnoreCase("km")) ? 1000.0 : 1.0;
// data must go from top to bottom
double xStart = xaxis.getCoordEdge(0) * scaler;
double yStart = yaxis.getCoordEdge(0) * scaler;
double xInc = xaxis.getIncrement() * scaler;
double yInc = Math.abs(yaxis.getIncrement()) * scaler;
if (yaxis.getCoordValue(0) < yaxis.getCoordValue(1)) {
// y ascends: flip rows and start from the top edge
data = data.flip(0);
yStart = yaxis.getCoordEdge((int) yaxis.getSize()) * scaler;
}
if (!xaxis.isRegular() || !yaxis.isRegular()) {
throw new IllegalArgumentException("Must be evenly spaced grid = " + grid.getFullName());
}
if (pageNumber > 1) {
geotiff.initTags();
}
// write it out
writeGrid(grid, data, greyScale, xStart, yStart, xInc, yInc, pageNumber);
pageNumber++;
} | java | {
"resource": ""
} |
q175532 | GeotiffWriter.replaceMissingValues | test | private ArrayFloat replaceMissingValues(IsMissingEvaluator grid, Array data, MAMath.MinMax dataMinMax) {
// Copy the data into a float array, substituting (min - 1) for every
// missing value so missing cells sort below all real values.
float fillValue = (float) (dataMinMax.min - 1.0);
ArrayFloat result = (ArrayFloat) Array.factory(DataType.FLOAT, data.getShape());
IndexIterator src = data.getIndexIterator();
IndexIterator dst = result.getIndexIterator();
while (src.hasNext()) {
float value = src.getFloatNext();
dst.setFloatNext(grid.isMissing((double) value) ? fillValue : value);
}
return result;
} | java | {
"resource": ""
} |
q175533 | GeotiffWriter.replaceMissingValuesAndScale | test | private ArrayByte replaceMissingValuesAndScale(IsMissingEvaluator grid, Array data, MAMath.MinMax dataMinMax) {
// Scale the data into bytes 1..255 for a grey-scale image; missing values map to 0.
// NOTE(review): if max == min the scale divides by zero — confirm constant
// fields cannot reach this path.
double scale = 254.0 / (dataMinMax.max - dataMinMax.min);
ArrayByte byteArray = (ArrayByte) Array.factory(DataType.BYTE, data.getShape());
IndexIterator dataIter = data.getIndexIterator();
IndexIterator resultIter = byteArray.getIndexIterator();
byte bv;
while (dataIter.hasNext()) {
double v = dataIter.getDoubleNext();
if (grid.isMissing(v)) {
bv = 0;
} else {
// +1 reserves byte value 0 for "missing"
int iv = (int) ((v - dataMinMax.min) * scale + 1);
bv = (byte) (iv & 0xff);
}
resultIter.setByteNext(bv);
}
return byteArray;
} | java | {
"resource": ""
} |
q175534 | GeotiffWriter.geoShiftGetXstart | test | private double geoShiftGetXstart(Array lon, double inc) {
// Find the minimum (westernmost) longitude in the array; if the longitudes
// wrap (first == last) back off by one increment.
Index ilon = lon.getIndex();
int[] lonShape = lon.getShape();
IndexIterator lonIter = lon.getIndexIterator();
double xlon = 0.0;
LatLonPoint p0 = new LatLonPointImpl(0, lon.getFloat(ilon.set(0)));
LatLonPoint pN = new LatLonPointImpl(0, lon.getFloat(ilon.set(lonShape[0] - 1)));
xlon = p0.getLongitude();
while (lonIter.hasNext()) {
float l = lonIter.getFloatNext();
LatLonPoint pn = new LatLonPointImpl(0, l);
if (pn.getLongitude() < xlon) {
xlon = pn.getLongitude();
}
}
if (p0.getLongitude() == pN.getLongitude()) {
// wrapped longitude circle: avoid duplicating the seam point
xlon = xlon - inc;
}
return xlon;
} | java | {
"resource": ""
} |
q175535 | GeotiffWriter.writeGrid | test | public void writeGrid(GeoReferencedArray array, boolean greyScale) throws IOException {
// Write a coverage array as one GeoTIFF page: greyscale bytes or raw floats,
// followed by the georeferencing metadata. Requires regular 1D x/y axes.
CoverageCoordSys gcs = array.getCoordSysForData();
if (!gcs.isRegularSpatial())
throw new IllegalArgumentException("Must have 1D x and y axes for " + array.getCoverageName());
Projection proj = gcs.getProjection();
CoverageCoordAxis1D xaxis = (CoverageCoordAxis1D) gcs.getXAxis();
CoverageCoordAxis1D yaxis = (CoverageCoordAxis1D) gcs.getYAxis();
// latlon coord does not need to be scaled
double scaler = (xaxis.getUnits().equalsIgnoreCase("km")) ? 1000.0 : 1.0;
// data must go from top to bottom
double xStart = xaxis.getCoordEdge1(0) * scaler;
double yStart = yaxis.getCoordEdge1(0) * scaler;
double xInc = xaxis.getResolution() * scaler;
double yInc = Math.abs(yaxis.getResolution()) * scaler;
Array data = array.getData().reduce();
if (yaxis.getCoordMidpoint(0) < yaxis.getCoordMidpoint(1)) {
// y ascends: flip rows and start from the top edge
data = data.flip(0);
yStart = yaxis.getCoordEdgeLast();
}
/* remove - i think unneeded, monotonic lon handled in CoordinateAxis1D. JC 3/18/2013
if (gcs.isLatLon()) {
Array lon = xaxis.read();
data = geoShiftDataAtLon(data, lon);
xStart = geoShiftGetXstart(lon, xInc);
//xStart = -180.0;
} */
if (pageNumber > 1) {
geotiff.initTags();
}
// write the data first
int nextStart = 0;
MAMath.MinMax dataMinMax = MAMath.getMinMaxSkipMissingData(data, array);
if (greyScale) {
ArrayByte result = replaceMissingValuesAndScale(array, data, dataMinMax);
nextStart = geotiff.writeData((byte[]) result.getStorage(), pageNumber);
} else {
ArrayFloat result = replaceMissingValues(array, data, dataMinMax);
nextStart = geotiff.writeData((float[]) result.getStorage(), pageNumber);
}
// set the width and the height
int height = data.getShape()[0]; // Y
int width = data.getShape()[1]; // X
writeMetadata(greyScale, xStart, yStart, xInc, yInc, height, width, pageNumber, nextStart, dataMinMax, proj);
pageNumber++;
} | java | {
"resource": ""
} |
q175536 | WFSExceptionWriter.write | test | public void write(HttpServletResponse hsr) throws IOException{
// Serialize this exception as an OWS 1.1 ExceptionReport XML document
// onto the servlet response writer.
// NOTE(review): locator, ExceptionCode and text are interpolated without
// XML escaping — confirm they never carry user-controlled markup.
PrintWriter xmlResponse = hsr.getWriter();
xmlResponse.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
xmlResponse.append("<ows:ExceptionReport xml:lang=\"en-US\" xsi:schemaLocation=\"http://www.opengis.net/ows/1.1"
+ " http://schemas.opengis.net/ows/1.1.0/owsExceptionReport.xsd\" version=\"2.0.0\" xmlns:ows=\"http://www.opengis.net/ows/1.1\""
+ " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">");
xmlResponse.append("<ows:Exception ");
if(locator != null) xmlResponse.append("locator=\"" + locator + "\" ");
xmlResponse.append("exceptionCode=\"" + ExceptionCode + "\">");
xmlResponse.append("<ows:ExceptionText>" + text + "</ows:ExceptionText>");
xmlResponse.append("</ows:Exception>");
xmlResponse.append("</ows:ExceptionReport>");
} | java | {
"resource": ""
} |
q175537 | Nc4wrapper.nc_inq_libvers | test | @Override
public synchronized
String nc_inq_libvers() {
// Delegate to the native netCDF-4 library version call, bracketed by the
// ce()/cx() enter/exit guards used for all wrapped native calls.
String ret;
try {ce();
ret = nc4.nc_inq_libvers();
if(TRACE) trace(ret,"nc_inq_libvers","-");
} finally {cx();}
return ret;
} | java | {
"resource": ""
} |
q175538 | GribCdmIndex.makeTopIndexFileFromConfig | test | private static File makeTopIndexFileFromConfig(FeatureCollectionConfig config) {
// Derive the top-level ncx index File for a feature collection from its
// collection name and the spec's root directory.
Formatter errlog = new Formatter();
CollectionSpecParser specp = config.getCollectionSpecParser(errlog);
// normalize windows path separators in the collection name
String name = StringUtil2.replace(config.collectionName, '\\', "/");
// String cname = DirectoryCollection.makeCollectionName(name, Paths.get(specp.getRootDir()));
return makeIndexFile(name, new File(specp.getRootDir()));
} | java | {
"resource": ""
} |
q175539 | GribCdmIndex.getType | test | public static GribCollectionType getType(RandomAccessFile raf) throws IOException {
// Identify an ncx index file's collection type from the magic string at
// offset 0; returns none if the magic is unrecognized.
String magic;
raf.seek(0);
magic = raf.readString(Grib2CollectionWriter.MAGIC_START.getBytes(CDM.utf8Charset).length);
switch (magic) {
case Grib2CollectionWriter.MAGIC_START:
return GribCollectionType.GRIB2;
case Grib1CollectionWriter.MAGIC_START:
return GribCollectionType.GRIB1;
case Grib2PartitionBuilder.MAGIC_START:
return GribCollectionType.Partition2;
case Grib1PartitionBuilder.MAGIC_START:
return GribCollectionType.Partition1;
}
return GribCollectionType.none;
} | java | {
"resource": ""
} |
q175540 | GribCdmIndex.updateGribCollection | test | public static boolean updateGribCollection(FeatureCollectionConfig config, CollectionUpdateType updateType, Logger logger) throws IOException {
// Rebuild the grib collection indexes for a feature collection, dispatching
// on the configured partition type. Returns true if anything changed.
if (logger == null) logger = classLogger;
long start = System.currentTimeMillis();
Formatter errlog = new Formatter();
CollectionSpecParser specp = config.getCollectionSpecParser(errlog);
Path rootPath = Paths.get(specp.getRootDir());
boolean isGrib1 = config.type == FeatureCollectionType.GRIB1;
boolean changed;
if (config.ptype == FeatureCollectionConfig.PartitionType.none || config.ptype == FeatureCollectionConfig.PartitionType.all) {
try (CollectionAbstract dcm = new CollectionPathMatcher(config, specp, logger)) {
changed = updateGribCollection(isGrib1, dcm, updateType, FeatureCollectionConfig.PartitionType.none, logger, errlog);
}
} else if (config.ptype == FeatureCollectionConfig.PartitionType.timePeriod) {
try (TimePartition tp = new TimePartition(config, specp, logger)) {
changed = updateTimePartition(isGrib1, tp, updateType, logger);
}
} else {
// LOOK assume wantSubdirs makes it into a Partition. Isnt there something better ??
if (specp.wantSubdirs()) { // its a partition
try (DirectoryPartition dpart = new DirectoryPartition(config, rootPath, true, new GribCdmIndex(logger), NCX_SUFFIX, logger)) {
dpart.putAuxInfo(FeatureCollectionConfig.AUX_CONFIG, config);
changed = updateDirectoryCollectionRecurse(isGrib1, dpart, config, updateType, logger);
}
} else { // otherwise its a leaf directory
changed = updateLeafCollection(isGrib1, config, updateType, true, logger, rootPath);
}
}
long took = System.currentTimeMillis() - start;
logger.info("updateGribCollection {} changed {} took {} msecs", config.collectionName, changed, took);
return changed;
} | java | {
"resource": ""
} |
q175541 | GribCdmIndex.updateLeafCollection | test | private static boolean updateLeafCollection(boolean isGrib1, FeatureCollectionConfig config,
CollectionUpdateType updateType, boolean isTop,
Logger logger, Path dirPath) throws IOException {
// Update the index for a leaf directory: either per-file partitions or a
// single directory collection. Returns true if the index changed.
if (config.ptype == FeatureCollectionConfig.PartitionType.file) {
return updateFilePartition(isGrib1, config, updateType, isTop, logger, dirPath);
} else {
Formatter errlog = new Formatter();
CollectionSpecParser specp = config.getCollectionSpecParser(errlog);
try (DirectoryCollection dcm = new DirectoryCollection(config.collectionName, dirPath, isTop, config.olderThan, logger)) {
dcm.putAuxInfo(FeatureCollectionConfig.AUX_CONFIG, config);
if (specp.getFilter() != null)
dcm.setStreamFilter(new StreamFilter(specp.getFilter(), specp.getFilterOnName()));
boolean changed = updateGribCollection(isGrib1, dcm, updateType, FeatureCollectionConfig.PartitionType.directory, logger, errlog);
// BUG FIX: SLF4J uses {} placeholders, not java.util.Formatter %s/%n;
// with the original format string the arguments were never substituted
logger.debug(" GribCdmIndex.updateDirectoryPartition was updated={} on {}", changed, dirPath);
return changed;
}
}
} | java | {
"resource": ""
} |
q175542 | GribCdmIndex.openGribCollectionFromRaf | test | public static GribCollectionImmutable openGribCollectionFromRaf(RandomAccessFile raf, FeatureCollectionConfig config,
CollectionUpdateType updateType, org.slf4j.Logger logger) throws IOException {
// Open a grib collection from a RandomAccessFile that is either a raw
// GRIB1/GRIB2 data file or an ncx index file.
GribCollectionImmutable result;
// check if its a plain ole GRIB1/2 data file
boolean isGrib1 = false;
boolean isGrib2 = Grib2RecordScanner.isValidFile(raf);
if (!isGrib2) isGrib1 = Grib1RecordScanner.isValidFile(raf);
if (isGrib1 || isGrib2) {
result = openGribCollectionFromDataFile(isGrib1, raf, config, updateType, null, logger);
// close the data file, the ncx raf file is managed by gribCollection
raf.close();
} else { // check its an ncx file
result = openGribCollectionFromIndexFile(raf, config, logger);
}
return result;
} | java | {
"resource": ""
} |
q175543 | GribCdmIndex.openGribCollectionFromDataFile | test | private static GribCollectionImmutable openGribCollectionFromDataFile(boolean isGrib1, RandomAccessFile dataRaf, FeatureCollectionConfig config,
CollectionUpdateType updateType, Formatter errlog, org.slf4j.Logger logger) throws IOException {
// Adapt the RandomAccessFile to an MFile and delegate to the MFile overload.
String filename = dataRaf.getLocation();
File dataFile = new File(filename);
MFile mfile = new MFileOS(dataFile);
return openGribCollectionFromDataFile(isGrib1, mfile, updateType, config, errlog, logger);
} | java | {
"resource": ""
} |
q175544 | GribCdmIndex.openGribCollectionFromDataFile | test | @Nullable
public static GribCollectionImmutable openGribCollectionFromDataFile(boolean isGrib1, MFile mfile, CollectionUpdateType updateType,
FeatureCollectionConfig config, Formatter errlog, org.slf4j.Logger logger) throws IOException {
// Build (if needed) and open the ncx index for a single grib data file;
// retries once with a forced rebuild before giving up and returning null.
MCollection dcm = new CollectionSingleFile(mfile, logger);
dcm.putAuxInfo(FeatureCollectionConfig.AUX_CONFIG, config);
// IDIOM: the original captured the result in dead 'boolean changed' locals;
// only the side effect of createIndex matters here
if (isGrib1) {
Grib1CollectionBuilder builder = new Grib1CollectionBuilder(dcm.getCollectionName(), dcm, logger); // LOOK ignoring partition type
if (builder.updateNeeded(updateType))
builder.createIndex(FeatureCollectionConfig.PartitionType.file, errlog);
} else {
Grib2CollectionBuilder builder = new Grib2CollectionBuilder(dcm.getCollectionName(), dcm, logger);
if (builder.updateNeeded(updateType))
builder.createIndex(FeatureCollectionConfig.PartitionType.file, errlog);
}
// the index file should now exist, open it
GribCollectionImmutable result = openCdmIndex(dcm.getIndexFilename(NCX_SUFFIX), config, true, logger);
if (result != null) return result;
// if open fails, force recreate the index
if (updateType == CollectionUpdateType.never) return null; // not allowed to write
if (updateType == CollectionUpdateType.always) return null;// already tried to force write, give up
return openGribCollectionFromDataFile(isGrib1, mfile, CollectionUpdateType.always, config, errlog, logger);
} | java | {
"resource": ""
} |
q175545 | RC.urlMatch | test | static boolean
urlMatch(URL pattern, URL url) {
// True when url falls under pattern: url's host ends with the pattern host,
// url's path starts with the pattern path, and ports agree when the pattern
// specifies one. A null pattern matches only a null url.
// IDIOM: removed the unused local 'relation' from the original.
if (pattern == null)
return (url == null);
if (!(url.getHost().endsWith(pattern.getHost())))
return false; // e.g. pattern=x.y.org url=y.org
if (!(url.getPath().startsWith(pattern.getPath())))
return false; // e.g. pattern=y.org/a/b url=y.org/a
if (pattern.getPort() > 0 && pattern.getPort() != url.getPort())
return false;
// note: all other fields are ignored
return true;
} | java | {
"resource": ""
} |
q175546 | RC.add | test | static synchronized public void
add(String key, String value, String url) {
// Insert a (key, value, url) triple into the default run-control store and
// refresh the cached well-known settings. Null keys are ignored.
if (key == null) return;
if (!initialized) RC.initialize();
Triple t = new Triple(key, value, url);
dfaltRC.insert(t);
// recompute well-knowns
setWellKnown();
} | java | {
"resource": ""
} |
q175547 | RC.find | test | static synchronized public String
find(String key, String url) {
// Look up the value bound to key (optionally scoped by url) in the default
// run-control store; null if the key is null or absent.
if (key == null)
return null;
if (!initialized)
RC.initialize();
Triple match = dfaltRC.lookup(key, url);
if (match == null)
return null;
return match.value;
} | java | {
"resource": ""
} |
q175548 | RC.setWellKnown | test | static void
setWellKnown() {
// Propagate every triple that has no url qualifier into the well-known
// settings via RC.set.
if (dfaltRC.triplestore.size() == 0) return;
// Walk the set of triples looking for those that have no url
for (String key : dfaltRC.keySet()) {
Triple triple = dfaltRC.lookup(key);
if (triple.url == null) {
RC.set(key, triple.value); // let set sort it out
}
}
} | java | {
"resource": ""
} |
q175549 | RC.load | test | public boolean
load(String abspath) {
// Parse a run-control file of "[url] key=value" lines into the triple store.
// Returns false if the file is unreadable, malformed, or an IO error occurs.
abspath = abspath.replace('\\', '/');
File rcFile = new File(abspath);
if (!rcFile.exists() || !rcFile.canRead()) {
return false;
}
if (showlog) log.debug("Loading rc file: " + abspath);
try (BufferedReader rdr = new BufferedReader(new InputStreamReader(new FileInputStream(rcFile), CDM.UTF8))) {
for (int lineno = 1; ; lineno++) {
URL url = null;
String line = rdr.readLine();
if (line == null) break;
// trim leading blanks
line = line.trim();
if (line.length() == 0) continue; // empty line
if (line.charAt(0) == '#') continue; // check for comment
// parse the line
if (line.charAt(0) == LTAG) {
int rindex = line.indexOf(RTAG);
// BUG FIX: the original logged "Malformed [url]" AFTER the early
// return, so the error was reported on every WELL-formed [url] line
// and never on the malformed one. Log inside the failure branch.
if (rindex < 0) {
if (showlog) log.error("Malformed [url] at " + abspath + "." + lineno);
return false;
}
String surl = line.substring(1, rindex);
try {
url = new URL(surl);
} catch (MalformedURLException mue) {
// keep url == null; the triple is stored unqualified
if (showlog) log.error("Malformed [url] at " + abspath + "." + lineno);
}
line = line.substring(rindex + 1);
// trim again
line = line.trim();
}
// Get the key,value part
String[] pieces = line.split("\\s*=\\s*");
assert (pieces.length == 1 || pieces.length == 2);
// Create the triple; a key with no '=' gets the default value "1"
String value = "1";
if (pieces.length == 2) value = pieces[1].trim();
Triple triple = new Triple(pieces[0].trim(), value, url);
List<Triple> list = triplestore.get(triple.key);
if (list == null) list = new ArrayList<Triple>();
// IDIOM: the original kept addtriple's result in an unused 'prev' local
addtriple(list, triple);
triplestore.put(triple.key, list);
}
} catch (FileNotFoundException fe) {
if (showlog) log.debug("Loading rc file: " + abspath);
return false;
} catch (IOException ioe) {
if (showlog) log.error("File " + abspath + ": IO exception: " + ioe.getMessage());
return false;
}
return true;
} | java | {
"resource": ""
} |
q175550 | RC.insert | test | public Triple
insert(Triple t) {
// Triples without a key are not storable.
if (t.key == null) return null;
List<Triple> bucket = triplestore.get(t.key);
if (bucket == null) {
bucket = new ArrayList<Triple>();
}
// addtriple returns the triple this one replaced, if any.
Triple replaced = addtriple(bucket, t);
triplestore.put(t.key, bucket);
return replaced;
} | java | {
"resource": ""
} |
q175551 | DatasetNode.getDatasetsLocal | test | public List<Dataset> getDatasetsLocal() {
// Return the locally stored child datasets, or an empty list if none were set.
List<Dataset> stored = (List<Dataset>) flds.get(Dataset.Datasets);
if (stored == null)
return new ArrayList<>(0);
return stored;
} | java | {
"resource": ""
} |
q175552 | DatasetNode.findDatasetByName | test | public Dataset findDatasetByName(String name) {
// Depth-first search: check each direct child, then recurse into its subtree.
for (Dataset ds : getDatasets()) {
if (ds.getName().equals(name)) return ds;
Dataset result = ds.findDatasetByName(name);
if (result != null) return result;
}
// No dataset with that name anywhere below this node.
return null;
} | java | {
"resource": ""
} |
q175553 | GisFeatureRendererMulti.setProjection | test | public void setProjection(ProjectionImpl project) {
displayProject = project;
if (featSetList == null)
return;
// Mark every feature set so its cached shapes are reprojected on the next draw.
// (Idiom fix: replaced raw Iterator loop with an enhanced for loop.)
for (Object o : featSetList) {
FeatureSet fs = (FeatureSet) o;
fs.newProjection = true;
}
} | java | {
"resource": ""
} |
q175554 | GisFeatureRendererMulti.getShapes | test | protected Iterator getShapes(java.awt.Graphics2D g, AffineTransform normal2device) {
long startTime = System.currentTimeMillis();
// Lazily build the list of feature sets on first use.
if (featSetList == null) {
initFeatSetList();
assert !featSetList.isEmpty();
}
// which featureSet should we use?
FeatureSet fs = (FeatureSet) featSetList.get(0);
if (featSetList.size() > 1) {
// compute scale: how many world units map to one device pixel
double scale = 1.0;
try {
AffineTransform world2device = g.getTransform();
AffineTransform world2normal = normal2device.createInverse();
world2normal.concatenate( world2device);
scale = Math.max(Math.abs(world2normal.getScaleX()), Math.abs(world2normal.getShearX())); // drawing or printing
if (Debug.isSet("GisFeature/showTransform")) {
System.out.println("GisFeature/showTransform: "+world2normal+ "\n scale = "+ scale);
}
} catch ( java.awt.geom.NoninvertibleTransformException e) {
System.out.println( " GisRenderFeature: NoninvertibleTransformException on " + normal2device);
}
if (!displayProject.isLatLon())
scale *= 111.0; // km/deg
// Pick the feature set whose resolution best matches the current display scale.
double minD = Double.MAX_VALUE;
for (Object aFeatSetList : featSetList) {
FeatureSet tryfs = (FeatureSet) aFeatSetList;
double d = Math.abs(scale * tryfs.minDist - pixelMatch); // we want min features ~ 2 pixels
if (d < minD) {
minD = d;
fs = tryfs;
}
}
if (Debug.isSet("GisFeature/MapResolution")) {
System.out.println("GisFeature/MapResolution: scale = "+scale+" minDist = "+fs.minDist);
}
}
// we may have deferred the actual creation of the points
if (fs.featureList == null)
fs.createFeatures();
// ok, now see if we need to project
if (!displayProject.equals(fs.project)) {
fs.setProjection( displayProject);
} else { // deal with LatLon
if (fs.newProjection && displayProject.isLatLon()) {
fs.setProjection( displayProject);
}
}
fs.newProjection = false;
if (Debug.isSet("GisFeature/timing/getShapes")) {
long tookTime = System.currentTimeMillis() - startTime;
System.out.println("timing.getShapes: " + tookTime*.001 + " seconds");
}
// so return it, already
return fs.getShapes();
} | java | {
"resource": ""
} |
q175555 | GisFeatureRendererMulti.makeShapes | test | private ArrayList makeShapes(Iterator featList) {
Shape shape;
ArrayList shapeList = new ArrayList();
ProjectionImpl dataProject = getDataProjection();
if (Debug.isSet("GisFeature/MapDraw")) {
System.out.println("GisFeature/MapDraw: makeShapes with "+displayProject);
}
// NOTE(review): a large commented-out "bug.drawShapes" debug variant was removed
// here (dead code); recover it from version control if it is ever needed again.
// Convert each feature into a java.awt.Shape in display coordinates.
while (featList.hasNext()) {
AbstractGisFeature feature = (AbstractGisFeature) featList.next();
if (dataProject.isLatLon()) // always got to run it through if its lat/lon
shape = feature.getProjectedShape(displayProject);
else if (dataProject == displayProject)
shape = feature.getShape();
else
shape = feature.getProjectedShape(dataProject, displayProject);
shapeList.add(shape);
}
return shapeList;
} | java | {
"resource": ""
} |
q175556 | PrefixName.compareTo | test | public final int
compareTo(String string)
{
// Prefix comparison: compare only over the length of this name's id.
String id = getID();
if (id.length() >= string.length())
return id.compareToIgnoreCase(string);
return id.compareToIgnoreCase(string.substring(0, id.length()));
} | java | {
"resource": ""
} |
q175557 | StopButton.startProgressMonitorTask | test | public boolean startProgressMonitorTask( ProgressMonitorTask pmt) {
// Only one task may run at a time; reject if one is already in flight.
if (busy) return false;
busy = true;
this.task = pmt;
isCancelled = false;
count = 0;
setIcon(icon[0]);
// create timer, whose events happen on the awt event Thread
ActionListener watcher = new ActionListener() {
public void actionPerformed(ActionEvent evt) {
//System.out.println("timer event"+evt);
// User pressed stop but the task has not acknowledged yet: ask it to cancel.
if (isCancelled && !task.isCancel()) {
task.cancel();
if (debug) System.out.println(" task.cancel");
return; // give it a chance to finish up
} else {
// indicate progress by alternating the two icons
count++;
setIcon( icon[count % 2]);
if (debug) System.out.println(" stop count="+count);
}
// need to make sure task acknowledges the cancel; so dont shut down
// until the task is done
if (task.isDone()) {
if (myTimer != null)
myTimer.stop();
myTimer = null;
if (task.isError())
javax.swing.JOptionPane.showMessageDialog(null, task.getErrorMessage());
// Fire exactly one completion event describing how the task ended.
if (task.isSuccess())
fireEvent( new ActionEvent(this, 0, "success"));
else if (task.isError())
fireEvent( new ActionEvent(this, 0, "error"));
else if (task.isCancel())
fireEvent( new ActionEvent(this, 0, "cancel"));
else
fireEvent( new ActionEvent(this, 0, "done"));
busy = false;
}
}
};
myTimer = new javax.swing.Timer(1000, watcher); // every second
myTimer.start();
// do task in a seperate, non-event, thread
Thread taskThread = new Thread(task);
taskThread.start();
return true;
} | java | {
"resource": ""
} |
q175558 | GribPartitionBuilder.needsUpdate | test | private boolean needsUpdate(CollectionUpdateType ff, File collectionIndexFile) throws IOException {
long collectionLastModified = collectionIndexFile.lastModified();
Set<String> newFileSet = new HashSet<>();
// Every partition must have an index at least as old as the collection index.
for (MCollection dcm : partitionManager.makePartitions(CollectionUpdateType.test)) {
String partitionIndexFilename = StringUtil2.replace(dcm.getIndexFilename(GribCdmIndex.NCX_SUFFIX), '\\', "/");
File partitionIndexFile = GribIndexCache.getExistingFileOrCache(partitionIndexFilename);
if (partitionIndexFile == null) // make sure each partition has an index
return true;
if (collectionLastModified < partitionIndexFile.lastModified()) // and the partition index is earlier than the collection index
return true;
newFileSet.add(partitionIndexFilename);
}
if (ff == CollectionUpdateType.testIndexOnly) return false;
// now see if any files were deleted
GribCdmIndex reader = new GribCdmIndex(logger);
List<MFile> oldFiles = new ArrayList<>();
reader.readMFiles(collectionIndexFile.toPath(), oldFiles);
Set<String> oldFileSet = new HashSet<>();
for (MFile oldFile : oldFiles) {
if (!newFileSet.contains(oldFile.getPath()))
return true; // got deleted - must recreate the index
oldFileSet.add(oldFile.getPath());
}
// now see if any files were added
for (String newFilename : newFileSet) {
if (!oldFileSet.contains(newFilename))
return true; // got added - must recreate the index
}
return false;
} | java | {
"resource": ""
} |
q175559 | EnsCoord.normalize | test | static public void normalize(EnsCoord result, List<EnsCoord> ecList) {
// Collect the coords that differ from the given result.
List<EnsCoord> extra = new ArrayList<>();
for (EnsCoord ec : ecList) {
if (!result.equalsData(ec)) {
// differences can only be greater
extra.add(ec);
}
}
if (extra.size() == 0)
return;
for (EnsCoord ec : extra) {
if (ec.getNEnsembles() < result.getNEnsembles())
continue;
// NOTE(review): this reassigns the local parameter only — the caller's
// 'result' reference is NOT updated, so this loop has no effect visible
// outside this method. Confirm whether a return value was intended.
result = ec;
}
} | java | {
"resource": ""
} |
q175560 | ArrayStructure.setObject | test | public void setObject(int index, Object value) {
// Lazily allocate the per-record cache of StructureData objects.
if (sdata == null)
sdata = new StructureData[nelems];
sdata[index] = (StructureData) value;
} | java | {
"resource": ""
} |
q175561 | ArrayStructure.getStructureData | test | public StructureData getStructureData(int index) {
// Lazily allocate the per-record cache of StructureData objects.
if (sdata == null)
sdata = new StructureData[nelems];
// FIX: also reject negative indices, and make the message match the check
// (previously the condition was >= but the message printed ">").
if (index < 0 || index >= sdata.length)
throw new IllegalArgumentException("index " + index + " out of range [0," + sdata.length + ")");
// Build the StructureData on first access and cache it.
if (sdata[index] == null)
sdata[index] = makeStructureData(this, index);
return sdata[index];
} | java | {
"resource": ""
} |
q175562 | ArrayStructure.copyStructures | test | protected void copyStructures(int recnum, StructureMembers.Member m, IndexIterator result) {
// Copy every element of this member's sub-array for the record into the result iterator.
Array memberData = getArray(recnum, m);
for (IndexIterator src = memberData.getIndexIterator(); src.hasNext(); )
result.setObjectNext(src.getObjectNext());
} | java | {
"resource": ""
} |
q175563 | ArrayStructure.getScalarObject | test | public Object getScalarObject(int recno, StructureMembers.Member m) {
DataType dataType = m.getDataType();
// Dispatch on the member's type; signed/unsigned integer types share a branch
// via the primitive class, so e.g. BYTE and UBYTE both go through getScalarByte.
if (dataType == DataType.DOUBLE) {
return getScalarDouble(recno, m);
} else if (dataType == DataType.FLOAT) {
return getScalarFloat(recno, m);
} else if (dataType.getPrimitiveClassType() == byte.class) {
return getScalarByte(recno, m);
} else if (dataType.getPrimitiveClassType() == short.class) {
return getScalarShort(recno, m);
} else if (dataType.getPrimitiveClassType() == int.class) {
return getScalarInt(recno, m);
} else if (dataType.getPrimitiveClassType() == long.class) {
return getScalarLong(recno, m);
} else if (dataType == DataType.CHAR) {
// CHAR and STRING both come back as a String
return getScalarString(recno, m);
} else if (dataType == DataType.STRING) {
return getScalarString(recno, m);
} else if (dataType == DataType.STRUCTURE) {
return getScalarStructure(recno, m);
} else if (dataType == DataType.OPAQUE) {
ArrayObject data = (ArrayObject) m.getDataArray();
return data.getObject(recno * m.getSize()); // LOOK ??
}
throw new RuntimeException("Dont have implementation for " + dataType);
} | java | {
"resource": ""
} |
q175564 | ArrayStructure.convertScalarDouble | test | public double convertScalarDouble(int recnum, StructureMembers.Member m) {
// Fast paths for the native floating-point types.
DataType dt = m.getDataType();
if (dt == DataType.DOUBLE) return getScalarDouble(recnum, m);
if (dt == DataType.FLOAT) return (double) getScalarFloat(recnum, m);
// Otherwise fetch the boxed scalar and widen any Number.
Object value = getScalarObject(recnum, m);
if (value instanceof Number) return ((Number) value).doubleValue();
throw new ForbiddenConversionException("Type is " + m.getDataType() + ", not convertible to double");
} | java | {
"resource": ""
} |
q175565 | ArrayStructure.convertScalarInt | test | public int convertScalarInt(int recnum, StructureMembers.Member m) {
if (m.getDataType() == DataType.INT || m.getDataType() == DataType.UINT) return getScalarInt(recnum, m);
if (m.getDataType() == DataType.SHORT) return (int) getScalarShort(recnum, m);
// Unsigned types are widened without sign extension.
if (m.getDataType() == DataType.USHORT) return DataType.unsignedShortToInt( getScalarShort(recnum, m));
if (m.getDataType() == DataType.BYTE) return (int) getScalarByte(recnum, m);
if (m.getDataType() == DataType.UBYTE) return (int) DataType.unsignedByteToShort( getScalarByte(recnum, m));
// LONG/ULONG values are narrowed with a cast — may truncate.
if (m.getDataType() == DataType.LONG || m.getDataType() == DataType.ULONG) return (int) getScalarLong(recnum, m);
// Fall back to the boxed scalar and narrow any Number.
Object o = getScalarObject(recnum, m);
if (o instanceof Number) return ((Number) o).intValue();
throw new ForbiddenConversionException("Type is " + m.getDataType() + ", not convertible to int");
} | java | {
"resource": ""
} |
q175566 | ArrayStructure.getScalarFloat | test | public float getScalarFloat(int recnum, StructureMembers.Member m) {
if (m.getDataType() != DataType.FLOAT)
throw new IllegalArgumentException("Type is " + m.getDataType() + ", must be float");
// First element of this record's slot in the member's backing array.
Array memberData = m.getDataArray();
return memberData.getFloat(recnum * m.getSize());
} | java | {
"resource": ""
} |
q175567 | ArrayStructure.getScalarByte | test | public byte getScalarByte(int recnum, StructureMembers.Member m) {
// Accept any type whose primitive representation is byte (e.g. BYTE/UBYTE).
if (!(m.getDataType().getPrimitiveClassType() == byte.class))
throw new IllegalArgumentException("Type is " + m.getDataType() + ", must be byte");
Array data = m.getDataArray();
return data.getByte(recnum * m.getSize()); // gets first one in the array
} | java | {
"resource": ""
} |
q175568 | ArrayStructure.getScalarShort | test | public short getScalarShort(int recnum, StructureMembers.Member m) {
// Accept any type whose primitive representation is short (e.g. SHORT/USHORT).
if (!(m.getDataType().getPrimitiveClassType() == short.class))
throw new IllegalArgumentException("Type is " + m.getDataType() + ", must be short");
Array data = m.getDataArray();
return data.getShort(recnum * m.getSize()); // gets first one in the array
} | java | {
"resource": ""
} |
q175569 | ArrayStructure.getScalarChar | test | public char getScalarChar(int recnum, StructureMembers.Member m) {
if (m.getDataType() != DataType.CHAR)
throw new IllegalArgumentException("Type is " + m.getDataType() + ", must be char");
// First char of this record's slot in the member's backing array.
return m.getDataArray().getChar(recnum * m.getSize());
} | java | {
"resource": ""
} |
q175570 | ArrayStructure.getScalarString | test | public String getScalarString(int recnum, StructureMembers.Member m) {
if (m.getDataType() == DataType.CHAR) {
ArrayChar data = (ArrayChar) m.getDataArray();
return data.getString(recnum);
}
if (m.getDataType() == DataType.STRING) {
Array data = m.getDataArray();
return (String) data.getObject(recnum);
}
throw new IllegalArgumentException("Type is " + m.getDataType() + ", must be String or char");
} | java | {
"resource": ""
} |
q175571 | ArrayStructure.getArrayStructure | test | public ArrayStructure getArrayStructure(int recnum, StructureMembers.Member m) {
if ((m.getDataType() != DataType.STRUCTURE) && (m.getDataType() != DataType.SEQUENCE))
throw new IllegalArgumentException("Type is " + m.getDataType() + ", must be Structure or Sequence");
// Sequences are stored differently; delegate.
if (m.getDataType() == DataType.SEQUENCE)
return getArraySequence(recnum, m);
ArrayStructure array = (ArrayStructure) m.getDataArray();
// Gather this record's slice of the member's nested structures.
int count = m.getSize();
StructureData[] this_sdata = new StructureData[count];
for (int i = 0; i < count; i++)
this_sdata[i] = array.getStructureData(recnum * count + i);
// make a copy of the members, but remove the data arrays, since the structureData must be used instead
StructureMembers membersw = new StructureMembers(array.getStructureMembers());
return new ArrayStructureW(membersw, m.getShape(), this_sdata);
} | java | {
"resource": ""
} |
q175572 | ArrayStructure.getArraySequence | test | public ArraySequence getArraySequence(int recnum, StructureMembers.Member m) {
if (m.getDataType() != DataType.SEQUENCE)
throw new IllegalArgumentException("Type is " + m.getDataType() + ", must be Sequence");
// should store sequences as ArrayObject of ArraySequence objects
ArrayObject array = (ArrayObject) m.getDataArray();
return (ArraySequence) array.getObject(recnum);
} | java | {
"resource": ""
} |
q175573 | ArrayStructure.getArrayObject | test | public ArrayObject getArrayObject(int recnum, StructureMembers.Member m) {
if (m.getDataType() != DataType.OPAQUE)
// FIX: message said "must be Sequence" (copy-paste from getArraySequence); the check is for Opaque.
throw new IllegalArgumentException("Type is " + m.getDataType() + ", must be Opaque");
ArrayObject array = (ArrayObject) m.getDataArray();
return (ArrayObject) array.getObject(recnum); // LOOK ??
} | java | {
"resource": ""
} |
q175574 | CoreTypeFcns.minmax | test | static protected long
minmax(long value, long min, long max)
{
if(value < min) return min;
if(value > max) return max;
return value;
} | java | {
"resource": ""
} |
q175575 | CEConstraint.eval | test | protected Object
eval(DapVariable var, DapSequence seq, DataCursor record, CEAST expr)
throws DapException
{
// Recursively evaluate a filter expression against one sequence record.
switch (expr.sort) {
case CONSTANT:
return expr.value;
case SEGMENT:
// A segment names a field of the sequence; fetch its value for this record.
return fieldValue(var, seq, record, expr.name);
case EXPR:
Object lhs = eval(var, seq, record, expr.lhs);
Object rhs = (expr.rhs == null ? null : eval(var, seq, record, expr.rhs));
// Binary operators when a right operand exists; otherwise unary NOT.
if(rhs != null)
switch (expr.op) {
case LT:
return compare(lhs, rhs) < 0;
case LE:
return compare(lhs, rhs) <= 0;
case GT:
return compare(lhs, rhs) > 0;
case GE:
return compare(lhs, rhs) >= 0;
case EQ:
return lhs.equals(rhs);
case NEQ:
return !lhs.equals(rhs);
case REQ:
// regular-expression match: lhs string against rhs pattern
return lhs.toString().matches(rhs.toString());
case AND:
return ((Boolean) lhs) && ((Boolean) rhs);
}
else switch (expr.op) {
case NOT:
return !((Boolean) lhs);
}
}
throw new DapException("Malformed Filter");
} | java | {
"resource": ""
} |
q175576 | CEConstraint.toConstraintString | test | public String toConstraintString()
{
// Render only the top-level variables, separated by ";".
StringBuilder sb = new StringBuilder();
boolean needSeparator = false;
for (Segment seg : segments) {
if (!seg.var.isTopLevel())
continue;
if (needSeparator)
sb.append(";");
needSeparator = true;
dumpvar(seg, sb, true);
}
return sb.toString();
} | java | {
"resource": ""
} |
q175577 | CEConstraint.references | test | public boolean
references(DapNode node)
{
// Decide, by node kind, whether this constraint mentions the node.
boolean isref = false;
switch (node.getSort()) {
case DIMENSION:
// Dimensions may have been redefined by the constraint; check the redef map first.
DapDimension dim = this.redef.get((DapDimension) node);
if(dim == null) dim = (DapDimension) node;
isref = this.dimrefs.contains(dim);
break;
case ENUMERATION:
isref = (this.enums.contains((DapEnumeration) node));
break;
case VARIABLE:
isref = (findVariableIndex((DapVariable) node) >= 0);
break;
case GROUP:
case DATASET:
isref = (this.groups.contains((DapGroup) node));
break;
default:
// other node kinds are never considered referenced
break;
}
return isref;
} | java | {
"resource": ""
} |
q175578 | CEConstraint.matches | test | protected boolean
matches(DapVariable var, DapSequence seq, DataCursor rec, CEAST filter)
throws DapException
{
Object value = eval(var, seq, rec, filter);
return ((Boolean) value);
} | java | {
"resource": ""
} |
q175579 | CEConstraint.expansionCount | test | protected int
expansionCount(DapStructure struct)
{
// Count how many fields of this structure the constraint includes.
int included = 0;
for (DapVariable field : struct.getFields()) {
if (findVariableIndex(field) >= 0)
included++;
}
return included;
} | java | {
"resource": ""
} |
q175580 | CEConstraint.computeenums | test | protected void computeenums()
{
// Collect the enumeration types used by the constrained variables, without duplicates.
for(int i = 0; i < variables.size(); i++) {
DapVariable var = variables.get(i);
if(var.getSort() != DapSort.VARIABLE)
continue;
DapType daptype = var.getBaseType();
if(!daptype.isEnumType())
continue;
if(!this.enums.contains((DapEnumeration) daptype))
this.enums.add((DapEnumeration) daptype);
}
} | java | {
"resource": ""
} |
q175581 | CEConstraint.computegroups | test | protected void computegroups()
{
// Collect every group on the path of each referenced variable, shared
// dimension, and enumeration, without duplicates.
// 1. variables
for(int i = 0; i < variables.size(); i++) {
DapVariable var = variables.get(i);
List<DapGroup> path = var.getGroupPath();
for(DapGroup group : path) {
if(!this.groups.contains(group))
this.groups.add(group);
}
}
// 2. Dimensions (anonymous dimensions carry no group path)
for(DapDimension dim : this.dimrefs) {
if(!dim.isShared())
continue;
List<DapGroup> path = dim.getGroupPath();
for(DapGroup group : path) {
if(!this.groups.contains(group))
this.groups.add(group);
}
}
// 3. enumerations
for(DapEnumeration en : this.enums) {
List<DapGroup> path = en.getGroupPath();
for(DapGroup group : path) {
if(!this.groups.contains(group))
this.groups.add(group);
}
}
} | java | {
"resource": ""
} |
q175582 | CEConstraint.compile | test | static public CEConstraint
compile(String sce, DapDataset dmr)
throws DapException
{
// Process any constraint; an empty constraint means "everything".
if(sce == null || sce.length() == 0)
return CEConstraint.getUniversal(dmr);
CEParserImpl ceparser = new CEParserImpl(dmr);
if(PARSEDEBUG)
ceparser.setDebugLevel(1);
if(DEBUG) {
System.err.println("Dap4Servlet: parsing constraint: |" + sce + "|");
}
// Parse failures (including parser exceptions) become a DapException below.
boolean ok;
try {
ok = ceparser.parse(sce);
} catch (ParseException pe) {
ok = false;
}
if(!ok)
throw new DapException("Constraint parse failed: " + sce);
// Compile the parse tree into a constraint, then expand/finalize it.
CEAST root = ceparser.getCEAST();
CECompiler compiler = new CECompiler();
CEConstraint ce = compiler.compile(dmr, root);
ce.expand();
ce.finish();
return ce;
} | java | {
"resource": ""
} |
q175583 | Grib2Iosp.isValidFile | test | @Override
public boolean isValidFile(RandomAccessFile raf) throws IOException {
// Remote (HTTP) files: only proceed when the file fits in the buffer (memory resident).
if (raf instanceof HTTPRandomAccessFile) { // only do remote if memory resident
if (raf.length() > raf.getBufferSize())
return false;
} else { // wont accept remote index
// Local files may be a prebuilt GRIB2 collection/partition index.
GribCdmIndex.GribCollectionType type = GribCdmIndex.getType(raf);
if (type == GribCdmIndex.GribCollectionType.GRIB2) return true;
if (type == GribCdmIndex.GribCollectionType.Partition2) return true;
}
// check for GRIB2 data file
return Grib2RecordScanner.isValidFile(raf);
} | java | {
"resource": ""
} |
q175584 | ThreddsUI.makeActionsSystem | test | private void makeActionsSystem() {
// NOTE(review): large blocks of commented-out legacy actions (about, print,
// preferences, clear-recent, set-debug-flags, exit) were removed as dead code;
// recover them from version control if they are ever needed again.
// The only live action is "Clear Debug Flags" (its body is itself disabled).
AbstractAction clearDebugFlagsAction = new AbstractAction() {
public void actionPerformed(ActionEvent e) { /* Debug.clear(); */ }
};
BAMutil.setActionProperties(clearDebugFlagsAction, null, "Clear Debug Flags", false, 'D', -1);
} | java | {
"resource": ""
} |
q175585 | GempakFileReader.getByteOrder | test | public int getByteOrder(int kmachn) {
if ((kmachn == MTVAX) || (kmachn == MTULTX) || (kmachn == MTALPH)
|| (kmachn == MTLNUX) || (kmachn == MTIGPH)) {
return RandomAccessFile.LITTLE_ENDIAN;
}
return RandomAccessFile.BIG_ENDIAN;
} | java | {
"resource": ""
} |
q175586 | GempakFileReader.setByteOrder | test | void setByteOrder() {
setByteOrder() {
// Derive the native byte order from the JVM's architecture name.
String arch = System.getProperty("os.arch");
switch (arch) {
case "x86": // Windows, Linux
case "arm": // Windows CE
case "x86_64": // Windows64, Mac OS-X
case "amd64": // Linux64?
case "alpha": // Ultrix, VAX, DECOS
MTMACH = RandomAccessFile.LITTLE_ENDIAN;
break;
default:
MTMACH = RandomAccessFile.BIG_ENDIAN;
}
} | java | {
"resource": ""
} |
q175587 | GempakFileReader.findKey | test | public Key findKey(String name) {
// No keys have been read yet.
if (keys == null) {
return null;
}
// search rows
for (Key key : keys.kkrow) {
if (key.name.equals(name)) {
return key;
}
}
// search columns
for (Key key : keys.kkcol) {
if (key.name.equals(name)) {
return key;
}
}
// not found in either rows or columns
return null;
} | java | {
"resource": ""
} |
q175588 | GempakFileReader.findFileHeader | test | public DMFileHeaderInfo findFileHeader(String name) {
// Scan the known file headers for the first one matching this name.
if (fileHeaderInfo != null) {
for (DMFileHeaderInfo header : fileHeaderInfo) {
if (name.equals(header.kfhnam)) {
return header;
}
}
}
return null;
} | java | {
"resource": ""
} |
q175589 | GempakFileReader.getFileHeader | test | public float[] getFileHeader(String name) throws IOException {
DMFileHeaderInfo fh = findFileHeader(name);
// Only REAL-typed headers can be returned as floats.
if ((fh == null) || (fh.kfhtyp != MDREAL)) {
return null;
}
int knt = fileHeaderInfo.indexOf(fh); // 0 based
// Skip over the headers that precede this one to find its word offset.
int iread = dmLabel.kpfile + 3 * dmLabel.kfhdrs;
for (int i = 0; i < knt; i++) {
DMFileHeaderInfo fhi = fileHeaderInfo.get(i);
iread = iread + fhi.kfhlen + 1;
}
// First word is the actual header length.
int nword = DM_RINT(iread);
if (nword <= 0) {
logError("Invalid header length for " + name);
return null;
}
iread++;
float[] rheader = new float[nword];
// Special case: the first two NAVB words are stored in native order,
// so temporarily disable swapping for them.
if (name.equals("NAVB") && needToSwap) {
DM_RFLT(iread, 1, rheader, 0);
needToSwap = false;
iread++;
DM_RFLT(iread, 1, rheader, 1);
needToSwap = true;
iread++;
DM_RFLT(iread, nword - 2, rheader, 2);
} else {
DM_RFLT(iread, rheader);
}
return rheader;
} | java | {
"resource": ""
} |
q175590 | GempakFileReader.printParts | test | public void printParts() {
// Nothing to print when no parts have been read.
if (parts == null) {
return;
}
int partNum = 0;
for (Object part : parts) {
System.out.println("\nParts[" + partNum + "]:");
System.out.println(part);
partNum++;
}
} | java | {
"resource": ""
} |
q175591 | GempakFileReader.getDataPointer | test | public int getDataPointer(int irow, int icol, String partName) {
int ipoint = -1;
// Validate the 1-based row/column indices against the file label.
if ((irow < 1) || (irow > dmLabel.krow) || (icol < 1)
|| (icol > dmLabel.kcol)) {
System.out.println("bad row or column number: " + irow + "/"
+ icol);
return ipoint;
}
int iprt = getPartNumber(partName);
if (iprt == 0) {
System.out.println("couldn't find part");
return ipoint;
}
// gotta subtract 1 because parts are 1 but List is 0 based
DMPart part = parts.get(iprt - 1);
// check for valid data type
if ((part.ktyprt != MDREAL) && (part.ktyprt != MDGRID)
&& (part.ktyprt != MDRPCK)) {
System.out.println("Not a valid type");
return ipoint;
}
// FIX: removed unused local (part.klnhdr header length was read but never used)
ipoint = dmLabel.kpdata + (irow - 1) * dmLabel.kcol * dmLabel.kprt
+ (icol - 1) * dmLabel.kprt + (iprt - 1);
return ipoint;
} | java | {
"resource": ""
} |
q175592 | GempakFileReader.DM_RFLT | test | public float DM_RFLT(int word) throws IOException {
// Fail fast if the reader was never opened or initialized.
if (rf == null) {
throw new IOException("DM_RFLT: no file to read from");
}
if (dmLabel == null) {
throw new IOException("DM_RFLT: reader not initialized");
}
rf.seek(getOffset(word));
if (needToSwap) {
// set the order
//if ((dmLabel.kmachn != MTMACH) &&
// ((dmLabel.kvmst && ! mvmst) ||
// (mvmst && !dmLabel.kvmst))) {
rf.order(RandomAccessFile.LITTLE_ENDIAN); // swap
} else {
rf.order(RandomAccessFile.BIG_ENDIAN);
}
float rdata = rf.readFloat();
// Map the file's missing-data sentinel (within tolerance) onto RMISSD.
if (RMISSD != dmLabel.smissd) {
if (Math.abs(rdata - dmLabel.smissd) < RDIFFD) {
rdata = RMISSD;
}
}
// reset to read normally
rf.order(RandomAccessFile.BIG_ENDIAN);
return rdata;
} | java | {
"resource": ""
} |
q175593 | GempakFileReader.DM_RSTR | test | public String DM_RSTR(int isword, int nchar) throws IOException {
// Fail fast if the reader was never opened.
if (rf == null) {
throw new IOException("DM_RSTR: no file to read from");
}
// Position at the starting word and read nchar bytes as a String.
rf.seek(getOffset(isword));
return rf.readString(nchar);
} | java | {
"resource": ""
} |
q175594 | GempakFileReader.DM_UNPK | test | public float[] DM_UNPK(DMPart part, int[] ibitst) {
int nparms = part.kparms;
int nwordp = part.kwordp;
// Number of packed records; each record occupies nwordp words.
int npack = (ibitst.length - 1) / nwordp + 1;
if (npack * nwordp != ibitst.length) {
//logError("number of packed records not correct");
// System.out.println("number of packed records not correct: "
// + npack * nwordp + " vs. " + ibitst.length);
return null;
}
float[] data = new float[nparms * npack];
PackingInfo pkinf = part.packInfo;
int ir = 0;
int ii = 0;
for (int pack = 0; pack < npack; pack++) {
//
// Move bitstring into internal words. TODO: necessary?
//
int[] jdata = new int[nwordp];
System.arraycopy(ibitst, ii, jdata, 0, nwordp);
//
// Extract each data value.
//
for (int idata = 0; idata < nparms; idata++) {
//
// Extract correct bits from words using shift and mask
// operations.
//
int jbit = pkinf.nbitsc[idata];
int jsbit = pkinf.isbitc[idata];
int jshift = 1 - jsbit;
int jsword = pkinf.iswrdc[idata];
int jword = jdata[jsword];
// use >>> to shift avoid carrying sign along
int mask = mskpat >>> (32 - jbit);
int ifield = jword >>> Math.abs(jshift);
ifield = ifield & mask;
// Field straddles a word boundary: pull the remaining bits
// from the next word.
if ((jsbit + jbit - 1) > 32) {
jword = jdata[jsword + 1];
jshift = jshift + 32;
int iword = jword << jshift;
iword = iword & mask;
ifield = ifield | iword;
}
//
// The integer data is now in ifield. Use the scaling and
// offset terms to convert to REAL data.
//
if (ifield == pkinf.imissc[idata]) {
data[ir + idata] = RMISSD;
} else {
data[ir + idata] = (ifield + pkinf.koffst[idata])
* (float) pkinf.scalec[idata];
}
}
ir += nparms;
ii += nwordp;
}
return data;
} | java | {
"resource": ""
} |
q175595 | GempakFileReader.getBits | test | protected static String getBits(int b) {
// Render the 32 bits of b, most-significant first, with '|' after every byte.
StringBuilder bits = new StringBuilder(36);
for (int i = 31; i >= 0; i--) {
bits.append(((b & (1 << i)) != 0) ? "1" : "0");
if (i % 8 == 0) {
bits.append("|");
}
}
return bits.toString();
} | java | {
"resource": ""
} |
q175596 | N3channelWriter.writeToChannel | test | public static void writeToChannel(NetcdfFile ncfile, WritableByteChannel wbc) throws IOException, InvalidRangeException {
// Wrap the channel in a buffered stream for the header write.
DataOutputStream stream = new DataOutputStream(new BufferedOutputStream( Channels.newOutputStream(wbc), 8000));
//DataOutputStream stream = new DataOutputStream(Channels.newOutputStream(wbc)); // buffering seems to improve by 5%
N3channelWriter writer = new N3channelWriter(ncfile);
// Number of records along the unlimited dimension (0 if there is none).
int numrec = ncfile.getUnlimitedDimension() == null ? 0 : ncfile.getUnlimitedDimension().getLength();
writer.writeHeader(stream, numrec);
stream.flush();
// Data is written directly to the channel, bypassing the buffered stream.
writer.writeDataAll(wbc);
} | java | {
"resource": ""
} |
q175597 | DDSXMLParser.parseBase | test | private void parseBase(Element e, String indent)
throws DASException, NoSuchTypeException, BadSemanticsException {
// NOTE(review): parseLevel-- is not in a finally block, so an exception
// leaves the level counter incremented — confirm whether that matters.
parseLevel++;
String type = e.getName();
if (type.equals("Attribute")) {
// Do nothing here, the Attributes get parsed when the BaseType's
// get built. This conditional basically serves as a "trap" to
// ignore the <Attribute> tag.
} else if (type.equals("Alias")) {
// Do nothing here, the Aliases get parsed when the BaseType's
// get built. This conditional basically serves as a "trap" to
// ignore the <Alias> tag.
} else if (type.equals("dataBLOB")) {
// dataBLOB?
// The schema says that the href attribute is
// required for the dataBLOB element.
org.jdom2.Attribute hrefAttr = e.getAttribute("href");
// Since it's required we know that the getAttribute()
// method is not going to return null.
String contentID = hrefAttr.getValue();
if (_Debug) System.out.println("Found dataBLOB element. contentID=\"" + contentID + "\"");
dds.setBlobContentID(contentID);
} else {
// What's left must be a OPeNDAP BaseType
if (_Debug) System.out.println("Parsing new BaseType element. Parse level: " + parseLevel);
if (_Debug) showXMLElement(e, indent);
// Go get a new BaseType formed from this element
BaseType bt = newBaseType(e);
// Set it's parent.
// bt.setParent(parentDC);
// Add it to it's parent (container)
parentDC.addVariable(bt);
// Now we need to make sure this particular BaseType
// derived element isn't some special type that needs
// additional parsing:
// Is it a container?
if (bt instanceof DConstructor) {
// Up date the parsers state, (cache my parent)
DConstructor myParentDC = parentDC;
parentDC = (DConstructor) bt;
try {
// Grids are special containers, handle them
if (bt instanceof DGrid) {
parseGrid(e, indent);
} else {
// Otherwise, recurse on the children
for (Element child : e.getChildren()) {
parseBase(child, indent + " ");
}
}
} finally {
// restore my parent
parentDC = myParentDC;
}
} else if (bt instanceof DArray) {
// Array's are special, better build it if it is one
if (_Debug) System.out.println("Parsing Array instance. Array name: '" + bt.getClearName() + "'");
parseArray(e, (DArray) bt, indent);
}
}
parseLevel--;
} | java | {
"resource": ""
} |
q175598 | DDSXMLParser.parseGrid | test | private void parseGrid(Element gridElement, String indent)
            throws DASException, NoSuchTypeException, BadSemanticsException {
        // Parses a Grid element: one Array child plus one Map child per
        // dimension of that Array, all added to the DGrid under construction.
        parseLevel++;
        try {
            // Grab the parent object (which better be a Grid!)
            // just to elminate the hassle of casting everytime...
            DGrid myGrid = (DGrid) parentDC;
            if (_Debug) {
                System.out.println("Parsing Grid Element: " + gridElement);
                System.out.println("Grid Elements: ");
                for (Element element : gridElement.getChildren()) System.out.println(element);
            }
            // Get and parse the grid's Array element.
            String eName = "Array";
            if (_Debug) {
                System.out.println("Parsing Array element.");
                System.out.println("Asking for element: '" + eName + "' in namespace: '" + opendapNameSpace + "'");
            }
            Element arrayElement = gridElement.getChild(eName, opendapNameSpace);
            if (_Debug) System.out.println("Got Array element: " + arrayElement);
            DArray gridArray = (DArray) newBaseType(arrayElement);
            parseArray(arrayElement, gridArray, indent + "    ");
            // Add it to the Grid
            myGrid.addVariable(gridArray, DGrid.ARRAY);
            // Get the Map elements
            eName = "Map";
            if (_Debug) {
                System.out.println("Parsing Map elements.");
                System.out.println("Asking for element: '" + eName + "' in namespace: '" + opendapNameSpace + "'");
            }
            List<Element> mapElements = gridElement.getChildren("Map", opendapNameSpace);
            // Make sure the number of Map elements matches the dimension of the Grid Array.
            if (mapElements.size() != gridArray.numDimensions())
                throw new BadSemanticsException("Error in Grid syntax: " +
                        "The number of Map arrays must " +
                        "equal the number of dimensions " +
                        "of the data array.");
            // Parse each Map element and poke it into the Grid.
            for (Element mapElement : mapElements) {
                DArray thisMap = (DArray) newBaseType(mapElement);
                parseArray(mapElement, thisMap, indent + "    ");
                if (thisMap.numDimensions() != 1)
                    throw new BadSemanticsException("Error in Grid syntax: " +
                            "Maps may have only one dimension.");
                myGrid.addVariable(thisMap, DGrid.MAPS);
            }
        } finally {
            // Fix: this method throws BadSemanticsException from the middle of its
            // body, which previously skipped the decrement and corrupted the
            // parser's depth counter for the rest of the parse.
            parseLevel--;
        }
    } | java | {
"resource": ""
} |
q175599 | DDSXMLParser.parseAliases | test | private void parseAliases(Element e, String indent) throws DASException {
        // Parses every <Alias> child of e and registers each alias with the
        // current attribute table (currentAT) or, failing that, the current
        // BaseType (currentBT).
        parseLevel++;
        try {
            String subIndent = indent + "    ";
            if (_Debug) System.out.println(indent + "Parsing Aliases: ");
            if (_Debug)
                System.out.println(subIndent + "currentBT: " + currentBT.getTypeName() + " " + currentBT.getClearName());
            // Get the Alias elements. The Schema enforces the presence of both the
            // "name" and "Attribute" attributes on the <Alias> tag in the OPeNDAP
            // namespace, so neither getAttribute() call below can return null.
            for (Element aliasElement : e.getChildren("Alias", opendapNameSpace)) {
                String name = aliasElement.getAttribute("name").getValue();
                String attributeName = aliasElement.getAttribute("Attribute").getValue();
                if (_Debug) {
                    System.out.println(subIndent + "The name '" + name +
                            "' is aliased to dds attribute: '" + attributeName + "'");
                }
                // Add the Alias to the appropriate container.
                if (currentAT == null)
                    currentBT.addAttributeAlias(name, attributeName);
                else
                    currentAT.addAlias(name, attributeName);
            }
        } finally {
            // Fix: addAttributeAlias/addAlias may throw the declared DASException;
            // previously that skipped the decrement and corrupted the depth counter.
            parseLevel--;
        }
    } | java | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.