_id
stringlengths
2
7
title
stringlengths
3
140
partition
stringclasses
3 values
text
stringlengths
73
34.1k
language
stringclasses
1 value
meta_information
dict
q175700
DirectoryBuilder.findIndex
test
/**
 * Look for this partition's index file (partitionName + suffix) in the directory.
 * When present, record its path, last-modified time, and size on this builder.
 *
 * @return true if the index file exists
 * @throws IOException on error reading the file's attributes
 */
public boolean findIndex() throws IOException {
  Path indexPath = Paths.get(dir.toString(), partitionName + suffix);
  if (!Files.exists(indexPath)) {
    return false;
  }
  this.index = indexPath;
  BasicFileAttributes attr = Files.readAttributes(indexPath, BasicFileAttributes.class);
  this.indexLastModified = attr.lastModifiedTime();
  this.indexSize = attr.size();
  return true;
}
java
{ "resource": "" }
q175701
DirectoryBuilder.isLeaf
test
/**
 * Decide whether this directory is a leaf partition (mostly files) or a directory
 * partition (mostly subdirectories), sampling at most the first 100 entries.
 * The decision is cached in partitionStatus; subsequent calls reuse it.
 */
private boolean isLeaf(IndexReader indexReader) throws IOException {
  if (partitionStatus == PartitionStatus.unknown) {
    int nDirs = 0;
    int nFiles = 0;
    int sampled = 0;
    try (DirectoryStream<Path> dirStream = Files.newDirectoryStream(dir)) {
      for (Path p : dirStream) {
        if (sampled++ >= 100) break; // sample only the first 100 entries
        BasicFileAttributes attr = Files.readAttributes(p, BasicFileAttributes.class);
        if (attr.isDirectory()) {
          nDirs++;
        } else {
          nFiles++;
        }
      }
    }
    partitionStatus = (nFiles > nDirs) ? PartitionStatus.isLeaf : PartitionStatus.isDirectoryPartition;
  }
  return partitionStatus == PartitionStatus.isLeaf;
}
java
{ "resource": "" }
q175702
DirectoryBuilder.constructChildren
test
public List<DirectoryBuilder> constructChildren(IndexReader indexReader, CollectionUpdateType forceCollection) throws IOException { if (childrenConstructed) return children; if (index != null && forceCollection == CollectionUpdateType.nocheck) { // use index if it exists constructChildrenFromIndex(indexReader, false); } else { scanForChildren(); } //once we have found children, we know that this is a time partition partitionStatus = (children.size() > 0) ? PartitionStatus.isDirectoryPartition : PartitionStatus.isLeaf; childrenConstructed = true; // otherwise we are good return children; }
java
{ "resource": "" }
q175703
DirectoryBuilder.scanForChildren
test
/**
 * Scan this directory for immediate subdirectories and add each as a child
 * DirectoryBuilder; regular files are ignored. Sets childrenConstructed when done.
 * IOExceptions are printed and swallowed (best-effort scan).
 */
private void scanForChildren() {
  if (debug) System.out.printf("DirectoryBuilder.scanForChildren on %s ", dir);
  int count = 0;
  try (DirectoryStream<Path> ds = Files.newDirectoryStream(dir)) {
    for (Path p : ds) {
      BasicFileAttributes attr = Files.readAttributes(p, BasicFileAttributes.class);
      if (attr.isDirectory()) {
        children.add(new DirectoryBuilder(topCollectionName, p, attr, suffix));
        if (debug && (++count % 10 == 0)) System.out.printf("%d ", count); // progress tick every 10 dirs
      }
    }
  } catch (IOException e) {
    // NOTE(review): consider routing through a logger instead of printStackTrace — confirm logger availability in this class
    e.printStackTrace();
  }
  if (debug) System.out.printf("done=%d%n", count);
  childrenConstructed = true;
}
java
{ "resource": "" }
q175704
DirectoryBuilder.readFilesFromIndex
test
/**
 * Read the MFiles recorded in this directory's index file.
 *
 * @return the files listed in the index; empty when no index is present
 */
public List<MFile> readFilesFromIndex(IndexReader indexReader) throws IOException {
  List<MFile> result = new ArrayList<>(100);
  if (index != null) {
    indexReader.readMFiles(index, result);
  }
  return result;
}
java
{ "resource": "" }
q175705
GridDataset.getGridsets
test
/** @return a snapshot list of all Gridsets in this dataset. */
public List<ucar.nc2.dt.GridDataset.Gridset> getGridsets() {
  return new ArrayList<>(gridsetHash.values());
}
java
{ "resource": "" }
q175706
FileCache.acquire
test
/**
 * Acquire a FileCacheable for the given dataset, using its true URL as the
 * location, with no buffer-size hint (-1) and a null final argument
 * (presumably an spiObject/extra parameter — confirm against the full
 * acquire() overload). Delegates to the full acquire().
 */
public FileCacheable acquire(FileFactory factory, DatasetUrl durl, ucar.nc2.util.CancelTask cancelTask) throws IOException {
  return acquire(factory, durl.trueurl, durl, -1, cancelTask, null);
}
java
{ "resource": "" }
q175707
FileCache.remove
test
/**
 * Remove a cached file entry: detach it from its CacheElement and the files
 * map, unhook the cache from the FileCacheable, and really close it.
 * Close failures are logged, never rethrown.
 */
private void remove(CacheElement.CacheFile want) {
  want.remove(); // detach from the containing CacheElement's list
  files.remove(want.ncfile);
  try {
    want.ncfile.setFileCache(null); // unhook the caching
    want.ncfile.close();
  } catch (IOException e) {
    log.error("close failed on "+want.ncfile.getLocation(), e);
  }
  want.ncfile = null; // drop the reference after close
}
java
{ "resource": "" }
q175708
FileCache.eject
test
/**
 * Forcibly remove and close every cached file registered under the given
 * hash key. No-op when the cache is disabled or the key is absent.
 * Synchronizes on the CacheElement while traversing its list.
 */
@Override
public void eject(Object hashKey) {
  if (disabled.get()) return;

  // see if its in the cache
  CacheElement wantCacheElem = cache.get(hashKey);
  if (wantCacheElem == null) return;

  synchronized (wantCacheElem) { // synch in order to traverse the list
    for (CacheElement.CacheFile want : wantCacheElem.list) { // LOOK can we use remove(want); ??
      files.remove(want.ncfile);
      try {
        want.ncfile.setFileCache(null); // unhook the caching
        want.ncfile.close(); // really close the file
        log.debug("close "+want.ncfile.getLocation());
      } catch (IOException e) {
        log.error("close failed on "+want.ncfile.getLocation(), e);
      }
      want.ncfile = null;
      if (debugPrint) System.out.println(" FileCache " + name + " eject " + hashKey);
    }
    wantCacheElem.list.clear();
  }
  cache.remove(hashKey);
}
java
{ "resource": "" }
q175709
FileCache.release
test
/**
 * Release a file back to the cache: update its access stats, mark it
 * unlocked, and invoke its release() hook. When the cache is disabled the
 * file is truly closed instead. A release of an already-unlocked file is
 * logged as a warning but still processed.
 *
 * @return true if the file was found in the cache and released
 */
@Override
public boolean release(FileCacheable ncfile) throws IOException {
  if (ncfile == null) return false;

  if (disabled.get()) {
    ncfile.setFileCache(null); // prevent infinite loops
    ncfile.close();
    return false;
  }

  // find it in the file cache
  CacheElement.CacheFile file = files.get(ncfile); // using hashCode of the FileCacheable
  if (file != null) {
    if (!file.isLocked.get()) {
      cacheLog.warn("FileCache " + name + " release " + ncfile.getLocation() + " not locked; hash= "+ncfile.hashCode());
    }
    file.lastAccessed = System.currentTimeMillis();
    file.countAccessed++;
    file.isLocked.set(false);
    file.ncfile.release();

    if (cacheLog.isDebugEnabled())
      cacheLog.debug("FileCache " + name + " release " + ncfile.getLocation()+"; hash= "+ncfile.hashCode());
    if (debugPrint)
      System.out.printf(" FileCache %s release %s lock=%s count=%d%n", name, ncfile.getLocation(), file.isLocked.get(), countLocked());
    return true;
  }
  return false;
  // throw new IOException("FileCache " + name + " release does not have file in cache = " + ncfile.getLocation());
}
java
{ "resource": "" }
q175710
BufrNumbers.int4
test
private static int int4(int a, int b, int c, int d) { // all bits set to ones if (a == 0xff && b == 0xff && c == 0xff && d == 0xff) return UNDEFINED; return (1 - ((a & 128) >> 6)) * ((a & 127) << 24 | b << 16 | c << 8 | d); }
java
{ "resource": "" }
q175711
FileCacheARC.updateInCache
test
/**
 * Mark the element as most-recently-accessed by re-inserting it into the
 * shadow (ordering) cache.
 * NOTE(review): firstKey() == elem is a reference-equality shortcut — it only
 * short-circuits when the exact same instance is already first; confirm this
 * is intended rather than equals(). The printf branch looks like leftover
 * debugging output.
 */
private CacheElement updateInCache(CacheElement elem) {
  if (shadowCache.firstKey() == elem) return elem;

  elem.updateAccessed();
  CacheElement prev = shadowCache.put(elem, elem); // faster if we could just insert at the top of the list. maybe we need to use LinkedList ?
  if (prev != null && (elem != prev)) {
    // a different instance was displaced: dump comparison info for diagnosis
    CacheElementComparator cc = new CacheElementComparator();
    System.out.printf("elem != prev compare=%d%n", cc.compare(elem, prev));
    System.out.printf("hash elem =%d prev=%d%n", elem.hashCode(), prev.hashCode());
  }
  return elem;
}
java
{ "resource": "" }
q175712
FileCacheARC.clearCache
test
/**
 * Remove files from the cache and close them. With force=true everything is
 * closed (locked files too, with a warning); otherwise only unlocked files
 * are removed — each is atomically locked first so no other thread can
 * acquire it — and empty cache elements are dropped.
 * NOTE(review): cache.remove() inside the for-each over cache.values() is
 * only safe if cache is a concurrent map — confirm the field's declared type.
 */
public synchronized void clearCache(boolean force) {
  List<CacheElement.CacheFile> deleteList = new ArrayList<>(2 * cache.size());

  if (force) {
    cache.clear(); // deletes everything from the cache
    deleteList.addAll(files.values()); // add everything to the delete list
    files.clear();
    // counter.set(0);

  } else {
    // add unlocked files to the delete list, remove from files hash
    Iterator<CacheElement.CacheFile> iter = files.values().iterator();
    while (iter.hasNext()) {
      CacheElement.CacheFile file = iter.next();
      if (file.isLocked.compareAndSet(false, true)) { // claim the lock so no one else acquires it mid-delete
        file.remove(); // remove from the containing CacheElement
        deleteList.add(file);
        iter.remove();
      }
    }

    // remove empty cache elements
    for (CacheElement elem : cache.values()) {
      if (elem.list.size() == 0)
        cache.remove(elem.hashKey);
    }
  }

  // close all files in deleteList
  for (CacheElement.CacheFile file : deleteList) {
    if (force && file.isLocked.get())
      cacheLog.warn("FileCacheARC " + name + " force close locked file= " + file);
    //counter.decrementAndGet();

    if (file.ncfile == null) continue;
    try {
      file.ncfile.setFileCache(null);
      file.ncfile.close();
      file.ncfile = null; // help the gc
    } catch (IOException e) {
      log.error("FileCacheARC " + name + " close failed on " + file);
    }
  }
  if (cacheLog.isDebugEnabled())
    cacheLog.debug("*FileCacheARC " + name + " clearCache force= " + force + " deleted= " + deleteList.size() + " left=" + files.size());
  //System.out.println("\n*NetcdfFileCache.clearCache force= " + force + " deleted= " + deleteList.size() + " left=" + counter.get());
}
java
{ "resource": "" }
q175713
GridCoordSys.makeVerticalTransform
test
void makeVerticalTransform(GridDataset gds, Formatter parseInfo) { if (vt != null) return; // already done if (vCT == null) return; // no vt vt = vCT.makeVerticalTransform(gds.getNetcdfDataset(), timeDim); if (vt == null) { if (parseInfo != null) parseInfo.format(" - ERR can't make VerticalTransform = %s%n", vCT.getVerticalTransformType()); } else { if (parseInfo != null) parseInfo.format(" - VerticalTransform = %s%n", vCT.getVerticalTransformType()); } }
java
{ "resource": "" }
q175714
GridCoordSys.isGlobalLon
test
/**
 * A lat/lon grid is global in longitude when its 1D longitude axis spans at
 * least 360 degrees between its first and last coordinate edges.
 */
@Override
public boolean isGlobalLon() {
  if (!isLatLon || !(horizXaxis instanceof CoordinateAxis1D)) return false;
  CoordinateAxis1D lonAxis = (CoordinateAxis1D) horizXaxis;
  double edge0 = lonAxis.getCoordEdge(0);
  double edgeN = lonAxis.getCoordEdge((int) lonAxis.getSize()); // upper edge of the last interval
  return Math.abs(edgeN - edge0) >= 360;
}
java
{ "resource": "" }
q175715
GridCoordSys.isZPositive
test
/**
 * Whether the vertical axis increases upward: honor the axis "positive"
 * attribute when present; otherwise Height axes point up and Pressure axes
 * point down; all other axis types default to up.
 */
@Override
public boolean isZPositive() {
  if (vertZaxis == null) return false;
  String positive = vertZaxis.getPositive();
  if (positive != null) {
    return positive.equalsIgnoreCase(ucar.nc2.constants.CF.POSITIVE_UP);
  }
  if (vertZaxis.getAxisType() == AxisType.Height) return true;
  return vertZaxis.getAxisType() != AxisType.Pressure;
}
java
{ "resource": "" }
q175716
GridCoordSys.findXYindexFromCoord
test
/**
 * Find the grid (x,y) index containing the given projection coordinate.
 * 1D axes are searched independently; 2D axes use a GridCoordinate2D search.
 * On return result[0] is the x index and result[1] the y index; the 2D path
 * yields (-1,-1) when the point is not found.
 */
@Override
public int[] findXYindexFromCoord(double x_coord, double y_coord, int[] result) {
  if (result == null) result = new int[2];

  if ((horizXaxis instanceof CoordinateAxis1D) && (horizYaxis instanceof CoordinateAxis1D)) {
    result[0] = ((CoordinateAxis1D) horizXaxis).findCoordElement(x_coord);
    result[1] = ((CoordinateAxis1D) horizYaxis).findCoordElement(y_coord);
    return result;

  } else if ((horizXaxis instanceof CoordinateAxis2D) && (horizYaxis instanceof CoordinateAxis2D)) {
    if (g2d == null) g2d = new GridCoordinate2D((CoordinateAxis2D) horizYaxis, (CoordinateAxis2D) horizXaxis);
    int[] result2 = new int[2];
    boolean found = g2d.findCoordElement(y_coord, x_coord, result2);
    if (found) {
      // GridCoordinate2D returns (row, col); swap into (x, y) order
      result[0] = result2[1];
      result[1] = result2[0];
    } else {
      result[0] = -1;
      result[1] = -1;
    }
    return result;
  }

  // cant happen
  throw new IllegalStateException("GridCoordSystem.findXYindexFromCoord");
}
java
{ "resource": "" }
q175717
GridCoordSys.findXYindexFromCoordBounded
test
/**
 * Like findXYindexFromCoord, but out-of-range coordinates are clamped to the
 * nearest valid index (the 2D path accepts the search's best guess rather
 * than reporting not-found). result[0] is the x index, result[1] the y index.
 */
@Override
public int[] findXYindexFromCoordBounded(double x_coord, double y_coord, int[] result) {
  if (result == null) result = new int[2];

  if ((horizXaxis instanceof CoordinateAxis1D) && (horizYaxis instanceof CoordinateAxis1D)) {
    result[0] = ((CoordinateAxis1D) horizXaxis).findCoordElementBounded(x_coord);
    result[1] = ((CoordinateAxis1D) horizYaxis).findCoordElementBounded(y_coord);
    return result;

  } else if ((horizXaxis instanceof CoordinateAxis2D) && (horizYaxis instanceof CoordinateAxis2D)) {
    if (g2d == null) g2d = new GridCoordinate2D((CoordinateAxis2D) horizYaxis, (CoordinateAxis2D) horizXaxis);
    int[] result2 = new int[2];
    g2d.findCoordElement(y_coord, x_coord, result2); // returns best guess
    // GridCoordinate2D returns (row, col); swap into (x, y) order
    result[0] = result2[1];
    result[1] = result2[0];
    return result;
  }

  // cant happen
  throw new IllegalStateException("GridCoordSystem.findXYindexFromCoord");
}
java
{ "resource": "" }
q175718
GridCoordSys.findXYindexFromLatLon
test
/**
 * Find the grid (x,y) index containing a lat/lon point, by projecting the
 * point into the data projection's coordinate space first.
 */
@Override
public int[] findXYindexFromLatLon(double lat, double lon, int[] result) {
  Projection dataProjection = getProjection();
  ProjectionPoint pp = dataProjection.latLonToProj(new LatLonPointImpl(lat, lon), new ProjectionPointImpl());
  return findXYindexFromCoord(pp.getX(), pp.getY(), result);
}
java
{ "resource": "" }
q175719
GridCoordSys.findXYindexFromLatLonBounded
test
/**
 * Like findXYindexFromLatLon, but clamps out-of-range points to the nearest
 * valid index via findXYindexFromCoordBounded.
 */
@Override
public int[] findXYindexFromLatLonBounded(double lat, double lon, int[] result) {
  Projection dataProjection = getProjection();
  ProjectionPoint pp = dataProjection.latLonToProj(new LatLonPointImpl(lat, lon), new ProjectionPointImpl());
  return findXYindexFromCoordBounded(pp.getX(), pp.getY(), result);
}
java
{ "resource": "" }
q175720
MyTextField.getItemPos
test
protected int getItemPos() { if (nitems < 1) return -arrow_size; // dont show indicator else if (nitems == 1) return b.width/2; // indicator in center int item = table.getSelectedRowIndex(); // selected item int eff_width = b.width - 2*arrow_size; // effective width int pixel = (item * eff_width)/(nitems-1); // divided into n-1 intervals return pixel+arrow_size; }
java
{ "resource": "" }
q175721
MyTextField.getItem
test
protected int getItem( int pixel) { if (nitems < 2) return 0; int eff_width = b.width - 2*arrow_size; // effective width double fitem = ((double) (pixel-arrow_size)*(nitems-1)) / eff_width; int item = (int)(fitem+.5); item = Math.max( Math.min(item, nitems-1), 0); return item; }
java
{ "resource": "" }
q175722
DirectoryCollection.iterateOverMFileCollection
test
public void iterateOverMFileCollection(Visitor visit) throws IOException { if (debug) System.out.printf(" iterateOverMFileCollection %s ", collectionDir); int count = 0; try (DirectoryStream<Path> ds = Files.newDirectoryStream(collectionDir, new MyStreamFilter())) { for (Path p : ds) { try { BasicFileAttributes attr = Files.readAttributes(p, BasicFileAttributes.class); if (!attr.isDirectory()) visit.consume(new MFileOS7(p)); if (debug) System.out.printf("%d ", count++); } catch (IOException ioe) { // catch error and skip file logger.error("Failed to read attributes from file found in Files.newDirectoryStream ", ioe); } } } if (debug) System.out.printf("%d%n", count); }
java
{ "resource": "" }
q175723
TdsDownloader.getRemoteFiles
test
/**
 * Download the TDS log-directory listing for this downloader's type, then run
 * a SwingWorker that processes each listed log line. Progress and errors are
 * appended to the text area; the cancel task stops the per-line loop.
 */
public void getRemoteFiles(final CancelTask _cancel) {
  this.cancel = _cancel;
  String urls = config.getServerPrefix() + "/thredds/admin/log/"+type+"/";
  ta.append(String.format("Download URL = %s%n", urls));

  String contents = null;
  try (HTTPMethod method = HTTPFactory.Get(session, urls)) {
    int statusCode = method.execute();
    if (statusCode == 200)
      contents = method.getResponseAsString();

    if ((contents == null) || (contents.length() == 0)) {
      ta.append(String.format("Failed to get logs at URL = %s%n%n", urls));
      return;
    } else {
      ta.append(String.format("Logs at URL = %s%n%s%n", urls, contents));
    }
  } catch (Throwable t) {
    ta.append(String.format("Failed to get logs at URL = %s error = %s%n%n", urls, t.getMessage()));
    t.printStackTrace();
    return;
  }

  // update text area in background http://technobuz.com/2009/05/update-jtextarea-dynamically/
  final String list = contents;
  SwingWorker worker = new SwingWorker<String, Void>() {

    @Override
    protected String doInBackground() throws Exception {
      try {
        ta.append(String.format("Local log files stored in = %s%n%n", localDir));
        String[] lines = list.split("\n");
        for (String line : lines) {
          // presumably the RemoteLog constructor performs the per-file fetch — confirm
          new RemoteLog(line.trim());
          if (cancel.isCancel()) {
            break;
          }
        }
      } catch (Throwable t) {
        t.printStackTrace();
      }
      return null;
    }

    public void done() {
      if (cancel.isCancel())
        ta.append(String.format("Download was cancelled for %s%n", type));
      else
        ta.append(String.format("Download complete for %s%n", type));
    }
  };

  // do in background
  worker.execute();
}
java
{ "resource": "" }
q175724
FslHrrrLocalTables.getSubCenterName
test
/**
 * Resolve FSL/FRD subcenter ids to names; unknown ids fall back to the
 * standard tables via the superclass. Subcenter 0 has no name.
 */
@Override
public String getSubCenterName(int center, int subcenter) {
  if (subcenter == 0) return null;
  if (subcenter == 1) return "FSL/FRD Regional Analysis and Prediction Branch";
  if (subcenter == 2) return "FSL/FRD Local Analysis and Prediction Branch";
  return super.getSubCenterName(center, subcenter);
}
java
{ "resource": "" }
q175725
CalendarPeriod.fromUnitString
test
public static CalendarPeriod.Field fromUnitString(String udunit) { udunit = udunit.trim(); udunit = udunit.toLowerCase(); if (udunit.equals("s")) return Field.Second; if (udunit.equals("ms")) return Field.Millisec; // eliminate plurals if (udunit.endsWith("s")) udunit = udunit.substring(0, udunit.length()-1); switch (udunit) { case "second": case "sec": return Field.Second; case "millisecond": case "millisec": case "msec": return Field.Millisec; case "minute": case "min": return Field.Minute; case "hour": case "hr": case "h": return Field.Hour; case "day": case "d": return Field.Day; case "month": case "mon": return Field.Month; case "year": case "yr": return Field.Year; default: throw new IllegalArgumentException("cant convert " + udunit + " to CalendarPeriod"); } }
java
{ "resource": "" }
q175726
CalendarPeriod.of
test
/**
 * Obtain a (possibly cached) CalendarPeriod for the given value and field.
 * The cache is best-effort: when absent, a fresh instance is returned.
 */
public static CalendarPeriod of(int value, Field field) {
  CalendarPeriod candidate = new CalendarPeriod(value, field);
  if (cache == null) return candidate;
  CalendarPeriod cached = cache.getIfPresent(candidate);
  if (cached == null) {
    cache.put(candidate, candidate);
    return candidate;
  }
  return cached;
}
java
{ "resource": "" }
q175727
CalendarPeriod.of
test
/**
 * Parse a udunit period string such as "3 hours" or "day" into a
 * CalendarPeriod ("day" is shorthand for "1 day").
 *
 * @return the period, or null when the string has the wrong shape or a
 *     non-numeric count; an unknown unit name still throws
 *     IllegalArgumentException (from fromUnitString), as before
 */
public static CalendarPeriod of(String udunit) {
  String[] split = StringUtil2.splitString(udunit);
  int value;
  String units;

  if (split.length == 1) {
    value = 1; // bare unit means "1 <unit>"
    units = split[0];
  } else if (split.length == 2) {
    try {
      value = Integer.parseInt(split[0]);
    } catch (NumberFormatException e) {
      // was catch (Throwable): far too broad — it swallowed Errors and
      // unrelated RuntimeExceptions; only a bad number should yield null
      return null;
    }
    units = split[1];
  } else {
    return null;
  }

  CalendarPeriod.Field unit = CalendarPeriod.fromUnitString(units);
  return CalendarPeriod.of(value, unit);
}
java
{ "resource": "" }
q175728
CalendarPeriod.subtract
test
/**
 * Number of whole periods of this size between start and end. Logs a warning
 * when the difference is not an exact multiple of the period length.
 */
public int subtract(CalendarDate start, CalendarDate end) {
  long diffMsecs = end.getDifferenceInMsecs(start);
  int periodMsecs = millisecs();
  if (diffMsecs % periodMsecs != 0) {
    log.warn("roundoff error");
  }
  return (int) (diffMsecs / periodMsecs);
}
java
{ "resource": "" }
q175729
CalendarPeriod.getConvertFactor
test
/**
 * Conversion factor from another period to this one, based on nominal
 * millisecond lengths. Month/Year lengths are only nominal, hence the warning.
 */
public double getConvertFactor(CalendarPeriod from) {
  boolean calendarField = (field == CalendarPeriod.Field.Month) || (field == CalendarPeriod.Field.Year);
  if (calendarField) {
    log.warn(" CalendarDate.convert on Month or Year");
  }
  return (double) from.millisecs() / millisecs();
}
java
{ "resource": "" }
q175730
CalendarPeriod.getValueInMillisecs
test
/**
 * Nominal length of this period in milliseconds: months count as 30 days and
 * years as 365 days; all other fields use their exact length via millisecs().
 */
public double getValueInMillisecs() {
  final double msecsPerDay = 24.0 * 60.0 * 60.0 * 1000.0;
  if (field == CalendarPeriod.Field.Month) return 30.0 * msecsPerDay * value;
  if (field == CalendarPeriod.Field.Year) return 365.0 * msecsPerDay * value;
  return millisecs();
}
java
{ "resource": "" }
q175731
CalendarPeriod.getOffset
test
/**
 * Offset from start to end in units of this period's field, computed with a
 * 5-second slop so near-boundary times still count the intended number of
 * periods.
 * NOTE(review): the slop is applied asymmetrically — added to the later time
 * when start &lt; end, but to start otherwise; confirm this is intentional.
 */
public int getOffset(CalendarDate start, CalendarDate end) {
  if (start.equals(end)) return 0;
  long start_millis = start.getDateTime().getMillis();
  long end_millis = end.getDateTime().getMillis();

  // 5 second slop
  Period p;
  if (start_millis < end_millis)
    p = new Period(start_millis, end_millis + 5000, getPeriodType());
  else
    p = new Period(start_millis+5000, end_millis, getPeriodType());

  return p.get(getDurationFieldType());
}
java
{ "resource": "" }
q175732
GempakGridServiceProvider.sync
test
public boolean sync() throws IOException { if ((gemreader.getInitFileSize() < raf.length()) && extendIndex) { gemreader.init(true); GridIndex index = gemreader.getGridIndex(); // reconstruct the ncfile objects ncfile.empty(); open(index, null); return true; } return false; }
java
{ "resource": "" }
q175733
GempakGridServiceProvider.initTables
test
/**
 * Load the GEMPAK grid parameter tables. Failure is not fatal (the provider
 * can operate without them) but is now reported with the cause — the original
 * printed a bare message, making failures impossible to diagnose.
 */
private void initTables() {
  try {
    GempakGridParameterTable.addParameters(
        "resources/nj22/tables/gempak/wmogrib3.tbl");
    GempakGridParameterTable.addParameters(
        "resources/nj22/tables/gempak/ncepgrib2.tbl");
  } catch (Exception e) {
    // include the cause so the failure is diagnosable (was: message only)
    System.out.println("unable to init tables: " + e.getMessage());
    e.printStackTrace();
  }
}
java
{ "resource": "" }
q175734
DataRootPathMatcher.put
test
/**
 * Register a data root under its path in both lookup structures.
 *
 * @return true if the path was not already present in the tree set
 */
private boolean put(DataRootExt dataRootExt) {
  String path = dataRootExt.getPath();
  map.put(path, dataRootExt);
  return treeSet.add(path);
}
java
{ "resource": "" }
q175735
DataRootPathMatcher.findLongestPathMatch
test
/**
 * Find the longest registered path that is a prefix of the request path.
 * NOTE(review): relies on treeSet's ordering so that tailSet(reqPath) holds
 * the candidate prefixes — with natural String ordering a strict prefix sorts
 * BEFORE reqPath, so this presumably uses a custom/reversed comparator;
 * verify against the treeSet declaration.
 *
 * @return the matching path, or null if none matches
 */
public String findLongestPathMatch( String reqPath) {
  SortedSet<String> tail = treeSet.tailSet(reqPath);
  if (tail.isEmpty()) return null;
  String after = tail.first();
  if (reqPath.startsWith( after)) // common case
    return tail.first();

  // have to check more, until no common starting chars
  for (String key : tail) {
    if (reqPath.startsWith(key))
      return key;
    // terminate when there's no match at all.
    if (StringUtil2.match(reqPath, key) == 0)
      break;
  }

  return null;
}
java
{ "resource": "" }
q175736
DataRootPathMatcher.findDataRoot
test
/**
 * Find the DataRoot whose registered path is the longest match for the
 * request path.
 *
 * @return the DataRoot, or null when no registered path matches
 */
public DataRoot findDataRoot( String reqPath) {
  String path = findLongestPathMatch(reqPath);
  if (path == null) return null;
  DataRootExt dataRootExt = map.get(path);
  if (dataRootExt == null) {
    // treeSet and map should always agree; this indicates an internal inconsistency
    logger.error("DataRootPathMatcher found path {} but not in map", path);
    return null;
  }
  return convert2DataRoot(dataRootExt);
}
java
{ "resource": "" }
q175737
DataRootPathMatcher.convert2DataRoot
test
/**
 * Resolve a DataRootExt to its DataRoot, lazily reading the owning catalog on
 * first use and caching the result back on the DataRootExt.
 */
public @Nonnull DataRoot convert2DataRoot(DataRootExt dataRootExt) {
  DataRoot dataRoot = dataRootExt.getDataRoot();
  if (dataRoot != null) return dataRoot;

  // otherwise must read the catalog that its in
  dataRoot = readDataRootFromCatalog(dataRootExt);
  dataRootExt.setDataRoot(dataRoot);
  return dataRoot;
}
java
{ "resource": "" }
q175738
NcssParamsBean.getCalendarDateRange
test
public CalendarDateRange getCalendarDateRange(Calendar cal) { if (dateRange == null) return null; if (cal.equals(Calendar.getDefault())) return dateRange; // otherwise must reparse return makeCalendarDateRange(cal); }
java
{ "resource": "" }
q175739
CoordinateSharerBest.reindex
test
public List<Integer> reindex(List<Coordinate> coords) { List<Integer> result = new ArrayList<>(); for (Coordinate coord : coords) { Coordinate sub = swap.get(coord); Coordinate use = (sub == null) ? coord : sub; Integer idx = indexMap.get(use); // index into unionCoords if (idx == null) { throw new IllegalStateException(); } result.add(idx); } return result; }
java
{ "resource": "" }
q175740
NetcdfFileWriter.createNew
test
/**
 * Create a new netcdf file at the given location for writing.
 * The hard-coded false presumably means "not an existing file" — confirm
 * against the NetcdfFileWriter constructor's parameter list.
 *
 * @param version file format version to write
 * @param location file path to create
 * @param chunker chunking strategy (netcdf-4 only)
 */
static public NetcdfFileWriter createNew(Version version, String location, Nc4Chunking chunker) throws IOException {
  return new NetcdfFileWriter(version, location, false, chunker);
}
java
{ "resource": "" }
q175741
NetcdfFileWriter.addGroup
test
/**
 * Add a new group under the given parent (define mode only). A null parent
 * returns the root group itself without creating anything.
 */
public Group addGroup(Group parent, String name) {
  if (!defineMode) throw new UnsupportedOperationException("not in define mode");
  if (parent == null) {
    return ncfile.getRootGroup();
  }
  Group child = new Group(ncfile, parent, name);
  parent.addGroup(child);
  return child;
}
java
{ "resource": "" }
q175742
NetcdfFileWriter.addTypedef
test
/**
 * Add an enum typedef to a group (define mode only).
 *
 * @throws UnsupportedOperationException when not in define mode
 * @throws IllegalArgumentException when the file version lacks the extended model
 */
public EnumTypedef addTypedef(Group g, EnumTypedef td) {
  if (!defineMode) throw new UnsupportedOperationException("not in define mode");
  if (!version.isExtendedModel())
    throw new IllegalArgumentException("Enum type only supported in extended model, this version is="+version);
  g.addEnumeration(td);
  return td;
}
java
{ "resource": "" }
q175743
NetcdfFileWriter.deleteGroupAttribute
test
/**
 * Remove an attribute from a group (define mode only). A null group means
 * the root group.
 *
 * @return the removed attribute, or null when not found
 */
public Attribute deleteGroupAttribute(Group g, String attName) {
  if (!defineMode) throw new UnsupportedOperationException("not in define mode");
  Group group = (g == null) ? ncfile.getRootGroup() : g;
  Attribute att = group.findAttribute(attName);
  if (att == null) {
    return null;
  }
  group.remove(att);
  return att;
}
java
{ "resource": "" }
q175744
NetcdfFileWriter.renameGroupAttribute
test
/**
 * Rename a group attribute (define mode only). Illegal new names are
 * sanitized with a warning; a null group means the root group.
 *
 * @return the renamed attribute, or null when the old name was not found
 */
public Attribute renameGroupAttribute(Group g, String oldName, String newName) {
  if (!defineMode) throw new UnsupportedOperationException("not in define mode");

  String validName = newName;
  if (!isValidObjectName(validName)) {
    validName = createValidObjectName(newName);
    log.warn("illegal attribute name= " + newName + " change to " + validName);
  }

  Group group = (g == null) ? ncfile.getRootGroup() : g;
  Attribute old = group.findAttribute(oldName);
  if (old == null) {
    return null;
  }
  group.remove(old);
  Attribute renamed = new Attribute(validName, old.getValues());
  group.addAttribute(renamed);
  return renamed;
}
java
{ "resource": "" }
q175745
NetcdfFileWriter.addRecordStructure
test
/**
 * Ask the IOSP to add the netcdf-3 record structure ("record" variable).
 * Returns null for non-netcdf3 files.
 * NOTE(review): assumes sendIospMessage returns a non-null Boolean here — a
 * null would NPE on unboxing; confirm the IOSP message contract.
 *
 * @throws IllegalStateException when the IOSP refuses to add the record variable
 */
public Structure addRecordStructure() {
  if (version != Version.netcdf3) return null;
  boolean ok = (Boolean) ncfile.sendIospMessage(NetcdfFile.IOSP_MESSAGE_ADD_RECORD_STRUCTURE);
  if (!ok) throw new IllegalStateException("can't add record variable");
  return (Structure) ncfile.findVariable("record");
}
java
{ "resource": "" }
q175746
NetcdfFileWriter.abort
test
/**
 * Abort writing: close the underlying IOSP writer without finishing the file.
 * Safe to call repeatedly.
 *
 * @throws java.io.IOException if closing the writer fails
 */
public void abort() throws java.io.IOException {
  if (spiw != null) {
    try {
      spiw.close();
    } finally {
      // was only nulled on success: a failed close left spiw set, so a later
      // abort() would re-close an already-broken writer
      spiw = null;
    }
  }
}
java
{ "resource": "" }
q175747
Bean.writeProperties
test
/**
 * Write this bean's properties to the given writer, lazily creating the
 * BeanParser for the bean's class on first use.
 */
public void writeProperties(PrintWriter out) throws IOException {
  if (p == null)
    p = BeanParser.getParser( o.getClass());
  p.writeProperties(o, out);
}
java
{ "resource": "" }
q175748
ThreddsMetadataExtractor.extract
test
/**
 * Extract THREDDS metadata (geospatial coverage, time coverage, variable
 * groups) from a dataset by opening it as a feature dataset. Grid and point
 * feature types are handled; other types yield empty metadata. Returns null
 * when the open fails fatally; I/O errors during extraction are logged and
 * the partial metadata returned.
 */
public ThreddsMetadata extract(Dataset threddsDataset) throws IOException {
  ThreddsMetadata metadata = new ThreddsMetadata();
  Map<String, Object> flds = metadata.getFlds();

  try ( DataFactory.Result result = new DataFactory().openFeatureDataset(threddsDataset, null)) {
    if (result.fatalError) {
      // NOTE(review): printf-style "%s%n" placeholders look wrong for a {}-style logger — confirm the logger API
      logger.warn(" openFeatureDataset failed, errs=%s%n", result.errLog);
      return null;
    }

    if (result.featureType.isCoverageFeatureType()) {
      GridDataset gridDataset = (GridDataset) result.featureDataset; // LOOK wrong
      flds.put(Dataset.GeospatialCoverage, extractGeospatial(gridDataset));
      CalendarDateRange tc = extractCalendarDateRange(gridDataset);
      if (tc != null) flds.put(Dataset.TimeCoverage, tc);
      ThreddsMetadata.VariableGroup vars = extractVariables(threddsDataset.getDataFormatName(), gridDataset);
      if (vars != null) flds.put(Dataset.VariableGroups, vars);

    } else if (result.featureType.isPointFeatureType()) {
      PointDatasetImpl pobsDataset = (PointDatasetImpl) result.featureDataset;
      LatLonRect llbb = pobsDataset.getBoundingBox();
      if (null != llbb) flds.put(Dataset.GeospatialCoverage, new ThreddsMetadata.GeospatialCoverage(llbb, null, 0.0, 0.0));
      CalendarDateRange tc = extractCalendarDateRange(pobsDataset);
      if (tc != null) flds.put(Dataset.TimeCoverage, tc);
      ThreddsMetadata.VariableGroup vars = extractVariables(pobsDataset);
      if (vars != null) flds.put(Dataset.VariableGroups, vars);
    }

  } catch (IOException ioe) {
    logger.error("Error opening dataset " + threddsDataset.getName(), ioe);
  }

  return metadata;
}
java
{ "resource": "" }
q175749
OffsetUnit.myDivideInto
test
/**
 * Divide this unit into another, ignoring offsets: both sides are reduced to
 * their underlying (offset-free) units before delegating.
 */
@Override
protected Unit myDivideInto(final Unit that) throws OperationException {
  Unit divisor = (that instanceof OffsetUnit) ? ((OffsetUnit) that).getUnit() : that;
  return getUnit().divideInto(divisor);
}
java
{ "resource": "" }
q175750
OffsetUnit.toDerivedUnit
test
/**
 * Convert a value in this offset unit into the derived (base) unit: add the
 * offset first, then convert through the underlying unit.
 *
 * @throws ConversionException when the underlying unit is not derivable
 */
public double toDerivedUnit(final double amount) throws ConversionException {
  if (!(_unit instanceof DerivableUnit)) {
    throw new ConversionException(this, getDerivedUnit());
  }
  DerivableUnit derivable = (DerivableUnit) getUnit();
  return derivable.toDerivedUnit(amount + getOffset());
}
java
{ "resource": "" }
q175751
OffsetUnit.fromDerivedUnit
test
/**
 * Convert a value in the derived (base) unit into this offset unit: convert
 * through the underlying unit first, then subtract the offset.
 *
 * @throws ConversionException when the underlying unit is not derivable
 */
public double fromDerivedUnit(final double amount) throws ConversionException {
  if (!(_unit instanceof DerivableUnit)) {
    throw new ConversionException(getDerivedUnit(), this);
  }
  DerivableUnit derivable = (DerivableUnit) getUnit();
  return derivable.fromDerivedUnit(amount) - getOffset();
}
java
{ "resource": "" }
q175752
ConvertD2N.convertNestedVariable
test
/**
 * Convert DAP data for a variable nested inside structures. When flatten is
 * true, walk the member-name chain from the outermost structure down to v,
 * extract that member's data, and unroll nested sequences into a flat array.
 */
public Array convertNestedVariable(ucar.nc2.Variable v, List<Range> section, DodsV dataV, boolean flatten)
        throws IOException, DAP2Exception {

  Array data = convertTopVariable(v, section, dataV);
  if (flatten) {
    ArrayStructure as = (ArrayStructure) data;

    // make list of names, outermost structure first
    List<String> names = new ArrayList<>();
    Variable nested = v;
    while (nested.isMemberOfStructure()) {
      names.add( 0, nested.getShortName());
      nested = nested.getParentStructure();
    }

    StructureMembers.Member m = findNested(as, names, v.getShortName());
    Array mdata = m.getDataArray();
    if (mdata instanceof ArraySequenceNested) { // gotta unroll
      ArraySequenceNested arraySeq = (ArraySequenceNested) mdata;
      return arraySeq.flatten();
    }
    return mdata;
  }
  return data;
}
java
{ "resource": "" }
q175753
ConvertD2N.convertTopVariable
test
/**
 * Convert DAP data for a top-level variable. Arrays of DAP Strings are mapped
 * to STRING or CHAR variables as the netcdf type requires; a scalar DString
 * destined for a CHAR variable gets a special-case conversion; everything
 * else passes through convert() unchanged.
 *
 * @throws IllegalArgumentException for a String array with an incompatible netcdf type
 */
public Array convertTopVariable(ucar.nc2.Variable v, List<Range> section, DodsV dataV)
        throws IOException, DAP2Exception {
  Array data = convert(dataV);

  // arrays of DAP Strings need type-specific handling
  if ((dataV.darray != null) && (dataV.bt instanceof DString)) {
    if (v.getDataType() == DataType.STRING)
      return convertStringArray(data, v);
    else if (v.getDataType() == DataType.CHAR)
      return convertStringArrayToChar(dataV.darray, v, section);
    else {
      String mess = "DODSVariable convertArray String invalid dataType= "+v.getDataType();
      logger.error(mess);
      throw new IllegalArgumentException( mess);
    }
  }

  if ((dataV.bt instanceof DString) && (v.getDataType() == DataType.CHAR)) {
    // special case: convert String back to CHAR
    return convertStringToChar(data, v);
  }

  return data;

  /* else { // the DGrid case comes here also
    // create the array, using DODS internal array so there's no copying
    dods.dap.PrimitiveVector pv = dataV.darray.getPrimitiveVector();
    Object storage = pv.getInternalStorage();
    //storage = widenArray( pv, storage); // LOOK data conversion if needed
    int[] shape = (section == null) ? v.getShape() : Range.getShape(section);
    return Array.factory( v.getDataType().getPrimitiveClassType(), shape, storage);
  } */
}
java
{ "resource": "" }
q175754
ConvertD2N.convert
test
/**
 * Convert a DodsV data node into a ucar.ma2 Array. Structures and sequences
 * (scalar or array) become ArrayStructures; string arrays are converted
 * element-wise; other primitive arrays wrap the DODS internal storage
 * directly, with no copy.
 *
 * @throws IllegalStateException for a DGrid without a darray, or an unknown base type
 */
public Array convert(DodsV dataV) throws IOException, DAP2Exception {

  // scalars
  if (dataV.darray == null) {
    if (dataV.bt instanceof DStructure) {
      ArrayStructure structArray = makeArrayStructure( dataV);
      iconvertDataStructure( (DStructure) dataV.bt, structArray.getStructureMembers());
      return structArray;

    } else if (dataV.bt instanceof DGrid) {
      throw new IllegalStateException( "DGrid without a darray");

    } else if (dataV.bt instanceof DSequence) {
      ArrayStructure structArray = makeArrayStructure( dataV);
      iconvertDataSequenceArray( (DSequence) dataV.bt, structArray.getStructureMembers());
      return structArray;

    } else { // scalar
      DataType dtype = dataV.getDataType();
      Array scalarData = Array.factory( dtype, new int[0]);
      IndexIterator scalarIndex = scalarData.getIndexIterator();
      iconvertDataPrimitiveScalar( dataV.bt, scalarIndex);
      return scalarData;
    }
  }

  // arrays
  if (dataV.darray != null) {
    if (dataV.bt instanceof DStructure) {
      ArrayStructure structArray = makeArrayStructure( dataV);
      iconvertDataStructureArray( dataV.darray, structArray.getStructureMembers());
      return structArray;

    } else if (dataV.bt instanceof DString) {
      return convertStringArray(dataV.darray);

    } else { // the DGrid case comes here also
      // create the array, using DODS internal array so there's no copying
      opendap.dap.PrimitiveVector pv = dataV.darray.getPrimitiveVector();
      Object storage = pv.getInternalStorage();
      DataType dtype = dataV.getDataType();
      return Array.factory( dtype, makeShape( dataV.darray), storage);
    }
  }

  String mess = "Unknown baseType "+dataV.bt.getClass().getName()+" name="+ dataV.getEncodedName();
  logger.error(mess);
  throw new IllegalStateException(mess);
}
java
{ "resource": "" }
q175755
MetadataManager.closeAll
test
/**
 * Close every open MetadataManager database, then close the shared Berkeley
 * DB environment. Iterates over a copy because close() mutates openDatabases.
 * Environment-close failures are logged, not rethrown.
 */
static synchronized public void closeAll() {
  List<MetadataManager> closeDatabases = new ArrayList<>(openDatabases);
  for (MetadataManager mm : closeDatabases) {
    if (debug) System.out.println(" close database " + mm.collectionName);
    mm.close();
  }
  openDatabases = new ArrayList<>(); // empty

  if (myEnv != null) {
    try {
      // Finally, close the store and environment.
      myEnv.close();
      myEnv = null;
      logger.info("closed bdb caching");
    } catch (DatabaseException dbe) {
      logger.error("Error closing bdb: ", dbe);
    }
  }
}
java
{ "resource": "" }
q175756
MetadataManager.openDatabase
test
/**
 * Open (once) the Berkeley DB database for this collection and register it in
 * the open-databases list. Read-only mode disallows create; writable
 * databases use deferred writes.
 */
private synchronized void openDatabase() {
  if (database != null) return; // already open
  DatabaseConfig dbConfig = new DatabaseConfig();
  dbConfig.setReadOnly(readOnly);
  dbConfig.setAllowCreate(!readOnly);
  if (!readOnly)
    dbConfig.setDeferredWrite(true);
  database = myEnv.openDatabase(null, collectionName, dbConfig);
  openDatabases.add(this);
}
java
{ "resource": "" }
q175757
CalendarDuration.convertToPeriod
test
/**
 * Convert a udunits time-unit name to a Joda Period of the given length.
 * Accepts singular and plural forms (a trailing 's' is stripped).
 *
 * Fixes: previously "seconds" was stripped to "second", which had no case and
 * threw; a bare "s" was stripped to "" and also threw. Adds common aliases.
 *
 * @param value number of units
 * @param udunit udunits unit name, e.g. "seconds", "hour", "days"
 * @return the equivalent Joda Period
 * @throws IllegalArgumentException if the unit is not recognized
 */
static org.joda.time.Period convertToPeriod(int value, String udunit) {
  if ("ms".equals(udunit))
    return Period.millis(value); // handle before plural-stripping would turn "ms" into "m"
  // strip a plural 's', but never reduce a bare "s" (seconds) to the empty string
  if (udunit.endsWith("s") && udunit.length() > 1)
    udunit = udunit.substring(0, udunit.length()-1);
  switch (udunit) {
    case "msec": return Period.millis(value);
    case "s":
    case "sec":
    case "second": return Period.seconds(value);
    case "min":
    case "minute": return Period.minutes(value);
    case "hour":
    case "hr":
    case "h": return Period.hours(value);
    case "day":
    case "d": return Period.days(value);
    case "week": return Period.weeks(value);
    case "month":
    case "mon": return Period.months(value);
    case "year":
    case "yr": return Period.years(value);
  }
  throw new IllegalArgumentException("cant convert "+ udunit +" to Joda Period");
}
java
{ "resource": "" }
q175758
ErddapStringArray.ensureCapacity
test
public void ensureCapacity(long minCapacity) { if (array.length < minCapacity) { //ensure minCapacity is < Integer.MAX_VALUE ErddapMath2.ensureArraySizeOkay(minCapacity, "StringArray"); //caller may know exact number needed, so don't double above 2x current size int newCapacity = (int)Math.min(Integer.MAX_VALUE - 1, array.length + (long)array.length); if (newCapacity < minCapacity) newCapacity = (int)minCapacity; //safe since checked above String[] newArray = new String[newCapacity]; System.arraycopy(array, 0, newArray, 0, size); array = newArray; //do last to minimize concurrency problems } }
java
{ "resource": "" }
q175759
ErddapStringArray.get
test
/**
 * Return the element at the given index.
 *
 * Fixes: a negative index previously fell through to a raw
 * ArrayIndexOutOfBoundsException from the array access; it now gets the same
 * informative IllegalArgumentException style as the too-large case.
 *
 * @param index 0 &lt;= index &lt; size
 * @throws IllegalArgumentException if index is out of range
 */
public String get(int index) {
  if (index >= size)
    throw new IllegalArgumentException(ErddapString2.ERROR +
      " in StringArray.get: index (" + index + ") >= size (" + size + ").");
  if (index < 0)
    throw new IllegalArgumentException(ErddapString2.ERROR +
      " in StringArray.get: index (" + index + ") is negative.");
  return array[index];
}
java
{ "resource": "" }
q175760
InvDatasetScan.makeCatalogForDirectory
test
/**
 * Build an InvCatalogImpl for the directory addressed by the given request path.
 * Returns null (after logging) on any failure: a path outside rootPath, a
 * filtered-out or non-collection target, or a catalog-generation error.
 *
 * NOTE(review): dsDirPath is reassigned to the *parent* of the requested
 * dataset below (to drop the "catalog.xml" filename) — confirm this is correct
 * for all request-path shapes.
 */
public InvCatalogImpl makeCatalogForDirectory( String orgPath, URI catURI ) {
  if ( log.isDebugEnabled()) {
    log.debug( "baseURI=" + catURI );
    log.debug( "orgPath=" + orgPath );
    log.debug( "rootPath=" + rootPath );
    log.debug( "scanLocation=" + scanLocation );
  }
  // Get the dataset path.
  String dsDirPath = translatePathToLocation( orgPath );
  if ( dsDirPath == null ) {
    String tmpMsg = "makeCatalogForDirectory(): Requesting path <" + orgPath + "> must start with \"" + rootPath + "\".";
    log.error( tmpMsg );
    return null;
  }
  // Setup and create catalog builder.
  CatalogBuilder catBuilder = buildCatalogBuilder();
  if ( catBuilder == null )
    return null;
  // A very round about way to remove the filename (e.g., "catalog.xml").
  // Note: Gets around "path separator at end of path" issues that are CrDs implementation dependant.
  // Note: Does not check that CrDs is allowed by filters.
  String dsPath = dsDirPath.substring( scanLocationCrDs.getPath().length() );
  if ( dsPath.startsWith( "/" ))
    dsPath = dsPath.substring( 1 );
  CrawlableDataset reqCrDs = scanLocationCrDs.getDescendant( dsPath );
  CrawlableDataset parent = reqCrDs.getParentDataset();
  if (parent == null) {
    log.error( "makeCatalogForDirectory(): I/O error getting parent crDs level <" + dsDirPath + ">: ");
    return null;
  }
  dsDirPath = parent.getPath();
  // Get the CrawlableDataset for the desired catalog level (checks that allowed by filters).
  CrawlableDataset catalogCrDs;
  try {
    catalogCrDs = catBuilder.requestCrawlableDataset( dsDirPath );
  } catch ( IOException e ) {
    log.error( "makeCatalogForDirectory(): I/O error getting catalog level <" + dsDirPath + ">: " + e.getMessage(), e );
    return null;
  }
  if ( catalogCrDs == null ) {
    log.warn( "makeCatalogForDirectory(): requested catalog level <" + dsDirPath + "> not allowed (filtered out).");
    return null;
  }
  if ( ! catalogCrDs.isCollection() ) {
    log.warn( "makeCatalogForDirectory(): requested catalog level <" + dsDirPath + "> is not a collection.");
    return null;
  }
  // Generate the desired catalog using the builder.
  InvCatalogImpl catalog;
  try {
    catalog = catBuilder.generateCatalog( catalogCrDs );
  } catch ( IOException e ) {
    log.error( "makeCatalogForDirectory(): catalog generation failed <" + catalogCrDs.getPath() + ">: " + e.getMessage() );
    return null;
  }
  // Set the catalog base URI.
  if ( catalog != null )
    catalog.setBaseURI( catURI );
  // InvDatasetImpl top = (InvDatasetImpl) catalog.getDataset();
  // // if we name it carefully, can get catalogRef to useProxy == true (disappear top dataset)
  // if ( service.isRelativeBase()) {
  //   pos = dataDir.lastIndexOf("/");
  //   String lastDir = (pos > 0) ? dataDir.substring(pos+1) : dataDir;
  //   String topName = lastDir.length() > 0 ? lastDir : getName();
  //   top.setName( topName);
  // }
  return catalog;
}
java
{ "resource": "" }
q175761
InvDatasetScan.makeProxyDsResolverCatalog
test
/**
 * Build the catalog used to resolve a proxy dataset request (the last segment
 * of the request path must name a configured ProxyDatasetHandler, e.g. a
 * "latest" dataset). Returns null (after logging) on any failure.
 */
public InvCatalogImpl makeProxyDsResolverCatalog( String path, URI baseURI ) {
  if ( path == null )
    return null;
  if ( path.endsWith( "/"))
    return null; // a directory path cannot name a proxy dataset
  // Get the dataset path.
  String dsDirPath = translatePathToLocation( path );
  if ( dsDirPath == null ) {
    log.error( "makeProxyDsResolverCatalog(): Requesting path <" + path + "> must start with \"" + rootPath + "\"." );
    return null;
  }
  // Split into parent path and dataset name.
  int pos = dsDirPath.lastIndexOf('/');
  if ( pos == -1 ) {
    log.error( "makeProxyDsResolverCatalog(): Requesting path <" + path + "> must contain a slash (\"/\")." );
    return null;
  }
  String dsName = dsDirPath.substring( pos + 1 );
  dsDirPath = dsDirPath.substring( 0, pos );
  // Find matching ProxyDatasetHandler.
  ProxyDatasetHandler pdh = this.getProxyDatasetHandlers().get( dsName );
  if ( pdh == null ) {
    log.error( "makeProxyDsResolverCatalog(): No matching proxy dataset handler found <" + dsName + ">." );
    return null;
  }
  // Setup and create catalog builder.
  CatalogBuilder catBuilder = buildCatalogBuilder();
  if ( catBuilder == null )
    return null;
  // Get the CrawlableDataset for the desired catalog level.
  CrawlableDataset catalogCrDs;
  try {
    catalogCrDs = catBuilder.requestCrawlableDataset( dsDirPath );
  } catch ( IOException e ) {
    log.error( "makeProxyDsResolverCatalog(): failed to create CrawlableDataset for catalogLevel <" + dsDirPath + "> and class <" + crDsClassName + ">: " + e.getMessage(), e );
    return null;
  }
  if ( catalogCrDs == null ) {
    log.warn( "makeProxyDsResolverCatalog(): requested catalog level <" + dsDirPath + "> not allowed (filtered out)." );
    return null;
  }
  if ( ! catalogCrDs.isCollection()) {
    log.warn( "makeProxyDsResolverCatalog(): requested catalog level <" + dsDirPath + "> not a collection." );
    return null;
  }
  // Generate the desired catalog using the builder.
  InvCatalogImpl catalog;
  try {
    catalog = (InvCatalogImpl) catBuilder.generateProxyDsResolverCatalog( catalogCrDs, pdh );
  } catch ( IOException e ) {
    log.error( "makeProxyDsResolverCatalog(): catalog generation failed <" + catalogCrDs.getPath() + ">: " + e.getMessage() );
    return null;
  }
  // Set the catalog base URI.
  if ( catalog != null )
    catalog.setBaseURI( baseURI );
  return catalog;
}
java
{ "resource": "" }
q175762
AggregationNew.getCoordinateType
test
/** Data type of the aggregation coordinate, taken from the first nested dataset. */
private DataType getCoordinateType() {
  DatasetOuterDimension firstDataset = (DatasetOuterDimension) getDatasets().get(0);
  if (firstDataset.isStringValued)
    return DataType.STRING;
  return DataType.DOUBLE;
}
java
{ "resource": "" }
q175763
Grib2Pds.factory
test
/**
 * Create the Grib2Pds subclass that decodes the given product-definition
 * template number (presumably GRIB2 code table 4.0 — confirm against spec),
 * or null (after a warning) if the template is not supported.
 *
 * @param template product definition template number
 * @param input raw bytes of the PDS section
 * @return decoder for the template, or null if unrecognized
 */
@Nullable
public static Grib2Pds factory(int template, byte[] input) {
  switch (template) {
    case 0: return new Grib2Pds0(input);
    case 1: return new Grib2Pds1(input);
    case 2: return new Grib2Pds2(input);
    case 5: return new Grib2Pds5(input);
    case 6: return new Grib2Pds6(input);
    case 8: return new Grib2Pds8(input);
    case 9: return new Grib2Pds9(input);
    case 10: return new Grib2Pds10(input);
    case 11: return new Grib2Pds11(input);
    case 12: return new Grib2Pds12(input);
    case 15: return new Grib2Pds15(input);
    case 30: return new Grib2Pds30(input);
    case 31: return new Grib2Pds31(input);
    case 48: return new Grib2Pds48(input);
    case 61: return new Grib2Pds61(input);
    default:
      log.warn("Missing template " + template);
      return null;
  }
}
java
{ "resource": "" }
q175764
Grib2Pds.calcTime
test
/**
 * Decode a 7-octet time group (2-octet year, then month, day, hour, minute,
 * second) starting at the given octet index.
 *
 * @param startIndex index of the first (year) octet
 * @return the decoded date, or CalendarDate.UNKNOWN if every field is zero
 */
protected CalendarDate calcTime(int startIndex) {
  // relies on Java's guaranteed left-to-right argument evaluation to advance startIndex
  int year = GribNumbers.int2(getOctet(startIndex++), getOctet(startIndex++));
  int month = getOctet(startIndex++);
  int day = getOctet(startIndex++);
  int hour = getOctet(startIndex++);
  int minute = getOctet(startIndex++);
  int second = getOctet(startIndex++);
  // an all-zero time group means "missing"
  if ((year == 0) && (month == 0) && (day == 0) && (hour == 0) && (minute == 0) && (second == 0))
    return CalendarDate.UNKNOWN;
  // href.t00z.prob.f36.grib2: some files encode hour > 23; roll the excess into days
  if (hour > 23) {
    day += (hour/24);
    hour = hour % 24;
  }
  return CalendarDate.of(null, year, month, day, hour, minute, second);
}
java
{ "resource": "" }
q175765
Grib2Pds.applyScaleFactor
test
/**
 * Apply a decimal scale factor: value * 10^-scale.
 * A scale of 0 or 255 (missing) or a value of 0 is returned unscaled.
 */
double applyScaleFactor(int scale, int value) {
  if (scale == 0 || scale == 255 || value == 0)
    return value;
  return value * Math.pow(10, -scale);
}
java
{ "resource": "" }
q175766
EcmwfLocalConcepts.parseLocalConcept
test
/** Open the named local-concepts file and feed it to addLocalConcept(); the stream is always closed. */
private void parseLocalConcept(String filename, String conceptName) throws IOException {
  try (InputStream stream = new FileInputStream(filename)) {
    addLocalConcept(stream, conceptName);
  }
}
java
{ "resource": "" }
q175767
EcmwfLocalConcepts.addLocalConcept
test
/**
 * Parse an ECMWF local-concepts definition stream and record each
 * (tableVersion, parameterNumber) -> (conceptName, paramName) mapping via
 * storeConcept(). The stream is consumed and closed here.
 *
 * NOTE(review): the initial banner-skip loop assumes the file contains at
 * least one '#' line; a file without one would NPE — confirm input format.
 */
private void addLocalConcept(InputStream is, String conceptName) throws IOException {
  /* example entry from name.def:
     #Total precipitation of at least 5 mm
     'Total precipitation of at least 5 mm' = {
        table2Version = 131 ;
        indicatorOfParameter = 61 ;
     }
  */
  try (BufferedReader br = new BufferedReader(new InputStreamReader(is, ENCODING))) {
    // skip everything before the first '#' comment line
    String line = br.readLine();
    while (!line.startsWith("#"))
      line = br.readLine(); // skip
    while (true) {
      HashMap<String, String> items = new HashMap<>();
      line = br.readLine();
      if (line == null)
        break; // done with the file
      if ((line.length() == 0) || line.startsWith("#"))
        continue; // blank or comment line
      line = cleanLine(line);
      if (line.contains("{")) {
        // entry header: "<paramName> = {"
        String paramName = line.split("=")[0].trim();
        line = br.readLine();
        if (line == null)
          break; // done with the file
        line = cleanLine(line);
        // entry body: "key = value" pairs until the closing brace line
        while (line.contains("=")) {
          String[] kvp = line.split("=");
          items.put(kvp[0].trim(), kvp[1].trim());
          line = br.readLine();
          if (line == null)
            break; // done with the file
          line = cleanLine(line);
        }
        String tableVersion = items.get(TABLE_VERSION_ID);
        String parameterNumber = items.get(PARAM_NUM_ID);
        storeConcept(tableVersion, parameterNumber, conceptName, paramName);
      }
    }
  }
}
java
{ "resource": "" }
q175768
EcmwfLocalConcepts.cleanLine
test
/**
 * Strip quoting/markup from a local-concepts line: removes single quotes,
 * tabs and semicolons, then trims surrounding whitespace.
 */
private String cleanLine(String lineIn) {
  // the removed characters are all literals, so plain replace() is equivalent
  // to the original regex replaceAll() calls
  return lineIn.replace("'", "")
               .replace("\t", "")
               .replace(";", "")
               .trim();
}
java
{ "resource": "" }
q175769
EcmwfLocalConcepts.storeConcept
test
/**
 * Record one key/value pair under localConcepts[tableVersion][parameterNumber].
 * If the key is already present the existing value is kept and a notice is
 * printed (unchanged behavior).
 *
 * Fixes/idiom: replaces the deeply nested containsKey/get/put ladder with
 * computeIfAbsent and drops the redundant trailing re-put of the table.
 */
private void storeConcept(String tableVersion, String parameterNumber, String key, String value) {
  HashMap<String, HashMap<String, String>> table =
      localConcepts.computeIfAbsent(tableVersion, k -> new HashMap<>());
  HashMap<String, String> param =
      table.computeIfAbsent(parameterNumber, k -> new HashMap<>(4));
  if (param.containsKey(key)) {
    // keep the first value seen, matching the original behavior
    System.out.println("already has key value pair: " + key + ":" + value);
  } else {
    param.put(key, value);
  }
}
java
{ "resource": "" }
q175770
EcmwfLocalConcepts.writeGrib1Tables
test
/**
 * Write one GRIB1 parameter table file ("2.98.&lt;tableNum&gt;.table") per local
 * concept table, then regenerate the lookupTables.txt registry for them.
 * Existing output files are deleted and recreated.
 *
 * NOTE(review): uses legacy SimpleDateFormat/Calendar only for the timestamp
 * banner; java.time would be preferable if this is ever modernized.
 */
private void writeGrib1Tables() throws IOException {
  SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssz");
  Calendar cal = Calendar.getInstance();
  String writeDate = dateFormat.format(cal.getTime());
  String grib1Info;
  List<String> tableNums = new ArrayList<>();
  HashMap<String, String> paramInfo;
  // output goes under resources/resources/grib1/, mirroring the sources/ layout
  Path dir = Paths.get(ecmwfLocalConceptsLoc.replace("sources/", "resources/resources/grib1/"));
  for (String tableNum : localConcepts.keySet()) {
    tableNums.add(tableNum);
    String fileName = "2.98." + tableNum + ".table";
    System.out.println("Writing: " + fileName);
    Path newFile = dir.resolve(fileName);
    Files.deleteIfExists(newFile);
    Files.createFile(newFile);
    try (BufferedWriter writer = Files.newBufferedWriter(newFile, ENCODING)){
      writer.write("# Generated by " + this.getClass().getCanonicalName() + " on " + writeDate);
      writer.newLine();
      for(String paramNum : localConcepts.get(tableNum).keySet()){
        paramInfo = localConcepts.get(tableNum).get(paramNum);
        String shortName = paramInfo.get(SHORTNAME_ID);
        String description = paramInfo.get(DESCRIPTION_ID);
        String units = paramInfo.get(UNIT_ID);
        // one line per parameter: "<num> <shortName> [<description>] (<units>)"
        grib1Info = paramNum + " " + shortName + " [" + description + "] (" + units + ")";
        writer.write(grib1Info);
        writer.newLine();
      }
    }
  }
  writeLookupTableFile(tableNums, dir, writeDate);
}
java
{ "resource": "" }
q175771
EcmwfLocalConcepts.writeLookupTableFile
test
/**
 * Regenerate lookupTables.txt, registering one "98: -1: <num>: 2.98.<num>.table"
 * line per table number, in sorted order. The existing file is replaced.
 */
private void writeLookupTableFile(List<String> tableNums, Path dir, String writeDate) throws IOException {
  System.out.println("Writing: lookupTables.txt");
  Collections.sort(tableNums);
  Path lookupFile = dir.resolve("lookupTables.txt");
  Files.deleteIfExists(lookupFile);
  Files.createFile(lookupFile);
  try (BufferedWriter out = Files.newBufferedWriter(lookupFile, ENCODING)) {
    out.write("# Generated by " + this.getClass().getCanonicalName() + " on " + writeDate);
    out.newLine();
    for (String tableNum : tableNums) {
      // same layout as before: center:subcenter:version:filename (tab separated)
      out.write("98:\t-1:\t" + tableNum + ":\t" + "2.98." + tableNum + ".table");
      out.newLine();
    }
  }
}
java
{ "resource": "" }
q175772
EcmwfLocalConcepts.showLocalConcepts
test
/** Debug dump: print every "key:value" pair stored in localConcepts. */
private void showLocalConcepts() {
  for (String tableNum : localConcepts.keySet()) {
    HashMap<String, HashMap<String, String>> table = localConcepts.get(tableNum);
    for (String paramNum : table.keySet()) {
      HashMap<String, String> param = table.get(paramNum);
      for (String key : param.keySet()) {
        System.out.println(key + ":" + param.get(key));
      }
    }
  }
}
java
{ "resource": "" }
q175773
EcmwfLocalConcepts.main
test
/** Command-line entry point: generate the GRIB1 tables from the ECMWF local concepts. */
public static void main(String[] args) {
  try {
    new EcmwfLocalConcepts().writeGrib1Tables();
    System.out.println("Finished!");
  } catch (IOException e) {
    e.printStackTrace();
  }
}
java
{ "resource": "" }
q175774
DatasetSourceType.getType
test
/**
 * Look up a DatasetSourceType by name.
 * @return the matching type, or null if name is null or unknown
 */
public static DatasetSourceType getType( String name) {
  if (name == null)
    return null;
  return (DatasetSourceType) hash.get(name);
}
java
{ "resource": "" }
q175775
DatasetSource.expand
test
/**
 * Expand this DatasetSource into a catalog: create the skeleton catalog,
 * recurse into the directory structure, finish the catalog, and prune empty
 * collection datasets.
 *
 * @return the top-level (access point) dataset of the generated catalog
 * @throws IOException if the skeleton cannot be created or the access point
 *         is not a collection dataset
 */
public InvDataset expand() throws IOException {
  // Get the new catalog being generated and its top-level dataset.
  this.resultingCatalog = this.createSkeletonCatalog( prefixUrlPath );
  this.accessPointDataset = (InvDataset) this.resultingCatalog.getDatasets().get( 0);
  // IOException thrown by createSkeletonCatalog() so this check should not be necessary.
  if ( ! this.isCollection( this.accessPointDataset) ) {
    String tmpMsg = "The access point dataset <" + this.accessPointDataset.getName() + "> must be a collection dataset.";
    logger.warn( "expand(): {}", tmpMsg);
    throw new IOException( tmpMsg);
  }
  // Recurse into directory structure and expand.
  expandRecursive( this.accessPointDataset);
  // Finish the catalog.
  ((InvCatalogImpl) this.resultingCatalog).finish();
  // Remove empty collection datasets. @todo HACK - should use filters instead.
  this.recursivelyRemoveEmptyCollectionDatasets( this.accessPointDataset);
  // Return the top-level dataset.
  return( this.accessPointDataset);
}
java
{ "resource": "" }
q175776
DatasetSource.fullExpand
test
public InvCatalog fullExpand() throws IOException { logger.debug( "fullExpand(): expanding DatasetSource named \"{}\"", this.getName()); InvDataset topDs = this.expand(); InvCatalog generatedCat = topDs.getParentCatalog(); // Add metadata to all datasets. for ( Iterator it = this.getDatasetEnhancerList().iterator(); it.hasNext(); ) { DatasetEnhancer1 dsE = (DatasetEnhancer1) it.next(); dsE.addMetadata( topDs); } // Name all datasets. logger.debug( "fullExpand(): naming the datasets."); this.nameDatasets( (InvDatasetImpl) topDs ); // Sort all datasets logger.debug( "fullExpand(): sorting the datasets."); this.sortDatasets( topDs); // Return the generated catalog ((InvCatalogImpl) generatedCat).finish(); return( generatedCat); }
java
{ "resource": "" }
q175777
DatasetSource.nameDatasets
test
/**
 * Apply the configured DatasetNamers to the datasets in the container:
 * flat structure names the container's list directly; tree structure names
 * each child subtree recursively. No-op when no namers are configured.
 */
private void nameDatasets( InvDatasetImpl datasetContainer) {
  if (this.getDatasetNamerList().isEmpty())
    return;
  if (this.isFlatten()) {
    logger.debug( "nameDatasets(): structure is FLAT calling nameDatasetList()");
    this.nameDatasetList(datasetContainer);
    return;
  }
  logger.debug( "nameDatasets(): structure is DIRECTORY_TREE calling nameDatasetTree() on each dataset in dataset container");
  for (Object childObj : datasetContainer.getDatasets()) {
    this.nameDatasetTree((InvDatasetImpl) childObj);
  }
}
java
{ "resource": "" }
q175778
DatasetSource.nameDatasetList
test
/**
 * Apply each DatasetNamer, in order, to the flat list of child datasets of
 * the given dataset. Named datasets are moved into a temporary holding dataset
 * (optionally grouped under an extra per-namer level) and finally moved back
 * into the container; unnamed datasets stay where they are.
 */
private void nameDatasetList( InvDatasetImpl dataset) {
  // Create temporary dataset in which to hold named datasets.
  InvDatasetImpl namedDs = new InvDatasetImpl( dataset, "nameDatastList() temp dataset", null, null, null);
  // InvDatasetImpl(parentDs, name, dataType, serviceName, urlPath)
  dataset.addDataset( namedDs);
  // Loop through the DatasetNamers
  DatasetNamer curNamer = null;
  for ( int i = 0; i < this.datasetNamerList.size(); i++) {
    curNamer = (DatasetNamer) this.datasetNamerList.get( i);
    logger.debug( "nameDatasetList(): trying namer ({})", curNamer.getName());
    // If the current DatasetNamer adds a new level, create a new dataset.
    InvDatasetImpl addLevelDs = null;
    if ( curNamer.getAddLevel()) {
      addLevelDs = new InvDatasetImpl( null, curNamer.getName(), null, null, null );
    }
    // Iterate over remaining unnamed datasets.
    InvDatasetImpl curDs = null;
    java.util.Iterator dsIter = dataset.getDatasets().iterator();
    while ( dsIter.hasNext()) {
      curDs = (InvDatasetImpl) dsIter.next();
      logger.debug( "nameDatasetList(): try namer on this ds ({}-{})", curDs.getName(), curDs.getUrlPath() );
      // Try to name the current dataset.
      if ( curNamer.nameDataset( curDs)) {
        logger.debug( "nameDatasetList(): ds named ({})", curDs.getName());
        // If adding a level, add named datasets to the added level dataset.
        if ( curNamer.getAddLevel()) {
          addLevelDs.addDataset( curDs);
        }
        // Otherwise, add the named datasets to namedDs.
        else {
          namedDs.addDataset( curDs);
        }
        // Remove the now-named dataset from list of unnamed datasets.
        // (must go through the iterator to avoid ConcurrentModificationException)
        dsIter.remove();
      }
    } // END - InvDatasetImpl loop
    // If the namer added a level and a dataset was named by this namer, add the
    // new level to the list of named datasets.
    if ( curNamer.getAddLevel()) {
      if ( addLevelDs.hasNestedDatasets()) {
        namedDs.addDataset( addLevelDs);
      }
    }
  } // END - DatasetNamer loop
  namedDs.finish();
  // Once all datasets are named (or unnamable with these DatasetNamers),
  // add all the datasets in namedDs back into the given containerDataset.
  if (logger.isDebugEnabled()) {
    logger.debug( "nameDatasetList(): number of unnamed datasets is " + dataset.getDatasets().size() + ".");
    logger.debug( "nameDatasetList(): add named datasets back to container.");
  }
  for ( int i = 0; i < namedDs.getDatasets().size(); i++) {
    dataset.addDataset( (InvDatasetImpl) namedDs.getDatasets().get( i));
  }
  dataset.removeDataset( namedDs);
  return;
}
java
{ "resource": "" }
q175779
DatasetSource.nameDatasetTree
test
private void nameDatasetTree( InvDatasetImpl dataset) { // If dataset does not have a name, try naming it with dsNamers. // @todo Rethink naming of directories (look at how DatasetFilter deals with collection vs atomic datasets). if ( dataset.getName().equals("") || ! dataset.hasAccess()) { logger.debug( "nameDatasetTree(): naming dataset ({})...", dataset.getUrlPath()); DatasetNamer dsN = null; for ( int i = 0; i < this.datasetNamerList.size(); i++) { dsN = (DatasetNamer) this.datasetNamerList.get( i); if ( dsN.nameDataset( dataset)) { logger.debug( "nameDatasetTree(): ... used namer ({})", dsN.getName()); break; } } } // Try to name any child datasets. InvDatasetImpl curDs = null; for ( int j = 0; j < dataset.getDatasets().size(); j++) { curDs = (InvDatasetImpl) dataset.getDatasets().get( j); logger.debug( "nameDatasetTree(): recurse to name child dataset ({})", curDs.getUrlPath()); this.nameDatasetTree( curDs); } return; }
java
{ "resource": "" }
q175780
ScalablePicture.loadPictureImd
test
/**
 * Immediately load the picture at the given URL with the given rotation,
 * detaching this object from any previously loading source picture first.
 * Scaling is scheduled to run once the load completes (scaleAfterLoad).
 */
public void loadPictureImd(URL imageUrl, double rotation) {
  Tools.log("loadPictureImd invoked with URL: " + imageUrl.toString());
  if (sourcePicture != null) {
    // stop receiving events from the previous source
    sourcePicture.removeListener(this);
  }
  sourcePicture = new SourcePicture();
  sourcePicture.addListener(this);
  setStatus(LOADING, "Loading: " + imageUrl.toString());
  scaleAfterLoad = true;
  sourcePicture.loadPicture(imageUrl, rotation);
}
java
{ "resource": "" }
q175781
ScalablePicture.stopLoadingExcept
test
public void stopLoadingExcept(URL url) { if (sourcePicture != null) { boolean isCurrentlyLoading = sourcePicture.stopLoadingExcept(url); if (!isCurrentlyLoading) { // sourcePicture.removeListener( this ); } PictureCache.stopBackgroundLoadingExcept(url); } }
java
{ "resource": "" }
q175782
ScalablePicture.sourceStatusChange
test
/**
 * Listener callback: translate a SourcePicture status change into this
 * object's own status, and kick off scaling when the source finishes loading
 * (if a scale was requested via scaleAfterLoad).
 */
public void sourceStatusChange(int statusCode, String statusMessage, SourcePicture sp) {
  //Tools.log("ScalablePicture.sourceStatusChange: status received from SourceImage: " + statusMessage);
  switch (statusCode) {
    case SourcePicture.UNINITIALISED:
      Tools.log("ScalablePicture.sourceStatusChange: pictureStatus was: UNINITIALISED message: " + statusMessage);
      setStatus(UNINITIALISED, statusMessage);
      break;
    case SourcePicture.ERROR:
      Tools.log("ScalablePicture.sourceStatusChange: pictureStatus was: ERROR message: " + statusMessage);
      setStatus(ERROR, statusMessage);
      sourcePicture.removeListener(this); // no further events expected after an error
      break;
    case SourcePicture.LOADING:
      Tools.log("ScalablePicture.sourceStatusChange: pictureStatus was: LOADING message: " + statusMessage);
      setStatus(LOADING, statusMessage);
      break;
    case SourcePicture.ROTATING:
      Tools.log("ScalablePicture.sourceStatusChange: pictureStatus was: ROTATING message: " + statusMessage);
      setStatus(LOADING, statusMessage); // rotation is surfaced to our listeners as still LOADING
      break;
    case SourcePicture.READY:
      Tools.log("ScalablePicture.sourceStatusChange: pictureStatus was: READY message: " + statusMessage);
      setStatus(LOADED, statusMessage);
      sourcePicture.removeListener(this);
      if (scaleAfterLoad) {
        // a scale was queued while loading; run it now at top priority
        createScaledPictureInThread(Thread.MAX_PRIORITY);
        scaleAfterLoad = false;
      }
      break;
    default:
      Tools.log("ScalablePicture.sourceStatusChange: Don't recognize this status: " + statusMessage);
      break;
  }
}
java
{ "resource": "" }
q175783
ScalablePicture.sourceLoadProgressNotification
test
/** Forward a source-load progress event to every registered status listener. */
public void sourceLoadProgressNotification(int statusCode, int percentage) {
  for (Enumeration e = scalablePictureStatusListeners.elements(); e.hasMoreElements(); ) {
    ScalablePictureListener listener = (ScalablePictureListener) e.nextElement();
    listener.sourceLoadProgressNotification(statusCode, percentage);
  }
}
java
{ "resource": "" }
q175784
ScalablePicture.createScaledPictureInThread
test
/**
 * Start scaling this picture on a background ScaleThread with the given
 * thread priority; status is set to SCALING before the thread starts.
 */
public void createScaledPictureInThread(int priority) {
  setStatus(SCALING, "Scaling picture.");
  ScaleThread scaler = new ScaleThread(this);
  scaler.setPriority(priority);
  scaler.start();
}
java
{ "resource": "" }
q175785
ScalablePicture.getScaledSize
test
/** Size of the scaled picture, or a 0x0 Dimension if none has been produced yet. */
public Dimension getScaledSize() {
  if (scaledPicture == null)
    return new Dimension(0, 0);
  return new Dimension(scaledPicture.getWidth(), scaledPicture.getHeight());
}
java
{ "resource": "" }
q175786
ScalablePicture.getScaledSizeString
test
/** Scaled picture size as "W x H", or "0 x 0" if no scaled picture exists yet. */
public String getScaledSizeString() {
  if (scaledPicture == null)
    return "0 x 0";
  // int + String concatenation produces the same text as Integer.toString()
  return scaledPicture.getWidth() + " x " + scaledPicture.getHeight();
}
java
{ "resource": "" }
q175787
GempakSoundingFileReader.getMergedParts
test
/** The list of merged parts — always the single SNDT part, in a fresh mutable list. */
public List<String> getMergedParts() {
  List<String> parts = new ArrayList<>(1);
  parts.add(SNDT);
  return parts;
}
java
{ "resource": "" }
q175788
GempakSoundingFileReader.makeHeader
test
/**
 * Format the two-line station header for a sounding dump:
 * "STID ... STNM ... TIME ..." then "SLAT ... SLON ... SELV ...".
 */
private String makeHeader(GempakStation stn, String date) {
  StringBuilder sb = new StringBuilder();
  sb.append("STID = ")
    .append(StringUtil2.padRight((stn.getSTID().trim() + stn.getSTD2().trim()), 8))
    .append("\t")
    .append("STNM = ").append(Format.i(stn.getSTNM(), 6)).append("\t")
    .append("TIME = ").append(date).append("\n")
    .append("SLAT = ").append(Format.d(stn.getLatitude(), 5)).append("\t")
    .append("SLON = ").append(Format.d(stn.getLongitude(), 5)).append("\t")
    .append("SELV = ").append(Format.d(stn.getAltitude(), 5)).append("\n");
  return sb.toString();
}
java
{ "resource": "" }
q175789
GempakSoundingFileReader.SN_CKUA
test
private List<String> SN_CKUA() { List<String> types = new ArrayList<>(); boolean above = false; boolean done = false; String partToCheck; while (!done) { // check for mandatory groups for (int group = 0; group < belowGroups.length; group++) { if (above) { partToCheck = aboveGroups[group]; } else { partToCheck = belowGroups[group]; } if (checkForValidGroup(partToCheck, parmLists[group])) { types.add(partToCheck); } } if (!above) { above = true; } else { done = true; } } return types; }
java
{ "resource": "" }
q175790
GempakSoundingFileReader.checkForValidGroup
test
/**
 * Check that the named part exists and that its parameter names match the
 * expected names, in order.
 *
 * Fixes: if the part had more parameters than the expected array, the index
 * walked past the end and threw ArrayIndexOutOfBoundsException; a longer part
 * now simply fails the check.
 *
 * @param partToCheck part name to look up
 * @param params expected parameter names, in order
 * @return true if the part exists and its parameters match exactly in order
 */
private boolean checkForValidGroup(String partToCheck, String[] params) {
  DMPart part = getPart(partToCheck);
  if (part == null) {
    return false;
  }
  int i = 0;
  for (DMParam parm : part.params) {
    if (i >= params.length) {
      return false; // part has more params than expected (previously AIOOBE)
    }
    if (!(parm.kprmnm.equals(params[i++]))) {
      return false;
    }
  }
  return true;
}
java
{ "resource": "" }
q175791
DiskCache.setRootDirectory
test
static public void setRootDirectory(String cacheDir) { if (!cacheDir.endsWith("/")) cacheDir = cacheDir + "/"; root = StringUtil2.replace(cacheDir, '\\', "/"); // no nasty backslash makeRootDirectory(); }
java
{ "resource": "" }
q175792
DiskCache.makeRootDirectory
test
/**
 * Create the cache root directory if it does not exist and mark the
 * directory-exists check as satisfied.
 *
 * Fixes: mkdirs() may legitimately return false when another thread/process
 * created the directory between the exists() check and the mkdirs() call;
 * re-check existence before failing instead of throwing spuriously.
 *
 * @throws IllegalStateException if the directory cannot be created
 */
static public void makeRootDirectory() {
  File dir = new File(root);
  if (!dir.exists() && !dir.mkdirs() && !dir.exists())
    throw new IllegalStateException("DiskCache.setRootDirectory(): could not create root directory <" + root + ">.");
  checkExist = true;
}
java
{ "resource": "" }
q175793
DiskCache.getCacheFile
test
/**
 * Return the File in the cache corresponding to the given file location.
 * If the file already exists its last-modified time is touched (so cleanup-
 * by-age treats it as recently used). The first call after the static
 * checkExist flag is reset also ensures the parent directory tree exists.
 */
static public File getCacheFile(String fileLocation) {
  File f = new File(makeCachePath(fileLocation));
  if (f.exists()) {
    // touch for LRU-style cleanup bookkeeping
    if (!f.setLastModified(System.currentTimeMillis()))
      logger.warn("Failed to setLastModified on " + f.getPath());
  }
  if (!checkExist) {
    // lazily create the cache directory; only checked once per flag reset
    File dir = f.getParentFile();
    if (!dir.exists() && !dir.mkdirs())
      logger.warn("Failed to mkdirs on " + dir.getPath());
    checkExist = true;
  }
  return f;
}
java
{ "resource": "" }
q175794
DiskCache.cleanCache
test
/**
 * Delete every file in the cache root whose last-modified time is before the
 * cutoff, optionally reporting each deletion (and any failure) to sbuff.
 *
 * @param cutoff delete files last modified before this date
 * @param sbuff optional report buffer; may be null
 */
static public void cleanCache(Date cutoff, StringBuilder sbuff) {
  if (sbuff != null)
    sbuff.append("CleanCache files before ").append(cutoff).append("\n");
  File dir = new File(root);
  File[] children = dir.listFiles();
  if (children == null)
    return; // root missing or not a directory
  for (File file : children) {
    Date lastMod = new Date(file.lastModified());
    if (lastMod.before(cutoff)) {
      boolean ret = file.delete();
      if (sbuff != null) {
        sbuff.append(" delete ").append(file).append(" (").append(lastMod).append(")\n");
        if (!ret)
          sbuff.append("Error deleting ").append(file).append("\n");
      }
    }
  }
}
java
{ "resource": "" }
q175795
DiskCache.cleanCache
test
/**
 * Trim the cache root to at most maxBytes: files are visited in the order
 * given by fileComparator, kept while the running total stays within the
 * limit, and deleted once they would push it over.
 *
 * Fixes: the report line was missing a space ("...maxBytes= 100on dir ...");
 * it now reads "... on dir ...".
 *
 * @param maxBytes target maximum total size of the cache
 * @param fileComparator ordering in which files are considered for keeping
 * @param sbuff optional report buffer; may be null
 */
static public void cleanCache(long maxBytes, Comparator<File> fileComparator, StringBuilder sbuff) {
  if (sbuff != null)
    sbuff.append("DiskCache clean maxBytes= ").append(maxBytes).append(" on dir ").append(root).append("\n");
  File dir = new File(root);
  long total = 0, total_delete = 0;
  File[] files = dir.listFiles();
  if (files != null) {
    List<File> fileList = Arrays.asList(files);
    Collections.sort(fileList, fileComparator);
    for (File file : fileList) {
      if (file.length() + total > maxBytes) {
        // would exceed the budget: delete this one
        total_delete += file.length();
        if (sbuff != null)
          sbuff.append(" delete ").append(file).append(" (").append(file.length()).append(")\n");
        if (!file.delete() && sbuff != null)
          sbuff.append("Error deleting ").append(file).append("\n");
      } else {
        total += file.length();
      }
    }
  }
  if (sbuff != null) {
    sbuff.append("Total bytes deleted= ").append(total_delete).append("\n");
    sbuff.append("Total bytes left in cache= ").append(total).append("\n");
  }
}
java
{ "resource": "" }
q175796
Descriptor.isBufrTable
test
/**
 * True if the packed FXY descriptor denotes a BUFR table entry:
 * F (top 2 bits) and X (next 6 bits) are both zero — i.e. the whole high
 * byte is zero — and Y (low byte) is less than 13.
 */
static public boolean isBufrTable(short fxy) {
  if ((fxy & 0xFF00) != 0)
    return false; // F or X nonzero
  int y = fxy & 0xFF;
  return y < 13;
}
java
{ "resource": "" }
q175797
CoordinateAxis.factory
test
/**
 * Create the appropriate CoordinateAxis subclass for a variable:
 * rank 0/1 (or rank-2 CHAR, i.e. string-valued) -> CoordinateAxis1D,
 * other rank 2 -> CoordinateAxis2D, anything else -> plain CoordinateAxis.
 */
static public CoordinateAxis factory(NetcdfDataset ncd, VariableDS vds) {
  int rank = vds.getRank();
  boolean oneDimensional = rank == 0 || rank == 1
      || (rank == 2 && vds.getDataType() == DataType.CHAR);
  if (oneDimensional)
    return new CoordinateAxis1D(ncd, vds);
  if (rank == 2)
    return new CoordinateAxis2D(ncd, vds);
  return new CoordinateAxis(ncd, vds);
}
java
{ "resource": "" }
q175798
CoordinateAxis.copyNoCache
test
/**
 * Make a copy of this axis that carries over its identifying state but gets
 * a fresh, empty data cache (so cached reads are not shared with this axis).
 */
public CoordinateAxis copyNoCache() {
  CoordinateAxis axis = new CoordinateAxis(ncd, getParentGroup(), getShortName(), getDataType(), getDimensionsString(), getUnitsString(), getDescription());
  // other state
  axis.axisType = this.axisType;
  axis.boundaryRef = this.boundaryRef;
  axis.isContiguous = this.isContiguous;
  axis.positive = this.positive;
  axis.cache = new Variable.Cache(); // decouple cache
  return axis;
}
java
{ "resource": "" }
q175799
CoordinateAxis.isNumeric
test
/** True unless the axis holds character, string, or structure data. */
public boolean isNumeric() {
  DataType t = getDataType();
  return !(t == DataType.CHAR || t == DataType.STRING || t == DataType.STRUCTURE);
}
java
{ "resource": "" }