code
stringlengths
130
281k
code_dependency
stringlengths
182
306k
public class class_name {
    /**
     * Builds the scope lookup list for the given container: one entry per '.'
     * in its namespace, each entry being the namespace prefix up to and
     * including that dot. Longer prefixes come first in the returned deque.
     */
    public static Deque<String> createScopeLookupList(UserTypeContainer container) {
        String namespace = container.getNamespace();
        Deque<String> scopeLookupList = new ArrayDeque<>();
        for (int dot = namespace.indexOf('.'); dot >= 0; dot = namespace.indexOf('.', dot + 1)) {
            // substring(0, dot + 1) keeps the dot itself in the prefix, as before
            scopeLookupList.addFirst(namespace.substring(0, dot + 1));
        }
        return scopeLookupList;
    }
}
public class class_name { public static Deque<String> createScopeLookupList(UserTypeContainer container) { String namespace = container.getNamespace(); Deque<String> scopeLookupList = new ArrayDeque<>(); int end = 0; while (end >= 0) { end = namespace.indexOf('.', end); // depends on control dependency: [while], data = [none] if (end >= 0) { end++; // depends on control dependency: [if], data = [none] String scope = namespace.substring(0, end); scopeLookupList.addFirst(scope); // depends on control dependency: [if], data = [none] } } return scopeLookupList; } }
public class class_name { public void entryUpdated (EntryUpdatedEvent<OccupantInfo> event) { // bail if this isn't for the OCCUPANT_INFO field if (!event.getName().equals(PlaceObject.OCCUPANT_INFO)) { return; } // now let the occupant observers know what's up final OccupantInfo info = event.getEntry(); final OccupantInfo oinfo = event.getOldEntry(); _observers.apply(new ObserverList.ObserverOp<OccupantObserver>() { public boolean apply (OccupantObserver observer) { observer.occupantUpdated(oinfo, info); return true; } }); } }
public class class_name { public void entryUpdated (EntryUpdatedEvent<OccupantInfo> event) { // bail if this isn't for the OCCUPANT_INFO field if (!event.getName().equals(PlaceObject.OCCUPANT_INFO)) { return; // depends on control dependency: [if], data = [none] } // now let the occupant observers know what's up final OccupantInfo info = event.getEntry(); final OccupantInfo oinfo = event.getOldEntry(); _observers.apply(new ObserverList.ObserverOp<OccupantObserver>() { public boolean apply (OccupantObserver observer) { observer.occupantUpdated(oinfo, info); return true; } }); } }
public class class_name {
    /**
     * Returns the property value for {@code key} parsed as a Float, or null when
     * the property is absent or empty.
     *
     * @param key the property key to look up
     * @return the parsed value, or null when no non-empty value exists
     * @throws IllegalStateException if a value is present but is not a valid float
     */
    @CheckForNull
    public Float getFloat(String key) {
        String value = getString(key);
        if (StringUtils.isNotEmpty(value)) {
            try {
                return Float.valueOf(value);
            } catch (NumberFormatException e) {
                // fix: chain the NumberFormatException as the cause instead of dropping it
                throw new IllegalStateException(String.format("The property '%s' is not a float value", key), e);
            }
        }
        return null;
    }
}
public class class_name { @CheckForNull public Float getFloat(String key) { String value = getString(key); if (StringUtils.isNotEmpty(value)) { try { return Float.valueOf(value); // depends on control dependency: [try], data = [none] } catch (NumberFormatException e) { throw new IllegalStateException(String.format("The property '%s' is not a float value", key)); } // depends on control dependency: [catch], data = [none] } return null; } }
public class class_name { static Log create(File outputFile) { try { return new Log( new PrintStream( outputFile.getName().endsWith(".gz") ? new GZIPOutputStream(new FileOutputStream(outputFile)) : new FileOutputStream(outputFile)), false); } catch (IOException e) { throw new InvalidOperationException("I/O error", e); } } }
public class class_name { static Log create(File outputFile) { try { return new Log( new PrintStream( outputFile.getName().endsWith(".gz") ? new GZIPOutputStream(new FileOutputStream(outputFile)) : new FileOutputStream(outputFile)), false); // depends on control dependency: [try], data = [none] } catch (IOException e) { throw new InvalidOperationException("I/O error", e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { @Nullable public HtmlAttributeNode getDirectAttributeNamed(String attrName) { // the child at index 0 is the tag name for (int i = 1; i < numChildren(); i++) { StandaloneNode child = getChild(i); if (child instanceof HtmlAttributeNode) { HtmlAttributeNode attr = (HtmlAttributeNode) child; if (attr.definitelyMatchesAttributeName(attrName)) { return attr; } } } return null; } }
public class class_name { @Nullable public HtmlAttributeNode getDirectAttributeNamed(String attrName) { // the child at index 0 is the tag name for (int i = 1; i < numChildren(); i++) { StandaloneNode child = getChild(i); if (child instanceof HtmlAttributeNode) { HtmlAttributeNode attr = (HtmlAttributeNode) child; if (attr.definitelyMatchesAttributeName(attrName)) { return attr; // depends on control dependency: [if], data = [none] } } } return null; } }
public class class_name {
    /**
     * Lazily resolves and caches the IfcGasTerminalType EClass from the Ifc2x3tc1
     * package registry (classifier index 263).
     */
    public EClass getIfcGasTerminalType() {
        if (ifcGasTerminalTypeEClass != null) {
            return ifcGasTerminalTypeEClass;
        }
        // first access: look the classifier up in the registered package
        ifcGasTerminalTypeEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI)
                .getEClassifiers().get(263);
        return ifcGasTerminalTypeEClass;
    }
}
public class class_name { public EClass getIfcGasTerminalType() { if (ifcGasTerminalTypeEClass == null) { ifcGasTerminalTypeEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI) .getEClassifiers().get(263); // depends on control dependency: [if], data = [none] } return ifcGasTerminalTypeEClass; } }
public class class_name {
    /**
     * Creates a new entry at the given time, snapped to the virtual grid when one
     * is active. When no calendar is passed, the default calendar provider picks
     * one; the chosen calendar is made visible so the new entry can be seen.
     *
     * @param time     requested time for the entry (never null)
     * @param calendar target calendar, or null to use the default provider
     * @return the created entry, or null when no calendar is available or the
     *         entry factory declines to create one
     */
    public final Entry<?> createEntryAt(ZonedDateTime time, Calendar calendar) {
        requireNonNull(time);
        VirtualGrid grid = getVirtualGrid();
        if (grid != null) {
            // snap to whichever grid-adjusted time is closer to the requested time
            ZonedDateTime timeA = grid.adjustTime(time, false, getFirstDayOfWeek());
            ZonedDateTime timeB = grid.adjustTime(time, true, getFirstDayOfWeek());
            if (Duration.between(time, timeA).abs().minus(Duration.between(time, timeB).abs()).isNegative()) {
                time = timeA;
            } else {
                time = timeB;
            }
        }
        if (calendar == null) {
            Callback<DateControl, Calendar> defaultCalendarProvider = getDefaultCalendarProvider();
            calendar = defaultCalendarProvider.call(this);
        }
        if (calendar != null) {
            /*
             * We have to ensure that the calendar is visible, otherwise the new
             * entry would not be shown to the user.
             */
            setCalendarVisibility(calendar, true);
            CreateEntryParameter param = new CreateEntryParameter(this, calendar, time);
            Callback<CreateEntryParameter, Entry<?>> factory = getEntryFactory();
            Entry<?> entry = factory.call(param);
            if (entry != null) {
                /*
                 * This is OK. The factory can return NULL. In this case we
                 * assume that the application does not allow to create an entry
                 * at the given location.
                 */
                entry.setCalendar(calendar);
            }
            return entry;
        } else {
            LoggingDomain.EDITING.warning("No calendar found for adding a new entry."); //$NON-NLS-1$
        }
        return null;
    }
}
public class class_name { public final Entry<?> createEntryAt(ZonedDateTime time, Calendar calendar) { requireNonNull(time); VirtualGrid grid = getVirtualGrid(); if (grid != null) { ZonedDateTime timeA = grid.adjustTime(time, false, getFirstDayOfWeek()); ZonedDateTime timeB = grid.adjustTime(time, true, getFirstDayOfWeek()); if (Duration.between(time, timeA).abs().minus(Duration.between(time, timeB).abs()).isNegative()) { time = timeA; // depends on control dependency: [if], data = [none] } else { time = timeB; // depends on control dependency: [if], data = [none] } } if (calendar == null) { Callback<DateControl, Calendar> defaultCalendarProvider = getDefaultCalendarProvider(); calendar = defaultCalendarProvider.call(this); // depends on control dependency: [if], data = [none] } if (calendar != null) { /* * We have to ensure that the calendar is visible, otherwise the new * entry would not be shown to the user. */ setCalendarVisibility(calendar, true); // depends on control dependency: [if], data = [(calendar] CreateEntryParameter param = new CreateEntryParameter(this, calendar, time); Callback<CreateEntryParameter, Entry<?>> factory = getEntryFactory(); // depends on control dependency: [if], data = [none] Entry<?> entry = factory.call(param); if (entry != null) { /* * This is OK. The factory can return NULL. In this case we * assume that the application does not allow to create an entry * at the given location. */ entry.setCalendar(calendar); // depends on control dependency: [if], data = [none] } return entry; // depends on control dependency: [if], data = [none] } else { LoggingDomain.EDITING.warning("No calendar found for adding a new entry."); //$NON-NLS-1$ // depends on control dependency: [if], data = [none] } return null; } }
public class class_name {
    /**
     * A key is foreign relative to rootKey when it lives in a different workspace
     * or comes from a different source; a null key is never foreign.
     */
    public static boolean isForeignKey( NodeKey key, NodeKey rootKey ) {
        if (key == null) {
            return false;
        }
        boolean matchesWorkspace = rootKey.getWorkspaceKey().equals(key.getWorkspaceKey());
        boolean matchesSource = rootKey.getSourceKey().equalsIgnoreCase(key.getSourceKey());
        // foreign unless BOTH workspace and source agree (De Morgan of the original)
        return !(matchesWorkspace && matchesSource);
    }
}
public class class_name { public static boolean isForeignKey( NodeKey key, NodeKey rootKey ) { if (key == null) { return false; // depends on control dependency: [if], data = [none] } String nodeWorkspaceKey = key.getWorkspaceKey(); boolean sameWorkspace = rootKey.getWorkspaceKey().equals(nodeWorkspaceKey); boolean sameSource = rootKey.getSourceKey().equalsIgnoreCase(key.getSourceKey()); return !sameWorkspace || !sameSource; } }
public class class_name {
    /**
     * Replaces the configured properties with a defensive copy of newContents;
     * a null argument resets them to an empty list.
     */
    public synchronized void forceConfigProperties(List<ConfigProperty> newContents) {
        this.configProperties = (newContents == null)
                ? new ArrayList<ConfigProperty>(0)
                : new ArrayList<ConfigProperty>(newContents);
    }
}
public class class_name { public synchronized void forceConfigProperties(List<ConfigProperty> newContents) { if (newContents != null) { this.configProperties = new ArrayList<ConfigProperty>(newContents); // depends on control dependency: [if], data = [(newContents] } else { this.configProperties = new ArrayList<ConfigProperty>(0); // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Resolves messageKey against the message bundle, falling back to
     * defaultMessage when the bundle is absent, the key is blank, or no entry
     * exists; empty results are normalized to null.
     */
    protected String translate(String messageKey, String defaultMessage) {
        boolean canLookUp = messages != null && !Strings.isNullOrEmpty(messageKey);
        String resolved = canLookUp
                ? messages.getWithDefault(messageKey, defaultMessage, defaultLanguage)
                : defaultMessage;
        return Strings.emptyToNull(resolved);
    }
}
public class class_name { protected String translate(String messageKey, String defaultMessage) { if (messages == null || Strings.isNullOrEmpty(messageKey)) { return Strings.emptyToNull(defaultMessage); // depends on control dependency: [if], data = [none] } return Strings.emptyToNull(messages.getWithDefault(messageKey, defaultMessage, defaultLanguage)); } }
public class class_name { public DependencyCustomizer ifAllResourcesPresent(String... paths) { return new DependencyCustomizer(this) { @Override protected boolean canAdd() { for (String path : paths) { try { if (DependencyCustomizer.this.loader.getResource(path) == null) { return false; } return true; } catch (Exception ex) { // swallow exception and continue } } return DependencyCustomizer.this.canAdd(); } }; } }
public class class_name { public DependencyCustomizer ifAllResourcesPresent(String... paths) { return new DependencyCustomizer(this) { @Override protected boolean canAdd() { for (String path : paths) { try { if (DependencyCustomizer.this.loader.getResource(path) == null) { return false; // depends on control dependency: [if], data = [none] } return true; // depends on control dependency: [try], data = [none] } catch (Exception ex) { // swallow exception and continue } // depends on control dependency: [catch], data = [none] } return DependencyCustomizer.this.canAdd(); } }; } }
public class class_name { private String getJQLDeclared() { ModelAnnotation inserAnnotation = this.getAnnotation(BindSqlInsert.class); ModelAnnotation updateAnnotation = this.getAnnotation(BindSqlUpdate.class); ModelAnnotation selectAnnotation = this.getAnnotation(BindSqlSelect.class); ModelAnnotation deleteAnnotation = this.getAnnotation(BindSqlDelete.class); String jql = null; int counter = 0; if (selectAnnotation != null) { jql = selectAnnotation.getAttribute(AnnotationAttributeType.JQL); if (StringUtils.hasText(jql)) { counter++; AssertKripton.assertTrue(selectAnnotation.getAttributeCount() > 1, "Annotation %s in method %s.%s have more than one annotation with JQL attribute", selectAnnotation.getSimpleName(), this.getParent().getName(), this.getName()); } } if (inserAnnotation != null) { jql = inserAnnotation.getAttribute(AnnotationAttributeType.JQL); if (StringUtils.hasText(jql)) { counter++; AssertKripton.assertTrue(inserAnnotation.getAttributeCount() > 1, "Annotation %s in method %s.%s have more than one annotation with JQL attribute", inserAnnotation.getSimpleName(), this.getParent().getName(), this.getName()); } } if (updateAnnotation != null) { jql = updateAnnotation.getAttribute(AnnotationAttributeType.JQL); if (StringUtils.hasText(jql)) { counter++; AssertKripton.assertTrue(updateAnnotation.getAttributeCount() > 1, "Annotation %s in method %s.%s have more than one annotation with JQL attribute", updateAnnotation.getSimpleName(), this.getParent().getName(), this.getName()); } } if (deleteAnnotation != null) { jql = deleteAnnotation.getAttribute(AnnotationAttributeType.JQL); if (StringUtils.hasText(jql)) { counter++; AssertKripton.assertTrue(deleteAnnotation.getAttributeCount() > 1, "Annotation %s in method %s.%s have more than one annotation with JQL attribute", deleteAnnotation.getSimpleName(), this.getParent().getName(), this.getName()); } } AssertKripton.assertTrue(counter <= 1, "Method %s.%s have more than one annotation with JQL attribute", 
this.getParent().getName(), this.getName()); // remove unscape charater (example \'%\' -> '%') jql = StringEscapeUtils.unescapeEcmaScript(jql); return jql; } }
public class class_name { private String getJQLDeclared() { ModelAnnotation inserAnnotation = this.getAnnotation(BindSqlInsert.class); ModelAnnotation updateAnnotation = this.getAnnotation(BindSqlUpdate.class); ModelAnnotation selectAnnotation = this.getAnnotation(BindSqlSelect.class); ModelAnnotation deleteAnnotation = this.getAnnotation(BindSqlDelete.class); String jql = null; int counter = 0; if (selectAnnotation != null) { jql = selectAnnotation.getAttribute(AnnotationAttributeType.JQL); // depends on control dependency: [if], data = [none] if (StringUtils.hasText(jql)) { counter++; // depends on control dependency: [if], data = [none] AssertKripton.assertTrue(selectAnnotation.getAttributeCount() > 1, "Annotation %s in method %s.%s have more than one annotation with JQL attribute", selectAnnotation.getSimpleName(), this.getParent().getName(), this.getName()); // depends on control dependency: [if], data = [none] } } if (inserAnnotation != null) { jql = inserAnnotation.getAttribute(AnnotationAttributeType.JQL); // depends on control dependency: [if], data = [none] if (StringUtils.hasText(jql)) { counter++; // depends on control dependency: [if], data = [none] AssertKripton.assertTrue(inserAnnotation.getAttributeCount() > 1, "Annotation %s in method %s.%s have more than one annotation with JQL attribute", inserAnnotation.getSimpleName(), this.getParent().getName(), this.getName()); // depends on control dependency: [if], data = [none] } } if (updateAnnotation != null) { jql = updateAnnotation.getAttribute(AnnotationAttributeType.JQL); // depends on control dependency: [if], data = [none] if (StringUtils.hasText(jql)) { counter++; // depends on control dependency: [if], data = [none] AssertKripton.assertTrue(updateAnnotation.getAttributeCount() > 1, "Annotation %s in method %s.%s have more than one annotation with JQL attribute", updateAnnotation.getSimpleName(), this.getParent().getName(), this.getName()); // depends on control dependency: [if], data = [none] } } 
if (deleteAnnotation != null) { jql = deleteAnnotation.getAttribute(AnnotationAttributeType.JQL); // depends on control dependency: [if], data = [none] if (StringUtils.hasText(jql)) { counter++; // depends on control dependency: [if], data = [none] AssertKripton.assertTrue(deleteAnnotation.getAttributeCount() > 1, "Annotation %s in method %s.%s have more than one annotation with JQL attribute", deleteAnnotation.getSimpleName(), this.getParent().getName(), this.getName()); // depends on control dependency: [if], data = [none] } } AssertKripton.assertTrue(counter <= 1, "Method %s.%s have more than one annotation with JQL attribute", this.getParent().getName(), this.getName()); // remove unscape charater (example \'%\' -> '%') jql = StringEscapeUtils.unescapeEcmaScript(jql); return jql; } }
public class class_name {
    /**
     * True when the cleaned sequence contains no digits, no non-nucleotide
     * characters, and at least one nucleotide character.
     */
    public static boolean isNonAmbNucleotideSequence(String sequence) {
        String cleaned = SequenceUtil.cleanSequence(sequence);
        if (SequenceUtil.DIGIT.matcher(cleaned).find()) {
            return false;
        }
        if (SequenceUtil.NON_NUCLEOTIDE.matcher(cleaned).find()) {
            return false;
        }
        // require at least one actual nucleotide character
        Matcher nucleotideMatcher = SequenceUtil.NUCLEOTIDE.matcher(cleaned);
        return nucleotideMatcher.find();
    }
}
public class class_name { public static boolean isNonAmbNucleotideSequence(String sequence) { sequence = SequenceUtil.cleanSequence(sequence); if (SequenceUtil.DIGIT.matcher(sequence).find()) { return false; // depends on control dependency: [if], data = [none] } if (SequenceUtil.NON_NUCLEOTIDE.matcher(sequence).find()) { return false; // depends on control dependency: [if], data = [none] /* * System.out.format("I found the text starting at " + * "index %d and ending at index %d.%n", nonDNAmatcher .start(), * nonDNAmatcher.end()); */ } final Matcher DNAmatcher = SequenceUtil.NUCLEOTIDE.matcher(sequence); return DNAmatcher.find(); } }
public class class_name {
    /**
     * Callback invoked when a timeout expires: republishes it via addMessage
     * using the original headers from the expiration context. Any failure is
     * logged and swallowed so expiry processing for other timeouts continues.
     */
    private void timeoutHasExpired(final Timeout timeout, final TimeoutExpirationContext context) {
        try {
            addMessage(timeout, context.getOriginalHeaders());
        } catch (Exception ex) {
            LOG.error("Error handling timeout {}", timeout, ex);
        }
    }
}
public class class_name { private void timeoutHasExpired(final Timeout timeout, final TimeoutExpirationContext context) { try { addMessage(timeout, context.getOriginalHeaders()); // depends on control dependency: [try], data = [none] } catch (Exception ex) { LOG.error("Error handling timeout {}", timeout, ex); } // depends on control dependency: [catch], data = [none] } }
public class class_name {
    /**
     * Persists the aggregation state as an NcML XML file in the disk cache.
     * Skips writing when there is no cache, no cache name, or nothing changed
     * since the last write (and the file still exists). Uses a non-blocking
     * file lock so concurrent writers in other threads/VMs simply give up.
     * On success, the cache file's mtime is set to the last-scan time and the
     * dirty flag is cleared.
     *
     * @throws IOException on I/O failure while writing or closing the channel
     */
    public void persistWrite() throws IOException {
        if (diskCache2 == null) return;
        String cacheName = getCacheName();
        if (cacheName == null) return;
        if (cacheName.startsWith("file:")) // LOOK
            cacheName = cacheName.substring(5);
        File cacheFile = diskCache2.getCacheFile(cacheName);
        if (cacheFile == null) throw new IllegalStateException();
        // only write out if something changed after the cache file was last written, or if the file has been deleted
        if (!cacheDirty && cacheFile.exists()) return;
        FileChannel channel = null;
        try {
            File dir = cacheFile.getParentFile();
            if (!dir.exists()) {
                if (!dir.mkdirs()) logger.error("Cant make cache directory= " + cacheFile);
            }
            // Get a file channel for the file
            FileOutputStream fos = new FileOutputStream(cacheFile);
            channel = fos.getChannel();
            // Try acquiring the lock without blocking. This method returns
            // null or throws an exception if the file is already locked.
            FileLock lock;
            try {
                lock = channel.tryLock();
            } catch (OverlappingFileLockException e) {
                // File is already locked in this thread or virtual machine
                return; // give up
            }
            if (lock == null) return;
            PrintWriter out = new PrintWriter(new OutputStreamWriter(fos, CDM.utf8Charset));
            out.print("<?xml version='1.0' encoding='UTF-8'?>\n");
            out.print("<aggregation xmlns='http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2' version='3' ");
            out.print("type='" + type + "' ");
            if (dimName != null) out.print("dimName='" + dimName + "' ");
            if (datasetManager.getRecheck() != null) out.print("recheckEvery='" + datasetManager.getRecheck() + "' ");
            out.print(">\n");
            // one <netcdf> element per nested dataset, with its cached variable data inline
            List<Dataset> nestedDatasets = getDatasets();
            for (Dataset dataset : nestedDatasets) {
                DatasetOuterDimension dod = (DatasetOuterDimension) dataset;
                if (dod.getId() == null) logger.warn("id is null");
                out.print(" <netcdf id='" + dod.getId() + "' ");
                out.print("ncoords='" + dod.getNcoords(null) + "' >\n");
                for (CacheVar pv : cacheList) {
                    Array data = pv.getData(dod.getId());
                    if (data != null) {
                        out.print(" <cache varName='" + pv.varName + "' >");
                        while (data.hasNext()) out.printf("%s ", data.next());
                        out.print("</cache>\n");
                        if (logger.isDebugEnabled())
                            logger.debug(" wrote array = " + pv.varName + " nelems= " + data.getSize() + " for " + dataset.getLocation());
                    }
                }
                out.print(" </netcdf>\n");
            }
            out.print("</aggregation>\n");
            out.close(); // this also closes the channel and releases the lock
            long time = datasetManager.getLastScanned();
            if (time == 0) time = System.currentTimeMillis(); // no scans (eg all static) will have a 0
            if (!cacheFile.setLastModified(time)) logger.warn("FAIL to set lastModified on {}", cacheFile.getPath());
            cacheDirty = false;
            if (logger.isDebugEnabled())
                logger.debug("Aggregation persisted = " + cacheFile.getPath() + " lastModified= " + new Date(datasetManager.getLastScanned()));
        } finally {
            if (channel != null) channel.close();
        }
    }
}
public class class_name { public void persistWrite() throws IOException { if (diskCache2 == null) return; String cacheName = getCacheName(); if (cacheName == null) return; if (cacheName.startsWith("file:")) // LOOK cacheName = cacheName.substring(5); File cacheFile = diskCache2.getCacheFile(cacheName); if (cacheFile == null) throw new IllegalStateException(); // only write out if something changed after the cache file was last written, or if the file has been deleted if (!cacheDirty && cacheFile.exists()) return; FileChannel channel = null; try { File dir = cacheFile.getParentFile(); if (!dir.exists()) { if (!dir.mkdirs()) logger.error("Cant make cache directory= " + cacheFile); } // Get a file channel for the file FileOutputStream fos = new FileOutputStream(cacheFile); channel = fos.getChannel(); // Try acquiring the lock without blocking. This method returns // null or throws an exception if the file is already locked. FileLock lock; try { lock = channel.tryLock(); // depends on control dependency: [try], data = [none] } catch (OverlappingFileLockException e) { // File is already locked in this thread or virtual machine return; // give up } // depends on control dependency: [catch], data = [none] if (lock == null) return; PrintWriter out = new PrintWriter(new OutputStreamWriter(fos, CDM.utf8Charset)); out.print("<?xml version='1.0' encoding='UTF-8'?>\n"); out.print("<aggregation xmlns='http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2' version='3' "); out.print("type='" + type + "' "); if (dimName != null) out.print("dimName='" + dimName + "' "); if (datasetManager.getRecheck() != null) out.print("recheckEvery='" + datasetManager.getRecheck() + "' "); out.print(">\n"); List<Dataset> nestedDatasets = getDatasets(); for (Dataset dataset : nestedDatasets) { DatasetOuterDimension dod = (DatasetOuterDimension) dataset; if (dod.getId() == null) logger.warn("id is null"); out.print(" <netcdf id='" + dod.getId() + "' "); // depends on control dependency: [for], data 
= [none] out.print("ncoords='" + dod.getNcoords(null) + "' >\n"); // depends on control dependency: [for], data = [none] for (CacheVar pv : cacheList) { Array data = pv.getData(dod.getId()); if (data != null) { out.print(" <cache varName='" + pv.varName + "' >"); // depends on control dependency: [if], data = [none] while (data.hasNext()) out.printf("%s ", data.next()); out.print("</cache>\n"); // depends on control dependency: [if], data = [none] if (logger.isDebugEnabled()) logger.debug(" wrote array = " + pv.varName + " nelems= " + data.getSize() + " for " + dataset.getLocation()); } } out.print(" </netcdf>\n"); // depends on control dependency: [for], data = [none] } out.print("</aggregation>\n"); out.close(); // this also closes the channel and releases the lock long time = datasetManager.getLastScanned(); if (time == 0) time = System.currentTimeMillis(); // no scans (eg all static) will have a 0 if (!cacheFile.setLastModified(time)) logger.warn("FAIL to set lastModified on {}", cacheFile.getPath()); cacheDirty = false; if (logger.isDebugEnabled()) logger.debug("Aggregation persisted = " + cacheFile.getPath() + " lastModified= " + new Date(datasetManager.getLastScanned())); } finally { if (channel != null) channel.close(); } } }
public class class_name {
    /**
     * Fetches the active expectations matching the request as JSON and
     * deserializes them; blank or empty-array payloads yield an empty array.
     */
    public Expectation[] retrieveActiveExpectations(HttpRequest httpRequest) {
        String json = retrieveActiveExpectations(httpRequest, Format.JSON);
        boolean hasContent = !Strings.isNullOrEmpty(json) && !json.equals("[]");
        if (!hasContent) {
            return new Expectation[0];
        }
        return expectationSerializer.deserializeArray(json);
    }
}
public class class_name { public Expectation[] retrieveActiveExpectations(HttpRequest httpRequest) { String activeExpectations = retrieveActiveExpectations(httpRequest, Format.JSON); if (!Strings.isNullOrEmpty(activeExpectations) && !activeExpectations.equals("[]")) { return expectationSerializer.deserializeArray(activeExpectations); // depends on control dependency: [if], data = [none] } else { return new Expectation[0]; // depends on control dependency: [if], data = [none] } } }
public class class_name { public boolean shouldClose(TaskAttemptID taskid) { /** * If the task hasn't been closed yet, and it belongs to a completed * TaskInProgress close it. * * However, for completed map tasks we do not close the task which * actually was the one responsible for _completing_ the TaskInProgress. */ if (tasksReportedClosed.contains(taskid)) { if (tasksToKill.keySet().contains(taskid)) return true; else return false; } boolean close = false; TaskStatus ts = taskStatuses.get(taskid); if ((ts != null) && ((this.failed) || ((job.getStatus().getRunState() != JobStatus.RUNNING && (job.getStatus().getRunState() != JobStatus.PREP))))) { tasksReportedClosed.add(taskid); close = true; } else if ((completes > 0) && // isComplete() is synchronized! !(isMapTask() && !jobSetup && !jobCleanup && isComplete(taskid))) { tasksReportedClosed.add(taskid); close = true; } else if (isCommitPending(taskid) && !shouldCommit(taskid)) { tasksReportedClosed.add(taskid); close = true; } else { close = tasksToKill.keySet().contains(taskid); } return close; } }
public class class_name { public boolean shouldClose(TaskAttemptID taskid) { /** * If the task hasn't been closed yet, and it belongs to a completed * TaskInProgress close it. * * However, for completed map tasks we do not close the task which * actually was the one responsible for _completing_ the TaskInProgress. */ if (tasksReportedClosed.contains(taskid)) { if (tasksToKill.keySet().contains(taskid)) return true; else return false; } boolean close = false; TaskStatus ts = taskStatuses.get(taskid); if ((ts != null) && ((this.failed) || ((job.getStatus().getRunState() != JobStatus.RUNNING && (job.getStatus().getRunState() != JobStatus.PREP))))) { tasksReportedClosed.add(taskid); // depends on control dependency: [if], data = [none] close = true; // depends on control dependency: [if], data = [none] } else if ((completes > 0) && // isComplete() is synchronized! !(isMapTask() && !jobSetup && !jobCleanup && isComplete(taskid))) { tasksReportedClosed.add(taskid); // depends on control dependency: [if], data = [none] close = true; // depends on control dependency: [if], data = [none] } else if (isCommitPending(taskid) && !shouldCommit(taskid)) { tasksReportedClosed.add(taskid); // depends on control dependency: [if], data = [none] close = true; // depends on control dependency: [if], data = [none] } else { close = tasksToKill.keySet().contains(taskid); // depends on control dependency: [if], data = [none] } return close; } }
public class class_name { public static final void removeMin(long[] heapKeys, double[] heapValues, int heapSize) { heapValues[0] = heapValues[heapSize - 1]; heapKeys[0] = heapKeys[heapSize - 1]; int curIndex = 0; int leftIndex, rightIndex, minIndex; boolean done = false; while (!done) { done = true; leftIndex = 1 + (curIndex * 2); rightIndex = leftIndex + 1; minIndex = -1; if (rightIndex < heapSize) { minIndex = heapValues[leftIndex] <= heapValues[rightIndex] ? leftIndex : rightIndex; } else if (leftIndex < heapSize) { minIndex = leftIndex; } if (minIndex != -1 && heapValues[minIndex] < heapValues[curIndex]) { swap(heapKeys, heapValues, curIndex, minIndex); done = false; curIndex = minIndex; } } } }
public class class_name { public static final void removeMin(long[] heapKeys, double[] heapValues, int heapSize) { heapValues[0] = heapValues[heapSize - 1]; heapKeys[0] = heapKeys[heapSize - 1]; int curIndex = 0; int leftIndex, rightIndex, minIndex; boolean done = false; while (!done) { done = true; // depends on control dependency: [while], data = [none] leftIndex = 1 + (curIndex * 2); // depends on control dependency: [while], data = [none] rightIndex = leftIndex + 1; // depends on control dependency: [while], data = [none] minIndex = -1; // depends on control dependency: [while], data = [none] if (rightIndex < heapSize) { minIndex = heapValues[leftIndex] <= heapValues[rightIndex] ? leftIndex : rightIndex; // depends on control dependency: [if], data = [none] } else if (leftIndex < heapSize) { minIndex = leftIndex; // depends on control dependency: [if], data = [none] } if (minIndex != -1 && heapValues[minIndex] < heapValues[curIndex]) { swap(heapKeys, heapValues, curIndex, minIndex); // depends on control dependency: [if], data = [none] done = false; // depends on control dependency: [if], data = [none] curIndex = minIndex; // depends on control dependency: [if], data = [none] } } } }
public class class_name {
    /**
     * Walks up the parent chain until an aggregated stylesheet is reached and
     * returns it as a StylesheetComposed.
     */
    public StylesheetComposed getStylesheetComposed() {
        Stylesheet current = this;
        for (; !current.isAggregatedType(); current = current.getStylesheetParent()) {
            // keep climbing toward the aggregated ancestor
        }
        return (StylesheetComposed) current;
    }
}
public class class_name { public StylesheetComposed getStylesheetComposed() { Stylesheet sheet = this; while (!sheet.isAggregatedType()) { sheet = sheet.getStylesheetParent(); // depends on control dependency: [while], data = [none] } return (StylesheetComposed) sheet; } }
public class class_name { private void loadReadOnly() { checkState(_segmentMap.isEmpty()); // Only supported for newly allocated queue objects checkState(_readOnly); // Load the segment persistent state from disk. for (Map.Entry<UUID, String> entry : _dao.loadSegments(_name).entrySet()) { UUID id = entry.getKey(); Segment seg = newSegmentFromSnapshot(id, entry.getValue()); // Don't bother sorting segments, adjusting min or removing overlap. They're not necessary // when loading a read-only persistent queue. while (_segmentMap.containsKey(seg.getMin())) { seg.setMin(successor(seg.getMin())); } _segmentMap.put(seg.getMin(), seg); } } }
public class class_name { private void loadReadOnly() { checkState(_segmentMap.isEmpty()); // Only supported for newly allocated queue objects checkState(_readOnly); // Load the segment persistent state from disk. for (Map.Entry<UUID, String> entry : _dao.loadSegments(_name).entrySet()) { UUID id = entry.getKey(); Segment seg = newSegmentFromSnapshot(id, entry.getValue()); // Don't bother sorting segments, adjusting min or removing overlap. They're not necessary // when loading a read-only persistent queue. while (_segmentMap.containsKey(seg.getMin())) { seg.setMin(successor(seg.getMin())); // depends on control dependency: [while], data = [none] } _segmentMap.put(seg.getMin(), seg); // depends on control dependency: [for], data = [none] } } }
public class class_name { @RequestMapping(value="/{subscription}", method=POST) public @ResponseBody String receiveUpdate( @PathVariable("subscription") String subscription, @RequestBody String payload, @RequestHeader(X_HUB_SIGNATURE) String signature) throws Exception { // Can only read body once and we need it as a raw String to calculate the signature. // Therefore, use Jackson ObjectMapper to give us a RealTimeUpdate object from that raw String. RealTimeUpdate update = new ObjectMapper().readValue(payload, RealTimeUpdate.class); if (verifySignature(payload, signature)) { logger.debug("Received " + update.getObject() + " update for '" + subscription + "'."); for (UpdateHandler handler : updateHandlers) { handler.handleUpdate(subscription, update); } } else { logger.warn("Received an update, but signature was invalid. Not delegating to handlers."); } return ""; } }
public class class_name { @RequestMapping(value="/{subscription}", method=POST) public @ResponseBody String receiveUpdate( @PathVariable("subscription") String subscription, @RequestBody String payload, @RequestHeader(X_HUB_SIGNATURE) String signature) throws Exception { // Can only read body once and we need it as a raw String to calculate the signature. // Therefore, use Jackson ObjectMapper to give us a RealTimeUpdate object from that raw String. RealTimeUpdate update = new ObjectMapper().readValue(payload, RealTimeUpdate.class); if (verifySignature(payload, signature)) { logger.debug("Received " + update.getObject() + " update for '" + subscription + "'."); for (UpdateHandler handler : updateHandlers) { handler.handleUpdate(subscription, update); // depends on control dependency: [for], data = [handler] } } else { logger.warn("Received an update, but signature was invalid. Not delegating to handlers."); } return ""; } }
public class class_name { public static int getDelimiterOffset(final String line, final int start, final char delimiter) { int idx = line.indexOf(delimiter, start); if (idx >= 0) { idx -= start - 1; } return idx; } }
public class class_name { public static int getDelimiterOffset(final String line, final int start, final char delimiter) { int idx = line.indexOf(delimiter, start); if (idx >= 0) { idx -= start - 1; // depends on control dependency: [if], data = [none] } return idx; } }
public class class_name { public static String encodeStringsAsBase64Parameter(List<String> strings) { JSONArray array = new JSONArray(); for (String string : strings) { array.put(string); } byte[] bytes; try { // use obfuscateBytes here to to make the output look more random bytes = obfuscateBytes(array.toString().getBytes("UTF-8")); } catch (UnsupportedEncodingException e) { // should never happen e.printStackTrace(); throw new RuntimeException(e); } String result = Base64.encodeBase64String(bytes); result = StringUtils.replaceChars(result, BASE64_EXTRA, BASE64_EXTRA_REPLACEMENTS); return result; } }
public class class_name { public static String encodeStringsAsBase64Parameter(List<String> strings) { JSONArray array = new JSONArray(); for (String string : strings) { array.put(string); // depends on control dependency: [for], data = [string] } byte[] bytes; try { // use obfuscateBytes here to to make the output look more random bytes = obfuscateBytes(array.toString().getBytes("UTF-8")); // depends on control dependency: [try], data = [none] } catch (UnsupportedEncodingException e) { // should never happen e.printStackTrace(); throw new RuntimeException(e); } // depends on control dependency: [catch], data = [none] String result = Base64.encodeBase64String(bytes); result = StringUtils.replaceChars(result, BASE64_EXTRA, BASE64_EXTRA_REPLACEMENTS); return result; } }
public class class_name { public static String concatenateUrls(String rootUrl, String path) { if (rootUrl == null || rootUrl.isEmpty()) { return path; } if (path == null || path.isEmpty()) { return rootUrl; } String finalUrl; if (rootUrl.charAt(rootUrl.length() - 1) == '/' && path.charAt(0) == '/') { finalUrl = rootUrl.substring(0, rootUrl.length() - 2) + path; } else if (rootUrl.charAt(rootUrl.length() - 1) != '/' && path.charAt(0) != '/') { finalUrl = rootUrl + "/" + path; } else { finalUrl = rootUrl + path; } return finalUrl; } }
public class class_name { public static String concatenateUrls(String rootUrl, String path) { if (rootUrl == null || rootUrl.isEmpty()) { return path; // depends on control dependency: [if], data = [none] } if (path == null || path.isEmpty()) { return rootUrl; // depends on control dependency: [if], data = [none] } String finalUrl; if (rootUrl.charAt(rootUrl.length() - 1) == '/' && path.charAt(0) == '/') { finalUrl = rootUrl.substring(0, rootUrl.length() - 2) + path; // depends on control dependency: [if], data = [none] } else if (rootUrl.charAt(rootUrl.length() - 1) != '/' && path.charAt(0) != '/') { finalUrl = rootUrl + "/" + path; // depends on control dependency: [if], data = [none] } else { finalUrl = rootUrl + path; // depends on control dependency: [if], data = [none] } return finalUrl; } }
public class class_name { public void setThreshold(final double THRESHOLD) { if (Double.compare(THRESHOLD, minValue) >= 0 && Double.compare(THRESHOLD, maxValue) <= 0) { threshold = THRESHOLD; } else { if (THRESHOLD < niceMinValue) { threshold = niceMinValue; } if (THRESHOLD > niceMaxValue) { threshold = niceMaxValue; } } fireStateChanged(); } }
public class class_name { public void setThreshold(final double THRESHOLD) { if (Double.compare(THRESHOLD, minValue) >= 0 && Double.compare(THRESHOLD, maxValue) <= 0) { threshold = THRESHOLD; // depends on control dependency: [if], data = [none] } else { if (THRESHOLD < niceMinValue) { threshold = niceMinValue; // depends on control dependency: [if], data = [none] } if (THRESHOLD > niceMaxValue) { threshold = niceMaxValue; // depends on control dependency: [if], data = [none] } } fireStateChanged(); } }
public class class_name { public final EObject ruleDisjunction() throws RecognitionException { EObject current = null; Token otherlv_2=null; EObject this_Conjunction_0 = null; EObject lv_right_3_0 = null; enterRule(); try { // InternalXtext.g:1771:2: ( (this_Conjunction_0= ruleConjunction ( () otherlv_2= '|' ( (lv_right_3_0= ruleConjunction ) ) )* ) ) // InternalXtext.g:1772:2: (this_Conjunction_0= ruleConjunction ( () otherlv_2= '|' ( (lv_right_3_0= ruleConjunction ) ) )* ) { // InternalXtext.g:1772:2: (this_Conjunction_0= ruleConjunction ( () otherlv_2= '|' ( (lv_right_3_0= ruleConjunction ) ) )* ) // InternalXtext.g:1773:3: this_Conjunction_0= ruleConjunction ( () otherlv_2= '|' ( (lv_right_3_0= ruleConjunction ) ) )* { newCompositeNode(grammarAccess.getDisjunctionAccess().getConjunctionParserRuleCall_0()); pushFollow(FollowSets000.FOLLOW_25); this_Conjunction_0=ruleConjunction(); state._fsp--; current = this_Conjunction_0; afterParserOrEnumRuleCall(); // InternalXtext.g:1781:3: ( () otherlv_2= '|' ( (lv_right_3_0= ruleConjunction ) ) )* loop44: do { int alt44=2; int LA44_0 = input.LA(1); if ( (LA44_0==30) ) { alt44=1; } switch (alt44) { case 1 : // InternalXtext.g:1782:4: () otherlv_2= '|' ( (lv_right_3_0= ruleConjunction ) ) { // InternalXtext.g:1782:4: () // InternalXtext.g:1783:5: { current = forceCreateModelElementAndSet( grammarAccess.getDisjunctionAccess().getDisjunctionLeftAction_1_0(), current); } otherlv_2=(Token)match(input,30,FollowSets000.FOLLOW_26); newLeafNode(otherlv_2, grammarAccess.getDisjunctionAccess().getVerticalLineKeyword_1_1()); // InternalXtext.g:1793:4: ( (lv_right_3_0= ruleConjunction ) ) // InternalXtext.g:1794:5: (lv_right_3_0= ruleConjunction ) { // InternalXtext.g:1794:5: (lv_right_3_0= ruleConjunction ) // InternalXtext.g:1795:6: lv_right_3_0= ruleConjunction { newCompositeNode(grammarAccess.getDisjunctionAccess().getRightConjunctionParserRuleCall_1_2_0()); pushFollow(FollowSets000.FOLLOW_25); lv_right_3_0=ruleConjunction(); 
state._fsp--; if (current==null) { current = createModelElementForParent(grammarAccess.getDisjunctionRule()); } set( current, "right", lv_right_3_0, "org.eclipse.xtext.Xtext.Conjunction"); afterParserOrEnumRuleCall(); } } } break; default : break loop44; } } while (true); } } leaveRule(); } catch (RecognitionException re) { recover(input,re); appendSkippedTokens(); } finally { } return current; } }
public class class_name { public final EObject ruleDisjunction() throws RecognitionException { EObject current = null; Token otherlv_2=null; EObject this_Conjunction_0 = null; EObject lv_right_3_0 = null; enterRule(); try { // InternalXtext.g:1771:2: ( (this_Conjunction_0= ruleConjunction ( () otherlv_2= '|' ( (lv_right_3_0= ruleConjunction ) ) )* ) ) // InternalXtext.g:1772:2: (this_Conjunction_0= ruleConjunction ( () otherlv_2= '|' ( (lv_right_3_0= ruleConjunction ) ) )* ) { // InternalXtext.g:1772:2: (this_Conjunction_0= ruleConjunction ( () otherlv_2= '|' ( (lv_right_3_0= ruleConjunction ) ) )* ) // InternalXtext.g:1773:3: this_Conjunction_0= ruleConjunction ( () otherlv_2= '|' ( (lv_right_3_0= ruleConjunction ) ) )* { newCompositeNode(grammarAccess.getDisjunctionAccess().getConjunctionParserRuleCall_0()); pushFollow(FollowSets000.FOLLOW_25); this_Conjunction_0=ruleConjunction(); state._fsp--; current = this_Conjunction_0; afterParserOrEnumRuleCall(); // InternalXtext.g:1781:3: ( () otherlv_2= '|' ( (lv_right_3_0= ruleConjunction ) ) )* loop44: do { int alt44=2; int LA44_0 = input.LA(1); if ( (LA44_0==30) ) { alt44=1; // depends on control dependency: [if], data = [none] } switch (alt44) { case 1 : // InternalXtext.g:1782:4: () otherlv_2= '|' ( (lv_right_3_0= ruleConjunction ) ) { // InternalXtext.g:1782:4: () // InternalXtext.g:1783:5: { current = forceCreateModelElementAndSet( grammarAccess.getDisjunctionAccess().getDisjunctionLeftAction_1_0(), current); } otherlv_2=(Token)match(input,30,FollowSets000.FOLLOW_26); newLeafNode(otherlv_2, grammarAccess.getDisjunctionAccess().getVerticalLineKeyword_1_1()); // InternalXtext.g:1793:4: ( (lv_right_3_0= ruleConjunction ) ) // InternalXtext.g:1794:5: (lv_right_3_0= ruleConjunction ) { // InternalXtext.g:1794:5: (lv_right_3_0= ruleConjunction ) // InternalXtext.g:1795:6: lv_right_3_0= ruleConjunction { newCompositeNode(grammarAccess.getDisjunctionAccess().getRightConjunctionParserRuleCall_1_2_0()); 
pushFollow(FollowSets000.FOLLOW_25); lv_right_3_0=ruleConjunction(); state._fsp--; if (current==null) { current = createModelElementForParent(grammarAccess.getDisjunctionRule()); // depends on control dependency: [if], data = [none] } set( current, "right", lv_right_3_0, "org.eclipse.xtext.Xtext.Conjunction"); afterParserOrEnumRuleCall(); } } } break; default : break loop44; } } while (true); } } leaveRule(); } catch (RecognitionException re) { recover(input,re); appendSkippedTokens(); } finally { } return current; } }
public class class_name { public JsonGetterContext getContext(String queryPath) { JsonGetterContext context = internalCache.get(queryPath); if (context != null) { return context; } context = new JsonGetterContext(queryPath); JsonGetterContext previousContextValue = internalCache.putIfAbsent(queryPath, context); if (previousContextValue == null) { cleanupIfNeccessary(context); return context; } else { return previousContextValue; } } }
public class class_name { public JsonGetterContext getContext(String queryPath) { JsonGetterContext context = internalCache.get(queryPath); if (context != null) { return context; // depends on control dependency: [if], data = [none] } context = new JsonGetterContext(queryPath); JsonGetterContext previousContextValue = internalCache.putIfAbsent(queryPath, context); if (previousContextValue == null) { cleanupIfNeccessary(context); // depends on control dependency: [if], data = [none] return context; // depends on control dependency: [if], data = [none] } else { return previousContextValue; // depends on control dependency: [if], data = [none] } } }
public class class_name { boolean addIndex(WriteStream out, Page page, Page newPage, int saveSequence, Type type, int pid, int nextPid, int pageOffset, int pageLength, Result<Integer> result) throws IOException { int head; while ((head = _segment.writePageIndex(_indexBuffer, _indexTail, type.ordinal(), pid, nextPid, pageOffset, pageLength)) < 0) { writeIndexBlock(); // isCont is true if the new page/entry fits in the segment boolean isCont = _position + 2 * BLOCK_SIZE <= _indexAddress; if (! isCont) { // close(); return false; } _indexAddress -= BLOCK_SIZE; fillHeader(); } _isDirty = true; _indexTail = head; int position = pageOffset; //System.out.print(" [" + page.getId() + ",seq=" + page.getSequence() + "]"); if (isClosed()) { System.out.println("FLUSH_AFTER_CLOSE"); } _pendingFlushEntries.add(new PendingEntry(page, newPage, saveSequence, position, _indexAddress, head, result)); return true; } }
public class class_name { boolean addIndex(WriteStream out, Page page, Page newPage, int saveSequence, Type type, int pid, int nextPid, int pageOffset, int pageLength, Result<Integer> result) throws IOException { int head; while ((head = _segment.writePageIndex(_indexBuffer, _indexTail, type.ordinal(), pid, nextPid, pageOffset, pageLength)) < 0) { writeIndexBlock(); // isCont is true if the new page/entry fits in the segment boolean isCont = _position + 2 * BLOCK_SIZE <= _indexAddress; if (! isCont) { // close(); return false; // depends on control dependency: [if], data = [none] } _indexAddress -= BLOCK_SIZE; fillHeader(); } _isDirty = true; _indexTail = head; int position = pageOffset; //System.out.print(" [" + page.getId() + ",seq=" + page.getSequence() + "]"); if (isClosed()) { System.out.println("FLUSH_AFTER_CLOSE"); } _pendingFlushEntries.add(new PendingEntry(page, newPage, saveSequence, position, _indexAddress, head, result)); return true; } }
public class class_name { private boolean isExpressionTreeUnsafe(Node tree, boolean followingSideEffectsExist) { if (tree.isSpread()) { // Spread expressions would cause recursive rewriting if not special cased here. switch (tree.getParent().getToken()) { case OBJECTLIT: // Spreading an object, rather than an iterable, is assumed to be pure. That assesment is // based on the compiler assumption that getters are pure. This check say nothing of the // expression being spread. break; case ARRAYLIT: case CALL: case NEW: // When extracted, spreads can't be assigned to a single variable and instead are put into // an array-literal. However, that literal must be spread again at the original site. This // check is what prevents the original spread from triggering recursion. if (isTempConstantValueName(tree.getOnlyChild())) { return false; } break; default: throw new IllegalStateException( "Unexpected parent of SPREAD: " + tree.getParent().toStringTree()); } } if (followingSideEffectsExist) { // If the call to be inlined has side-effects, check to see if this // expression tree can be affected by any side-effects. // Assume that "tmp1.call(...)" is safe (where tmp1 is a const temp variable created by // ExpressionDecomposer) otherwise we end up trying to decompose the same tree // an infinite number of times. Node parent = tree.getParent(); if (NodeUtil.isObjectCallMethod(parent, "call") && tree.isFirstChildOf(parent) && isTempConstantValueName(tree.getFirstChild())) { return false; } // This is a superset of "NodeUtil.mayHaveSideEffects". return NodeUtil.canBeSideEffected(tree, this.knownConstants, scope); } else { // The function called doesn't have side-effects but check to see if there // are side-effects that that may affect it. return NodeUtil.mayHaveSideEffects(tree, compiler); } } }
public class class_name { private boolean isExpressionTreeUnsafe(Node tree, boolean followingSideEffectsExist) { if (tree.isSpread()) { // Spread expressions would cause recursive rewriting if not special cased here. switch (tree.getParent().getToken()) { case OBJECTLIT: // Spreading an object, rather than an iterable, is assumed to be pure. That assesment is // based on the compiler assumption that getters are pure. This check say nothing of the // expression being spread. break; case ARRAYLIT: case CALL: case NEW: // When extracted, spreads can't be assigned to a single variable and instead are put into // an array-literal. However, that literal must be spread again at the original site. This // check is what prevents the original spread from triggering recursion. if (isTempConstantValueName(tree.getOnlyChild())) { return false; // depends on control dependency: [if], data = [none] } break; default: throw new IllegalStateException( "Unexpected parent of SPREAD: " + tree.getParent().toStringTree()); } } if (followingSideEffectsExist) { // If the call to be inlined has side-effects, check to see if this // expression tree can be affected by any side-effects. // Assume that "tmp1.call(...)" is safe (where tmp1 is a const temp variable created by // ExpressionDecomposer) otherwise we end up trying to decompose the same tree // an infinite number of times. Node parent = tree.getParent(); if (NodeUtil.isObjectCallMethod(parent, "call") && tree.isFirstChildOf(parent) && isTempConstantValueName(tree.getFirstChild())) { return false; // depends on control dependency: [if], data = [none] } // This is a superset of "NodeUtil.mayHaveSideEffects". return NodeUtil.canBeSideEffected(tree, this.knownConstants, scope); // depends on control dependency: [if], data = [none] } else { // The function called doesn't have side-effects but check to see if there // are side-effects that that may affect it. 
return NodeUtil.mayHaveSideEffects(tree, compiler); // depends on control dependency: [if], data = [none] } } }
public class class_name { public static String getDefaultPortForScheme(final String scheme) { if ("ftp".equals(scheme)) { return "21"; } if ("file".equals(scheme)) { return null; } if ("gopher".equals(scheme)) { return "70"; } if ("http".equals(scheme)) { return "80"; } if ("https".equals(scheme)) { return "443"; } if ("ws".equals(scheme)) { return "80"; } if ("wss".equals(scheme)) { return "443"; } return null; } }
public class class_name { public static String getDefaultPortForScheme(final String scheme) { if ("ftp".equals(scheme)) { return "21"; // depends on control dependency: [if], data = [none] } if ("file".equals(scheme)) { return null; // depends on control dependency: [if], data = [none] } if ("gopher".equals(scheme)) { return "70"; // depends on control dependency: [if], data = [none] } if ("http".equals(scheme)) { return "80"; // depends on control dependency: [if], data = [none] } if ("https".equals(scheme)) { return "443"; // depends on control dependency: [if], data = [none] } if ("ws".equals(scheme)) { return "80"; // depends on control dependency: [if], data = [none] } if ("wss".equals(scheme)) { return "443"; // depends on control dependency: [if], data = [none] } return null; } }
public class class_name { public void marshall(AttributeKey attributeKey, ProtocolMarshaller protocolMarshaller) { if (attributeKey == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(attributeKey.getSchemaArn(), SCHEMAARN_BINDING); protocolMarshaller.marshall(attributeKey.getFacetName(), FACETNAME_BINDING); protocolMarshaller.marshall(attributeKey.getName(), NAME_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(AttributeKey attributeKey, ProtocolMarshaller protocolMarshaller) { if (attributeKey == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(attributeKey.getSchemaArn(), SCHEMAARN_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(attributeKey.getFacetName(), FACETNAME_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(attributeKey.getName(), NAME_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { private static boolean isTransformPossible(byte[] bytes) { if (bytes.length < 8) { return false; } // The transform method will be called for all classes, but ASM is only // capable of processing some class file format versions. That's ok // because the transformer only modifies classes that have been // preprocessed by our build anyway. // // ASM doesn't provide a way to determine its max supported version, so // we hard code the supported version number. int classFileVersion = ((bytes[6] & 0xff) << 8) | (bytes[7] & 0xff); //Limit bytecode that we transform based on JDK retransform compatibility //If we have issues here, 1.8 classes will be instead handled by a separate //transformer that only does those classes. if (isJDK8WithHotReplaceBug) return classFileVersion <= Opcodes.V1_7; else return classFileVersion <= Opcodes.V11; } }
public class class_name { private static boolean isTransformPossible(byte[] bytes) { if (bytes.length < 8) { return false; // depends on control dependency: [if], data = [none] } // The transform method will be called for all classes, but ASM is only // capable of processing some class file format versions. That's ok // because the transformer only modifies classes that have been // preprocessed by our build anyway. // // ASM doesn't provide a way to determine its max supported version, so // we hard code the supported version number. int classFileVersion = ((bytes[6] & 0xff) << 8) | (bytes[7] & 0xff); //Limit bytecode that we transform based on JDK retransform compatibility //If we have issues here, 1.8 classes will be instead handled by a separate //transformer that only does those classes. if (isJDK8WithHotReplaceBug) return classFileVersion <= Opcodes.V1_7; else return classFileVersion <= Opcodes.V11; } }
public class class_name { public static String getPropertyNameConvention(Object object, String suffix) { if (object != null) { Class<?> type = object.getClass(); if (type.isArray()) { return getPropertyName(type.getComponentType()) + suffix + "Array"; } if (object instanceof Collection) { Collection coll = (Collection) object; if (coll.isEmpty()) { return "emptyCollection"; } Object first = coll.iterator().next(); if (coll instanceof List) { return getPropertyName(first.getClass()) + suffix + "List"; } if (coll instanceof Set) { return getPropertyName(first.getClass()) + suffix + "Set"; } return getPropertyName(first.getClass()) + suffix + "Collection"; } if (object instanceof Map) { Map map = (Map) object; if (map.isEmpty()) { return "emptyMap"; } Object entry = map.values().iterator().next(); if (entry != null) { return getPropertyName(entry.getClass()) + suffix + "Map"; } } else { return getPropertyName(object.getClass()) + suffix; } } return null; } }
public class class_name { public static String getPropertyNameConvention(Object object, String suffix) { if (object != null) { Class<?> type = object.getClass(); if (type.isArray()) { return getPropertyName(type.getComponentType()) + suffix + "Array"; // depends on control dependency: [if], data = [none] } if (object instanceof Collection) { Collection coll = (Collection) object; if (coll.isEmpty()) { return "emptyCollection"; // depends on control dependency: [if], data = [none] } Object first = coll.iterator().next(); if (coll instanceof List) { return getPropertyName(first.getClass()) + suffix + "List"; // depends on control dependency: [if], data = [none] } if (coll instanceof Set) { return getPropertyName(first.getClass()) + suffix + "Set"; // depends on control dependency: [if], data = [none] } return getPropertyName(first.getClass()) + suffix + "Collection"; // depends on control dependency: [if], data = [none] } if (object instanceof Map) { Map map = (Map) object; if (map.isEmpty()) { return "emptyMap"; // depends on control dependency: [if], data = [none] } Object entry = map.values().iterator().next(); if (entry != null) { return getPropertyName(entry.getClass()) + suffix + "Map"; // depends on control dependency: [if], data = [(entry] } } else { return getPropertyName(object.getClass()) + suffix; // depends on control dependency: [if], data = [none] } } return null; } }
public class class_name { private JSONArray datesToJson(Collection<Date> individualDates) { if (null != individualDates) { JSONArray result = new JSONArray(); for (Date d : individualDates) { result.put(dateToJson(d)); } return result; } return null; } }
public class class_name { private JSONArray datesToJson(Collection<Date> individualDates) { if (null != individualDates) { JSONArray result = new JSONArray(); for (Date d : individualDates) { result.put(dateToJson(d)); // depends on control dependency: [for], data = [d] } return result; // depends on control dependency: [if], data = [none] } return null; } }
public class class_name { private void prefetchRelationships(Query query) { List prefetchedRel; Collection owners; String relName; RelationshipPrefetcher[] prefetchers; if (query == null || query.getPrefetchedRelationships() == null || query.getPrefetchedRelationships().isEmpty()) { return; } if (!supportsAdvancedJDBCCursorControl()) { logger.info("prefetching relationships requires JDBC level 2.0"); return; } // prevent releasing of DBResources setInBatchedMode(true); prefetchedRel = query.getPrefetchedRelationships(); prefetchers = new RelationshipPrefetcher[prefetchedRel.size()]; // disable auto retrieve for all prefetched relationships for (int i = 0; i < prefetchedRel.size(); i++) { relName = (String) prefetchedRel.get(i); prefetchers[i] = getBroker().getRelationshipPrefetcherFactory() .createRelationshipPrefetcher(getQueryObject().getClassDescriptor(), relName); prefetchers[i].prepareRelationshipSettings(); } // materialize ALL owners of this Iterator owners = getOwnerObjects(); // prefetch relationships and associate with owners for (int i = 0; i < prefetchedRel.size(); i++) { prefetchers[i].prefetchRelationship(owners); } // reset auto retrieve for all prefetched relationships for (int i = 0; i < prefetchedRel.size(); i++) { prefetchers[i].restoreRelationshipSettings(); } try { getRsAndStmt().m_rs.beforeFirst(); // reposition resultset jdbc 2.0 } catch (SQLException e) { logger.error("beforeFirst failed !", e); } setInBatchedMode(false); setHasCalledCheck(false); } }
public class class_name { private void prefetchRelationships(Query query) { List prefetchedRel; Collection owners; String relName; RelationshipPrefetcher[] prefetchers; if (query == null || query.getPrefetchedRelationships() == null || query.getPrefetchedRelationships().isEmpty()) { return; // depends on control dependency: [if], data = [none] } if (!supportsAdvancedJDBCCursorControl()) { logger.info("prefetching relationships requires JDBC level 2.0"); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } // prevent releasing of DBResources setInBatchedMode(true); prefetchedRel = query.getPrefetchedRelationships(); prefetchers = new RelationshipPrefetcher[prefetchedRel.size()]; // disable auto retrieve for all prefetched relationships for (int i = 0; i < prefetchedRel.size(); i++) { relName = (String) prefetchedRel.get(i); // depends on control dependency: [for], data = [i] prefetchers[i] = getBroker().getRelationshipPrefetcherFactory() .createRelationshipPrefetcher(getQueryObject().getClassDescriptor(), relName); // depends on control dependency: [for], data = [i] prefetchers[i].prepareRelationshipSettings(); // depends on control dependency: [for], data = [i] } // materialize ALL owners of this Iterator owners = getOwnerObjects(); // prefetch relationships and associate with owners for (int i = 0; i < prefetchedRel.size(); i++) { prefetchers[i].prefetchRelationship(owners); // depends on control dependency: [for], data = [i] } // reset auto retrieve for all prefetched relationships for (int i = 0; i < prefetchedRel.size(); i++) { prefetchers[i].restoreRelationshipSettings(); // depends on control dependency: [for], data = [i] } try { getRsAndStmt().m_rs.beforeFirst(); // reposition resultset jdbc 2.0 // depends on control dependency: [try], data = [none] } catch (SQLException e) { logger.error("beforeFirst failed !", e); } // depends on control dependency: [catch], data = [none] 
setInBatchedMode(false); setHasCalledCheck(false); } }
public class class_name { public boolean applyLayout(Layout layout) { boolean applied = false; if (layout != null && isValidLayout(layout)) { Layout defaultLayout = getDefaultLayout(); if (layout != defaultLayout) { mLayouts.remove(defaultLayout); } if(mLayouts.add(layout)) { layout.onLayoutApplied(this, mViewPort); applied = true; } } return applied; } }
public class class_name { public boolean applyLayout(Layout layout) { boolean applied = false; if (layout != null && isValidLayout(layout)) { Layout defaultLayout = getDefaultLayout(); if (layout != defaultLayout) { mLayouts.remove(defaultLayout); // depends on control dependency: [if], data = [defaultLayout)] } if(mLayouts.add(layout)) { layout.onLayoutApplied(this, mViewPort); // depends on control dependency: [if], data = [none] applied = true; // depends on control dependency: [if], data = [none] } } return applied; } }
public class class_name { static Policy getPolicy() { final SecurityManager sm = System.getSecurityManager(); if (sm != null) { return AccessController.doPrivileged(new PrivilegedAction<Policy>() { public Policy run() { return Policy.getPolicy(); } }); } else { return Policy.getPolicy(); } } }
public class class_name { static Policy getPolicy() { final SecurityManager sm = System.getSecurityManager(); if (sm != null) { return AccessController.doPrivileged(new PrivilegedAction<Policy>() { public Policy run() { return Policy.getPolicy(); } }); // depends on control dependency: [if], data = [none] } else { return Policy.getPolicy(); // depends on control dependency: [if], data = [none] } } }
public class class_name { @Override public MatchResponse getMatchResponse(SecurityConstraint securityConstraint, String resourceName, String method) { CollectionMatch collectionMatch = getCollectionMatch(securityConstraint.getWebResourceCollections(), resourceName, method); if (CollectionMatch.RESPONSE_NO_MATCH.equals(collectionMatch) || (collectionMatch == null && securityConstraint.getRoles().isEmpty() && securityConstraint.isAccessPrecluded() == false)) { return MatchResponse.NO_MATCH_RESPONSE; } else if (collectionMatch == null) { return MatchResponse.CUSTOM_NO_MATCH_RESPONSE; } return new MatchResponse(securityConstraint.getRoles(), securityConstraint.isSSLRequired(), securityConstraint.isAccessPrecluded(), collectionMatch); } }
public class class_name { @Override public MatchResponse getMatchResponse(SecurityConstraint securityConstraint, String resourceName, String method) { CollectionMatch collectionMatch = getCollectionMatch(securityConstraint.getWebResourceCollections(), resourceName, method); if (CollectionMatch.RESPONSE_NO_MATCH.equals(collectionMatch) || (collectionMatch == null && securityConstraint.getRoles().isEmpty() && securityConstraint.isAccessPrecluded() == false)) { return MatchResponse.NO_MATCH_RESPONSE; // depends on control dependency: [if], data = [none] } else if (collectionMatch == null) { return MatchResponse.CUSTOM_NO_MATCH_RESPONSE; // depends on control dependency: [if], data = [none] } return new MatchResponse(securityConstraint.getRoles(), securityConstraint.isSSLRequired(), securityConstraint.isAccessPrecluded(), collectionMatch); } }
public class class_name { public KeysAndAttributes withKeys(Key... keys) { if (getKeys() == null) setKeys(new java.util.ArrayList<Key>(keys.length)); for (Key value : keys) { getKeys().add(value); } return this; } }
public class class_name { public KeysAndAttributes withKeys(Key... keys) { if (getKeys() == null) setKeys(new java.util.ArrayList<Key>(keys.length)); for (Key value : keys) { getKeys().add(value); // depends on control dependency: [for], data = [value] } return this; } }
public class class_name { private String sortWeakReferenceValues(String name, String value) { Set<String> refs = new TreeSet<>(); DocViewProperty prop = DocViewProperty.parse(name, value); for (int i = 0; i < prop.values.length; i++) { refs.add(prop.values[i]); } List<Value> values = new ArrayList<>(); for (String ref : refs) { values.add(new MockValue(ref, PropertyType.WEAKREFERENCE)); } try { String sortedValues = DocViewProperty.format(new MockProperty(name, true, values.toArray(new Value[values.size()]))); return sortedValues; } catch (RepositoryException ex) { throw new RuntimeException("Unable to format value for " + name, ex); } } }
public class class_name { private String sortWeakReferenceValues(String name, String value) { Set<String> refs = new TreeSet<>(); DocViewProperty prop = DocViewProperty.parse(name, value); for (int i = 0; i < prop.values.length; i++) { refs.add(prop.values[i]); // depends on control dependency: [for], data = [i] } List<Value> values = new ArrayList<>(); for (String ref : refs) { values.add(new MockValue(ref, PropertyType.WEAKREFERENCE)); // depends on control dependency: [for], data = [ref] } try { String sortedValues = DocViewProperty.format(new MockProperty(name, true, values.toArray(new Value[values.size()]))); return sortedValues; // depends on control dependency: [try], data = [none] } catch (RepositoryException ex) { throw new RuntimeException("Unable to format value for " + name, ex); } // depends on control dependency: [catch], data = [none] } }
// Copies the schedule_exceptions table when it exists in the source snapshot;
// otherwise synthesises schedule_exceptions rows from calendar_dates (one row
// per date with added/removed service), and -- for calendar_dates-only feeds --
// also inserts auto-generated dummy calendar rows spanning each service's
// observed date range. On any failure the transaction is rolled back and the
// exception recorded in tableLoadResult.fatalException rather than rethrown.
// NOTE(review): the PreparedStatements are not closed here -- presumably the
// connection's lifecycle covers them; confirm against the caller.
public class class_name { private TableLoadResult createScheduleExceptionsTable() { // check to see if the schedule_exceptions table exists boolean scheduleExceptionsTableExists = tableExists(feedIdToSnapshot, "schedule_exceptions"); String scheduleExceptionsTableName = tablePrefix + "schedule_exceptions"; if (scheduleExceptionsTableExists) { // schedule_exceptions table already exists in namespace being copied from. Therefore, we simply copy it. return copy(Table.SCHEDULE_EXCEPTIONS, true); } else { // schedule_exceptions does not exist. Therefore, we generate schedule_exceptions from the calendar_dates. TableLoadResult tableLoadResult = new TableLoadResult(); try { Table.SCHEDULE_EXCEPTIONS.createSqlTable( connection, tablePrefix.replace(".", ""), true ); String sql = String.format( "insert into %s (name, dates, exemplar, added_service, removed_service) values (?, ?, ?, ?, ?)", scheduleExceptionsTableName ); PreparedStatement scheduleExceptionsStatement = connection.prepareStatement(sql); final BatchTracker scheduleExceptionsTracker = new BatchTracker( "schedule_exceptions", scheduleExceptionsStatement ); JDBCTableReader<CalendarDate> calendarDatesReader = new JDBCTableReader( Table.CALENDAR_DATES, dataSource, feedIdToSnapshot + ".", EntityPopulator.CALENDAR_DATE ); Iterable<CalendarDate> calendarDates = calendarDatesReader.getAll(); // Keep track of calendars by service id in case we need to add dummy calendar entries. Map<String, Calendar> calendarsByServiceId = new HashMap<>(); // Iterate through calendar dates to build up to get maps from exceptions to their dates. Multimap<String, String> removedServiceForDate = HashMultimap.create(); Multimap<String, String> addedServiceForDate = HashMultimap.create(); for (CalendarDate calendarDate : calendarDates) { // Skip any null dates if (calendarDate.date == null) { LOG.warn("Encountered calendar date record with null value for date field. 
Skipping."); continue; } String date = calendarDate.date.format(DateTimeFormatter.BASIC_ISO_DATE); if (calendarDate.exception_type == 1) { addedServiceForDate.put(date, calendarDate.service_id); // create (if needed) and extend range of dummy calendar that would need to be created if we are // copying from a feed that doesn't have the calendar.txt file Calendar calendar = calendarsByServiceId.getOrDefault(calendarDate.service_id, new Calendar()); calendar.service_id = calendarDate.service_id; if (calendar.start_date == null || calendar.start_date.isAfter(calendarDate.date)) { calendar.start_date = calendarDate.date; } if (calendar.end_date == null || calendar.end_date.isBefore(calendarDate.date)) { calendar.end_date = calendarDate.date; } calendarsByServiceId.put(calendarDate.service_id, calendar); } else { removedServiceForDate.put(date, calendarDate.service_id); } } // Iterate through dates with added or removed service and add to database. // For usability and simplicity of code, don't attempt to find all dates with similar // added and removed services, but simply create an entry for each found date. for (String date : Sets.union(removedServiceForDate.keySet(), addedServiceForDate.keySet())) { scheduleExceptionsStatement.setString(1, date); String[] dates = {date}; scheduleExceptionsStatement.setArray(2, connection.createArrayOf("text", dates)); scheduleExceptionsStatement.setInt(3, 9); // FIXME use better static type scheduleExceptionsStatement.setArray( 4, connection.createArrayOf("text", addedServiceForDate.get(date).toArray()) ); scheduleExceptionsStatement.setArray( 5, connection.createArrayOf("text", removedServiceForDate.get(date).toArray()) ); scheduleExceptionsTracker.addBatch(); } scheduleExceptionsTracker.executeRemaining(); // determine if we appear to be working with a calendar_dates-only feed. 
// If so, we must also add dummy entries to the calendar table if ( feedIdToSnapshot != null && !tableExists(feedIdToSnapshot, "calendar") && calendarDatesReader.getRowCount() > 0 ) { sql = String.format( "insert into %s (service_id, description, start_date, end_date, " + "monday, tuesday, wednesday, thursday, friday, saturday, sunday)" + "values (?, ?, ?, ?, 0, 0, 0, 0, 0, 0, 0)", tablePrefix + "calendar" ); PreparedStatement calendarStatement = connection.prepareStatement(sql); final BatchTracker calendarsTracker = new BatchTracker( "calendar", calendarStatement ); for (Calendar calendar : calendarsByServiceId.values()) { calendarStatement.setString(1, calendar.service_id); calendarStatement.setString( 2, String.format("%s (auto-generated)", calendar.service_id) ); calendarStatement.setString( 3, calendar.start_date.format(DateTimeFormatter.BASIC_ISO_DATE) ); calendarStatement.setString( 4, calendar.end_date.format(DateTimeFormatter.BASIC_ISO_DATE) ); calendarsTracker.addBatch(); } calendarsTracker.executeRemaining(); } connection.commit(); } catch (Exception e) { tableLoadResult.fatalException = e.toString(); LOG.error("Error creating schedule Exceptions: ", e); e.printStackTrace(); try { connection.rollback(); } catch (SQLException ex) { ex.printStackTrace(); } } LOG.info("done creating schedule exceptions"); return tableLoadResult; } } }
// NOTE(review): machine-annotated duplicate of createScheduleExceptionsTable()
// above; the "// depends on control dependency" markers are dataset labels.
// Code must stay byte-identical to the unannotated record.
public class class_name { private TableLoadResult createScheduleExceptionsTable() { // check to see if the schedule_exceptions table exists boolean scheduleExceptionsTableExists = tableExists(feedIdToSnapshot, "schedule_exceptions"); String scheduleExceptionsTableName = tablePrefix + "schedule_exceptions"; if (scheduleExceptionsTableExists) { // schedule_exceptions table already exists in namespace being copied from. Therefore, we simply copy it. return copy(Table.SCHEDULE_EXCEPTIONS, true); // depends on control dependency: [if], data = [none] } else { // schedule_exceptions does not exist. Therefore, we generate schedule_exceptions from the calendar_dates. TableLoadResult tableLoadResult = new TableLoadResult(); try { Table.SCHEDULE_EXCEPTIONS.createSqlTable( connection, tablePrefix.replace(".", ""), true ); // depends on control dependency: [try], data = [none] String sql = String.format( "insert into %s (name, dates, exemplar, added_service, removed_service) values (?, ?, ?, ?, ?)", scheduleExceptionsTableName ); PreparedStatement scheduleExceptionsStatement = connection.prepareStatement(sql); final BatchTracker scheduleExceptionsTracker = new BatchTracker( "schedule_exceptions", scheduleExceptionsStatement ); JDBCTableReader<CalendarDate> calendarDatesReader = new JDBCTableReader( Table.CALENDAR_DATES, dataSource, feedIdToSnapshot + ".", EntityPopulator.CALENDAR_DATE ); Iterable<CalendarDate> calendarDates = calendarDatesReader.getAll(); // Keep track of calendars by service id in case we need to add dummy calendar entries. Map<String, Calendar> calendarsByServiceId = new HashMap<>(); // Iterate through calendar dates to build up to get maps from exceptions to their dates. 
Multimap<String, String> removedServiceForDate = HashMultimap.create(); Multimap<String, String> addedServiceForDate = HashMultimap.create(); for (CalendarDate calendarDate : calendarDates) { // Skip any null dates if (calendarDate.date == null) { LOG.warn("Encountered calendar date record with null value for date field. Skipping."); // depends on control dependency: [if], data = [none] continue; } String date = calendarDate.date.format(DateTimeFormatter.BASIC_ISO_DATE); if (calendarDate.exception_type == 1) { addedServiceForDate.put(date, calendarDate.service_id); // depends on control dependency: [if], data = [none] // create (if needed) and extend range of dummy calendar that would need to be created if we are // copying from a feed that doesn't have the calendar.txt file Calendar calendar = calendarsByServiceId.getOrDefault(calendarDate.service_id, new Calendar()); calendar.service_id = calendarDate.service_id; // depends on control dependency: [if], data = [none] if (calendar.start_date == null || calendar.start_date.isAfter(calendarDate.date)) { calendar.start_date = calendarDate.date; // depends on control dependency: [if], data = [none] } if (calendar.end_date == null || calendar.end_date.isBefore(calendarDate.date)) { calendar.end_date = calendarDate.date; // depends on control dependency: [if], data = [none] } calendarsByServiceId.put(calendarDate.service_id, calendar); // depends on control dependency: [if], data = [none] } else { removedServiceForDate.put(date, calendarDate.service_id); // depends on control dependency: [if], data = [none] } } // Iterate through dates with added or removed service and add to database. // For usability and simplicity of code, don't attempt to find all dates with similar // added and removed services, but simply create an entry for each found date. 
for (String date : Sets.union(removedServiceForDate.keySet(), addedServiceForDate.keySet())) { scheduleExceptionsStatement.setString(1, date); // depends on control dependency: [for], data = [date] String[] dates = {date}; scheduleExceptionsStatement.setArray(2, connection.createArrayOf("text", dates)); // depends on control dependency: [for], data = [date] scheduleExceptionsStatement.setInt(3, 9); // FIXME use better static type // depends on control dependency: [for], data = [none] scheduleExceptionsStatement.setArray( 4, connection.createArrayOf("text", addedServiceForDate.get(date).toArray()) ); // depends on control dependency: [for], data = [none] scheduleExceptionsStatement.setArray( 5, connection.createArrayOf("text", removedServiceForDate.get(date).toArray()) ); // depends on control dependency: [for], data = [none] scheduleExceptionsTracker.addBatch(); // depends on control dependency: [for], data = [none] } scheduleExceptionsTracker.executeRemaining(); // depends on control dependency: [try], data = [none] // determine if we appear to be working with a calendar_dates-only feed. 
// If so, we must also add dummy entries to the calendar table if ( feedIdToSnapshot != null && !tableExists(feedIdToSnapshot, "calendar") && calendarDatesReader.getRowCount() > 0 ) { sql = String.format( "insert into %s (service_id, description, start_date, end_date, " + "monday, tuesday, wednesday, thursday, friday, saturday, sunday)" + "values (?, ?, ?, ?, 0, 0, 0, 0, 0, 0, 0)", tablePrefix + "calendar" ); // depends on control dependency: [if], data = [] PreparedStatement calendarStatement = connection.prepareStatement(sql); final BatchTracker calendarsTracker = new BatchTracker( "calendar", calendarStatement ); for (Calendar calendar : calendarsByServiceId.values()) { calendarStatement.setString(1, calendar.service_id); // depends on control dependency: [for], data = [calendar] calendarStatement.setString( 2, String.format("%s (auto-generated)", calendar.service_id) ); // depends on control dependency: [for], data = [none] calendarStatement.setString( 3, calendar.start_date.format(DateTimeFormatter.BASIC_ISO_DATE) ); // depends on control dependency: [for], data = [none] calendarStatement.setString( 4, calendar.end_date.format(DateTimeFormatter.BASIC_ISO_DATE) ); // depends on control dependency: [for], data = [none] calendarsTracker.addBatch(); // depends on control dependency: [for], data = [calendar] } calendarsTracker.executeRemaining(); // depends on control dependency: [if], data = [] } connection.commit(); // depends on control dependency: [try], data = [none] } catch (Exception e) { tableLoadResult.fatalException = e.toString(); LOG.error("Error creating schedule Exceptions: ", e); e.printStackTrace(); try { connection.rollback(); // depends on control dependency: [try], data = [none] } catch (SQLException ex) { ex.printStackTrace(); } // depends on control dependency: [catch], data = [none] } // depends on control dependency: [catch], data = [none] LOG.info("done creating schedule exceptions"); // depends on control dependency: [if], data = [none] 
return tableLoadResult; // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Scans the attribute value with every configured URL pattern and collects
     * each trimmed match that the validator accepts.
     *
     * @param attributeValue the text to scan
     * @return all validated URL matches, in pattern-then-position order
     */
    private List<String> findUrlsInAttributeValue(final String attributeValue) {
        final List<String> matches = new ArrayList<>();
        for (Pattern urlPattern : urlPatterns) {
            Matcher matcher = urlPattern.matcher(attributeValue);
            while (matcher.find()) {
                String candidate = matcher.group().trim();
                if (validator.test(candidate)) {
                    matches.add(candidate);
                }
            }
        }
        return matches;
    }
}
// NOTE(review): machine-annotated duplicate of findUrlsInAttributeValue(...)
// above; the "// depends on control dependency" marker is a dataset label.
// Code must stay byte-identical to the unannotated record.
public class class_name { private List<String> findUrlsInAttributeValue(final String attributeValue) { List<String> foundUrls = new ArrayList<>(); urlPatterns.stream() .map((Pattern urlPattern) -> urlPattern.matcher(attributeValue)) .forEach((Matcher urlPatternMatcher) -> { while (urlPatternMatcher.find()) { String foundUrl = urlPatternMatcher.group().trim(); if (validator.test(foundUrl)) { foundUrls.add(foundUrl); // depends on control dependency: [if], data = [none] } } }); return foundUrls; } }
public class class_name {
    /**
     * Serialises the given manifest to a flat JSON object string containing the
     * algorithm, the "featureId:featureName" pair, and the description.
     *
     * NOTE(review): field values are not JSON-escaped; a quote or backslash in
     * any manifest field would produce malformed JSON -- confirm inputs are safe.
     *
     * @param cm the manifest to serialise; may be null
     * @return the JSON text, or {@code null} when {@code cm} is null
     */
    private static String toJSON(CustomManifest cm) {
        if (cm == null) {
            return null;
        }
        // StringBuilder: the buffer is method-local, so the synchronisation of
        // StringBuffer is pure overhead.
        StringBuilder sb = new StringBuilder("{");
        sb.append("\"").append(KEY_ALGORITHM_NAME).append("\":\"")
                .append(cm.getAlgorithm()).append("\",");
        sb.append("\"").append(KEY_FEATURE_NAME).append("\":\"")
                .append(cm.getFeatureId()).append(':').append(cm.getFeatureName()).append("\",");
        sb.append("\"").append(KEY_DESCRIPTION_NAME).append("\":\"")
                .append(cm.getDescription()).append("\"}");
        return sb.toString();
    }
}
// NOTE(review): machine-annotated duplicate of toJSON(...) above; the
// "// depends on control dependency" markers are dataset labels. Code must
// stay byte-identical to the unannotated record.
public class class_name { private static String toJSON(CustomManifest cm) { String output = null; if (cm != null) { String alg = cm.getAlgorithm(); String fi = cm.getFeatureId(); String fn = cm.getFeatureName(); String desc = cm.getDescription(); StringBuffer sb = new StringBuffer("{"); sb.append("\"").append(KEY_ALGORITHM_NAME).append("\":\"").append(alg).append("\","); // depends on control dependency: [if], data = [none] sb.append("\"").append(KEY_FEATURE_NAME).append("\":\"").append(fi).append(':').append(fn).append("\","); // depends on control dependency: [if], data = [none] sb.append("\"").append(KEY_DESCRIPTION_NAME).append("\":\"").append(desc).append("\"}"); // depends on control dependency: [if], data = [none] output = sb.toString(); // depends on control dependency: [if], data = [none] } return output; } }
public class class_name {
    /**
     * Percent-encodes each path segment of a {@code file:} URL prefix while
     * leaving the '/', '\' and ':' separators intact. Non-file prefixes (and
     * null) are returned unchanged.
     *
     * @param prefix the URL prefix to encode; may be null
     * @return the encoded prefix, or the input unchanged when it is null or
     *         does not start with "file:"
     */
    private static String uencode(String prefix) {
        if (prefix == null || !prefix.startsWith("file:")) {
            return prefix;
        }
        StringTokenizer tokens = new StringTokenizer(prefix, "/\\:", true);
        StringBuilder encoded = new StringBuilder();
        while (tokens.hasMoreElements()) {
            String token = tokens.nextToken();
            if ("/".equals(token) || "\\".equals(token) || ":".equals(token)) {
                // Separators are returned as tokens (third ctor arg true) and
                // must pass through verbatim.
                encoded.append(token);
            } else {
                try {
                    encoded.append(URLEncoder.encode(token, "UTF-8"));
                } catch (java.io.UnsupportedEncodingException ex) {
                    // FIX: was silently swallowed. UTF-8 support is mandated by
                    // the Java platform, so reaching here is a JVM defect --
                    // fail loudly instead of silently dropping the segment.
                    throw new AssertionError("UTF-8 not supported", ex);
                }
            }
        }
        return encoded.toString();
    }
}
// NOTE(review): machine-annotated duplicate of uencode(...) above; the
// "// depends on control dependency" markers are dataset labels. Code must
// stay byte-identical to the unannotated record.
public class class_name { private static String uencode(String prefix) { if (prefix != null && prefix.startsWith("file:")) { StringTokenizer tokens = new StringTokenizer(prefix, "/\\:", true); StringBuilder stringBuilder = new StringBuilder(); while (tokens.hasMoreElements()) { String token = tokens.nextToken(); if ("/".equals(token) || "\\".equals(token) || ":".equals(token)) { stringBuilder.append(token); // depends on control dependency: [if], data = [none] } else { try { stringBuilder.append(URLEncoder.encode(token, "UTF-8")); // depends on control dependency: [try], data = [none] } catch(java.io.UnsupportedEncodingException ex) { } // depends on control dependency: [catch], data = [none] } } return stringBuilder.toString(); // depends on control dependency: [if], data = [none] } else { return prefix; // depends on control dependency: [if], data = [none] } } }
public class class_name { public ImageSource apply(ImageSource input) { ImageSource originalImage = input; int width = originalImage.getWidth(); int height = originalImage.getHeight(); boolean[][] matrix = new boolean[width][height]; // black n white boolean matrix; true = blck, false = white // Copy ImageSource filteredImage = new MatrixSource(input); int[] histogram = OtsuBinarize.imageHistogram(originalImage); int totalNumberOfpixels = height * width; int threshold = OtsuBinarize.threshold(histogram, totalNumberOfpixels); int black = 0; int white = 255; int gray; int alpha; int newColor; for (int i = 0; i < width; i++) { for (int j = 0; j < height; j++) { gray = originalImage.getGray(i, j); if (gray > threshold) { matrix[i][j] = false; } else { matrix[i][j] = true; } } } int blackTreshold = letterThreshold(originalImage, matrix); for (int i = 0; i < width; i++) { for (int j = 0; j < height; j++) { gray = originalImage.getGray(i, j); alpha = originalImage.getA(i, j); if (gray > blackTreshold) { newColor = white; } else { newColor = black; } newColor = ColorHelper.getARGB(newColor, newColor, newColor, alpha); filteredImage.setRGB(i, j, newColor); } } return filteredImage; } }
// NOTE(review): machine-annotated duplicate of apply(...) above; the
// "// depends on control dependency" markers are dataset labels. Code must
// stay byte-identical to the unannotated record.
public class class_name { public ImageSource apply(ImageSource input) { ImageSource originalImage = input; int width = originalImage.getWidth(); int height = originalImage.getHeight(); boolean[][] matrix = new boolean[width][height]; // black n white boolean matrix; true = blck, false = white // Copy ImageSource filteredImage = new MatrixSource(input); int[] histogram = OtsuBinarize.imageHistogram(originalImage); int totalNumberOfpixels = height * width; int threshold = OtsuBinarize.threshold(histogram, totalNumberOfpixels); int black = 0; int white = 255; int gray; int alpha; int newColor; for (int i = 0; i < width; i++) { for (int j = 0; j < height; j++) { gray = originalImage.getGray(i, j); // depends on control dependency: [for], data = [j] if (gray > threshold) { matrix[i][j] = false; // depends on control dependency: [if], data = [none] } else { matrix[i][j] = true; // depends on control dependency: [if], data = [none] } } } int blackTreshold = letterThreshold(originalImage, matrix); for (int i = 0; i < width; i++) { for (int j = 0; j < height; j++) { gray = originalImage.getGray(i, j); // depends on control dependency: [for], data = [j] alpha = originalImage.getA(i, j); // depends on control dependency: [for], data = [j] if (gray > blackTreshold) { newColor = white; // depends on control dependency: [if], data = [none] } else { newColor = black; // depends on control dependency: [if], data = [none] } newColor = ColorHelper.getARGB(newColor, newColor, newColor, alpha); // depends on control dependency: [for], data = [none] filteredImage.setRGB(i, j, newColor); // depends on control dependency: [for], data = [j] } } return filteredImage; } }
public class class_name { protected JSONObject getJSONObject(final String jsonString) { JSONObject json = new JSONObject(); try { json = new JSONObject(jsonString); } catch(NullPointerException e) { LOGGER.error("JSON string cannot be null.", e); } catch(JSONException e) { LOGGER.error("Could not parse string into JSONObject.", e); } return json; } }
// NOTE(review): machine-annotated duplicate of getJSONObject(...) above; the
// "// depends on control dependency" markers are dataset labels. Code must
// stay byte-identical to the unannotated record.
public class class_name { protected JSONObject getJSONObject(final String jsonString) { JSONObject json = new JSONObject(); try { json = new JSONObject(jsonString); // depends on control dependency: [try], data = [none] } catch(NullPointerException e) { LOGGER.error("JSON string cannot be null.", e); } // depends on control dependency: [catch], data = [none] catch(JSONException e) { LOGGER.error("Could not parse string into JSONObject.", e); } // depends on control dependency: [catch], data = [none] return json; } }
public class class_name {
    /**
     * Builds a VIEW intent for the {@code google.streetview:} URI scheme at the
     * given coordinates, with optional camera (yaw/pitch/zoom) and map-zoom
     * parameters; absent camera components are left empty in the cbp string.
     *
     * @param latitude  panorama latitude
     * @param longitude panorama longitude
     * @param yaw       optional camera yaw; may be null
     * @param pitch     optional camera pitch; may be null
     * @param zoom      optional camera zoom; may be null
     * @param mapZoom   optional map zoom level; may be null
     * @return an ACTION_VIEW intent carrying the street-view URI
     */
    public static Intent showStreetView(float latitude, float longitude, Float yaw,
            Integer pitch, Float zoom, Integer mapZoom) {
        StringBuilder uri = new StringBuilder("google.streetview:cbll=");
        uri.append(latitude).append(",").append(longitude);
        if (yaw != null || pitch != null || zoom != null) {
            String yawPart = (yaw == null) ? "" : String.valueOf(yaw);
            String pitchPart = (pitch == null) ? "" : String.valueOf(pitch);
            String zoomPart = (zoom == null) ? "" : String.valueOf(zoom);
            uri.append("&cbp=1,").append(String.format("%s,,%s,%s", yawPart, pitchPart, zoomPart));
        }
        if (mapZoom != null) {
            uri.append("&mz=").append(mapZoom);
        }
        Intent intent = new Intent();
        intent.setAction(Intent.ACTION_VIEW);
        intent.setData(Uri.parse(uri.toString()));
        return intent;
    }
}
// NOTE(review): machine-annotated duplicate of showStreetView(...) above; the
// "// depends on control dependency" markers are dataset labels. Code must
// stay byte-identical to the unannotated record.
public class class_name { public static Intent showStreetView(float latitude, float longitude, Float yaw, Integer pitch, Float zoom, Integer mapZoom) { StringBuilder builder = new StringBuilder("google.streetview:cbll=").append(latitude).append(",").append(longitude); if (yaw != null || pitch != null || zoom != null) { String cbpParam = String.format("%s,,%s,%s", yaw == null ? "" : yaw, pitch == null ? "" : pitch, zoom == null ? "" : zoom); builder.append("&cbp=1,").append(cbpParam); // depends on control dependency: [if], data = [none] } if (mapZoom != null) { builder.append("&mz=").append(mapZoom); // depends on control dependency: [if], data = [(mapZoom] } Intent intent = new Intent(); intent.setAction(Intent.ACTION_VIEW); intent.setData(Uri.parse(builder.toString())); return intent; } }
public class class_name {
    /**
     * Classifies the currently active network.
     *
     * @return the matching {@link NetworkConnectionType}, or NO_CONNECTION when
     *         there is no active network, no internet access, or an unmapped
     *         transport type
     */
    public NetworkConnectionType getCurrentConnectionType() {
        NetworkInfo active = getActiveNetworkInfo();
        if (active == null || !isInternetAccessAvailable()) {
            return NetworkConnectionType.NO_CONNECTION;
        }
        final int type = active.getType();
        if (type == ConnectivityManager.TYPE_WIFI) {
            return NetworkConnectionType.WIFI;
        }
        if (type == ConnectivityManager.TYPE_MOBILE) {
            return NetworkConnectionType.MOBILE;
        }
        if (type == ConnectivityManager.TYPE_WIMAX) {
            return NetworkConnectionType.WIMAX;
        }
        if (type == ConnectivityManager.TYPE_ETHERNET) {
            return NetworkConnectionType.ETHERNET;
        }
        return NetworkConnectionType.NO_CONNECTION;
    }
}
// NOTE(review): machine-annotated duplicate of getCurrentConnectionType()
// above; the "// depends on control dependency" marker is a dataset label.
// Code must stay byte-identical to the unannotated record.
public class class_name { public NetworkConnectionType getCurrentConnectionType() { NetworkInfo activeNetworkInfo = getActiveNetworkInfo(); if (activeNetworkInfo == null || !isInternetAccessAvailable()) { return NetworkConnectionType.NO_CONNECTION; // depends on control dependency: [if], data = [none] } switch (activeNetworkInfo.getType()) { case ConnectivityManager.TYPE_WIFI: return NetworkConnectionType.WIFI; case ConnectivityManager.TYPE_MOBILE: return NetworkConnectionType.MOBILE; case ConnectivityManager.TYPE_WIMAX: return NetworkConnectionType.WIMAX; case ConnectivityManager.TYPE_ETHERNET: return NetworkConnectionType.ETHERNET; default: return NetworkConnectionType.NO_CONNECTION; } } }
public class class_name {
    /**
     * Copies all bytes from one stream to the other, optionally reporting
     * progress, and closes both streams when done (even on failure).
     *
     * @param copyFrom source stream
     * @param copyTo   destination stream
     * @param progress optional progress tracker; the copy stops early when it
     *                 reports inactive. May be null.
     * @throws IOException on read or write failure
     */
    public static void copyStream(InputStream copyFrom, OutputStream copyTo,
            GeoPackageProgress progress) throws IOException {
        try {
            byte[] chunk = new byte[1024];
            while (progress == null || progress.isActive()) {
                int read = copyFrom.read(chunk);
                if (read <= 0) {
                    break;
                }
                copyTo.write(chunk, 0, read);
                if (progress != null) {
                    progress.addProgress(read);
                }
            }
            copyTo.flush();
        } finally {
            // Both streams are closed here regardless of outcome.
            closeQuietly(copyTo);
            closeQuietly(copyFrom);
        }
    }
}
// NOTE(review): machine-annotated duplicate of copyStream(...) above; the
// "// depends on control dependency" marker is a dataset label. Code must
// stay byte-identical to the unannotated record.
public class class_name { public static void copyStream(InputStream copyFrom, OutputStream copyTo, GeoPackageProgress progress) throws IOException { try { byte[] buffer = new byte[1024]; int length; while ((progress == null || progress.isActive()) && (length = copyFrom.read(buffer)) > 0) { copyTo.write(buffer, 0, length); if (progress != null) { progress.addProgress(length); // depends on control dependency: [if], data = [none] } } copyTo.flush(); } finally { closeQuietly(copyTo); closeQuietly(copyFrom); } } }
public class class_name {
    /**
     * Removes the entry registered under the given id, if present, keeping the
     * parallel tasks/patterns/ids lists (and the size counter) in sync.
     *
     * @param id the identifier of the entry to remove; a miss is a no-op
     */
    public void remove(String id) {
        final Lock writeLock = lock.writeLock();
        // FIX: acquire the lock BEFORE the try block. If lock() itself threw
        // inside the try, the finally would call unlock() on a lock we never
        // held, raising IllegalMonitorStateException and masking the cause.
        writeLock.lock();
        try {
            final int index = ids.indexOf(id);
            if (index > -1) {
                tasks.remove(index);
                patterns.remove(index);
                ids.remove(index);
                size--;
            }
        } finally {
            writeLock.unlock();
        }
    }
}
// NOTE(review): machine-annotated duplicate of remove(...) above; the
// "// depends on control dependency" markers are dataset labels. Code must
// stay byte-identical to the unannotated record.
public class class_name { public void remove(String id) { final Lock writeLock = lock.writeLock(); try { writeLock.lock(); // depends on control dependency: [try], data = [none] final int index = ids.indexOf(id); if (index > -1) { tasks.remove(index); // depends on control dependency: [if], data = [(index] patterns.remove(index); // depends on control dependency: [if], data = [(index] ids.remove(index); // depends on control dependency: [if], data = [(index] size--; // depends on control dependency: [if], data = [none] } } finally { writeLock.unlock(); } } }
public class class_name { protected boolean doesLevelMatch(final Level level, final CSNodeWrapper node, boolean matchContent) { if (!EntityUtilities.isNodeALevel(node)) return false; // If the unique id is not from the parser, than use the unique id to compare if (level.getUniqueId() != null && level.getUniqueId().matches("^\\d.*")) { return level.getUniqueId().equals(Integer.toString(node.getId())); } else { // If the target ids match then the level should be the same if (level.getTargetId() != null && level.getTargetId() == node.getTargetId()) { return true; } if (matchContent) { // Make sure the level type matches if (node.getNodeType() != level.getLevelType().getId()) return false; return level.getTitle().equals(node.getTitle()); } else { return StringUtilities.similarDamerauLevenshtein(level.getTitle(), node.getTitle()) >= ProcessorConstants.MIN_MATCH_SIMILARITY; } } } }
// NOTE(review): machine-annotated duplicate of doesLevelMatch(...) above; the
// "// depends on control dependency" markers are dataset labels. Code must
// stay byte-identical to the unannotated record.
public class class_name { protected boolean doesLevelMatch(final Level level, final CSNodeWrapper node, boolean matchContent) { if (!EntityUtilities.isNodeALevel(node)) return false; // If the unique id is not from the parser, than use the unique id to compare if (level.getUniqueId() != null && level.getUniqueId().matches("^\\d.*")) { return level.getUniqueId().equals(Integer.toString(node.getId())); // depends on control dependency: [if], data = [none] } else { // If the target ids match then the level should be the same if (level.getTargetId() != null && level.getTargetId() == node.getTargetId()) { return true; // depends on control dependency: [if], data = [none] } if (matchContent) { // Make sure the level type matches if (node.getNodeType() != level.getLevelType().getId()) return false; return level.getTitle().equals(node.getTitle()); // depends on control dependency: [if], data = [none] } else { return StringUtilities.similarDamerauLevenshtein(level.getTitle(), node.getTitle()) >= ProcessorConstants.MIN_MATCH_SIMILARITY; // depends on control dependency: [if], data = [none] } } } }
public class class_name {
    /**
     * Removes the grid record holding exactly this association value (identity
     * comparison) and drops its id mapping.
     *
     * @param associationValue the value whose record should be removed
     * @return true when a record was found and removed, false otherwise
     */
    public boolean deleteValue(AssociationValue associationValue) {
        if (!idByValue.containsKey(associationValue)) {
            return false;
        }
        for (ListGridRecord record : getRecords()) {
            // Intentional identity (==) comparison: we are looking for the
            // record that holds this exact instance.
            if (record.getAttributeAsObject(VALUE_HOLDER_RECORD_ATTRIBUTE) == associationValue) {
                removeData(record);
                idByValue.remove(associationValue);
                return true;
            }
        }
        return false;
    }
}
// NOTE(review): machine-annotated duplicate of deleteValue(...) above; the
// "// depends on control dependency" markers are dataset labels. Code must
// stay byte-identical to the unannotated record.
public class class_name { public boolean deleteValue(AssociationValue associationValue) { if (idByValue.containsKey(associationValue)) { for (ListGridRecord record : getRecords()) { if (record.getAttributeAsObject(VALUE_HOLDER_RECORD_ATTRIBUTE) == associationValue) { removeData(record); // depends on control dependency: [if], data = [none] idByValue.remove(associationValue); // depends on control dependency: [if], data = [associationValue)] return true; // depends on control dependency: [if], data = [none] } } } return false; } }
public class class_name {
    /**
     * Quantises a GrayF64 image into a GrayU8 image with {@code numValues}
     * evenly spaced levels over [min, max].
     *
     * @param input     the source floating-point image
     * @param min       value mapped to level 0
     * @param max       value mapped to the top level
     * @param numValues number of output levels; must be in [0, 256]
     * @param output    destination image, reshaped to match; created when null
     * @return the populated output image
     * @throws IllegalArgumentException when numValues is outside [0, 256]
     */
    public static GrayU8 convert(GrayF64 input, double min, double max,
                                 int numValues, GrayU8 output) {
        if (output == null) {
            output = new GrayU8(input.width, input.height);
        } else {
            output.reshape(input.width, input.height);
        }
        if (numValues < 0 || numValues > 256) {
            throw new IllegalArgumentException("0 <= numValues <= 256");
        }
        // levels is the top quantisation index; +0.5 rounds to nearest.
        final int levels = numValues - 1;
        final double range = max - min;
        for (int y = 0; y < input.height; y++) {
            int idxIn = input.startIndex + y * input.stride;
            int idxOut = output.startIndex + y * output.stride;
            for (int x = 0; x < input.width; x++, idxIn++, idxOut++) {
                output.data[idxOut] = (byte) (int) (levels * (input.data[idxIn] - min) / range + 0.5);
            }
        }
        return output;
    }
}
// NOTE(review): machine-annotated duplicate of convert(...) above; the
// "// depends on control dependency" markers are dataset labels. Code must
// stay byte-identical to the unannotated record.
public class class_name { public static GrayU8 convert(GrayF64 input , double min , double max , int numValues , GrayU8 output ) { if (output == null) { output = new GrayU8(input.width, input.height); // depends on control dependency: [if], data = [none] } else { output.reshape(input.width,input.height); // depends on control dependency: [if], data = [none] } if( numValues < 0 || numValues > 256 ) throw new IllegalArgumentException("0 <= numValues <= 256"); numValues -= 1; double range = max-min; for (int y = 0; y < input.height; y++) { int indexIn = input.startIndex + y*input.stride; int indexOut = output.startIndex + y*output.stride; for (int x = 0; x < input.width; x++) { int value = (int)(numValues*((input.data[indexIn++])-min)/range + 0.5); output.data[indexOut++] = (byte)value; // depends on control dependency: [for], data = [none] } } return output; } }
public class class_name {
    /**
     * Appends the supplied face detections to this result, lazily creating the
     * backing list on first use.
     *
     * @param faces the detections to add (varargs)
     * @return this instance, to allow method chaining
     */
    public GetFaceDetectionResult withFaces(FaceDetection... faces) {
        if (this.faces == null) {
            // Pre-size the list to the number of detections being added.
            setFaces(new java.util.ArrayList<FaceDetection>(faces.length));
        }
        for (int i = 0; i < faces.length; i++) {
            this.faces.add(faces[i]);
        }
        return this;
    }
}
// NOTE(review): machine-annotated duplicate of withFaces(...) above; the
// "// depends on control dependency" markers are dataset labels. Code must
// stay byte-identical to the unannotated record.
public class class_name { public GetFaceDetectionResult withFaces(FaceDetection... faces) { if (this.faces == null) { setFaces(new java.util.ArrayList<FaceDetection>(faces.length)); // depends on control dependency: [if], data = [none] } for (FaceDetection ele : faces) { this.faces.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
public class class_name {
    /**
     * Computes the field difference between this instant and the given one,
     * falling back to "now" when no instant is supplied.
     *
     * @param instant the instant to compare against; null means the current time
     * @return the difference in units of this object's field
     */
    public int getDifference(ReadableInstant instant) {
        if (instant != null) {
            return getField().getDifference(getMillis(), instant.getMillis());
        }
        // Null argument: compare against the current time.
        return getField().getDifference(getMillis(), DateTimeUtils.currentTimeMillis());
    }
}
// NOTE(review): machine-annotated duplicate of getDifference(...) above; the
// "// depends on control dependency" marker is a dataset label. Code must
// stay byte-identical to the unannotated record.
public class class_name { public int getDifference(ReadableInstant instant) { if (instant == null) { return getField().getDifference(getMillis(), DateTimeUtils.currentTimeMillis()); // depends on control dependency: [if], data = [none] } return getField().getDifference(getMillis(), instant.getMillis()); } }
public class class_name {
    /**
     * Obtains an EmbeddedCacheManager from the first ServiceLoader-discovered
     * provider that returns a non-null manager, falling back to the default
     * provider when none volunteers one.
     *
     * @param properties      configuration handed to each provider
     * @param serviceRegistry registry used by the default provider fallback
     * @return the first non-null cache manager produced
     */
    protected EmbeddedCacheManager createCacheManager(Properties properties,
            ServiceRegistry serviceRegistry) {
        ServiceLoader<EmbeddedCacheManagerProvider> providers = ServiceLoader.load(
                EmbeddedCacheManagerProvider.class,
                EmbeddedCacheManagerProvider.class.getClassLoader());
        for (EmbeddedCacheManagerProvider provider : providers) {
            EmbeddedCacheManager candidate = provider.getEmbeddedCacheManager(properties);
            if (candidate != null) {
                return candidate;
            }
        }
        // No discovered provider produced a manager: use the default implementation.
        return new DefaultCacheManagerProvider(serviceRegistry).getEmbeddedCacheManager(properties);
    }
}
// NOTE(review): machine-annotated duplicate of createCacheManager(...) above;
// the "// depends on control dependency" marker is a dataset label. Code must
// stay byte-identical to the unannotated record.
public class class_name { protected EmbeddedCacheManager createCacheManager(Properties properties, ServiceRegistry serviceRegistry) { for (EmbeddedCacheManagerProvider provider : ServiceLoader.load(EmbeddedCacheManagerProvider.class, EmbeddedCacheManagerProvider.class.getClassLoader())) { EmbeddedCacheManager cacheManager = provider.getEmbeddedCacheManager(properties); if (cacheManager != null) { return cacheManager; // depends on control dependency: [if], data = [none] } } return new DefaultCacheManagerProvider(serviceRegistry).getEmbeddedCacheManager(properties); } }
public class class_name { @SuppressWarnings({ "unchecked", "rawtypes" }) public void sleeRunning() throws InvalidStateException { // if entity is active then activate the ra object if (this.state.isActive()) { if (setFTContext) { setFTContext = false; if (object.isFaultTolerant()) { // set fault tolerant context, it is a ft ra try { this.ftResourceAdaptorContext = new FaultTolerantResourceAdaptorContextImpl(name,sleeContainer,(FaultTolerantResourceAdaptor) object.getResourceAdaptorObject()); object.setFaultTolerantResourceAdaptorContext(ftResourceAdaptorContext); } catch (Throwable t) { logger.error("Got exception invoking setFaultTolerantResourceAdaptorContext(...) for entity "+name, t); } } } try { object.raActive(); } catch (Throwable t) { logger.error("Got exception invoking raActive() for entity "+name, t); } } } }
// NOTE(review): machine-annotated duplicate of sleeRunning() above; the
// "// depends on control dependency" markers are dataset labels. Code must
// stay byte-identical to the unannotated record.
public class class_name { @SuppressWarnings({ "unchecked", "rawtypes" }) public void sleeRunning() throws InvalidStateException { // if entity is active then activate the ra object if (this.state.isActive()) { if (setFTContext) { setFTContext = false; if (object.isFaultTolerant()) { // set fault tolerant context, it is a ft ra try { this.ftResourceAdaptorContext = new FaultTolerantResourceAdaptorContextImpl(name,sleeContainer,(FaultTolerantResourceAdaptor) object.getResourceAdaptorObject()); // depends on control dependency: [try], data = [none] object.setFaultTolerantResourceAdaptorContext(ftResourceAdaptorContext); // depends on control dependency: [try], data = [none] } catch (Throwable t) { logger.error("Got exception invoking setFaultTolerantResourceAdaptorContext(...) for entity "+name, t); } // depends on control dependency: [catch], data = [none] } } try { object.raActive(); } catch (Throwable t) { logger.error("Got exception invoking raActive() for entity "+name, t); } } } }
public class class_name {
    /**
     * Positions the cursor at the given key using the requested cursor operation.
     *
     * @param key the key to position at (buffered into {@code kv} before the call)
     * @param op  the native cursor operation to execute
     * @return {@code true} when the operation found an entry (key/value buffers
     *         are then populated via {@code keyOut()}/{@code valOut()}),
     *         {@code false} when the native call reported MDB_NOTFOUND
     */
    public boolean get(final T key, final GetOp op) {
        if (SHOULD_CHECK) {
            // Argument and state validation is compiled in only when checking is enabled.
            requireNonNull(key);
            requireNonNull(op);
            checkNotClosed();
            txn.checkReady();
        }
        kv.keyIn(key);
        final int result = LIB.mdb_cursor_get(ptrCursor, kv.pointerKey(), kv.pointerVal(), op.getCode());
        if (result != MDB_NOTFOUND) {
            checkRc(result); // raise for any other non-success code
            kv.keyOut();
            kv.valOut();
            return true;
        }
        return false;
    }
}
// NOTE(review): dependency-annotated dataset variant of get(key, op) — inline "// depends on control dependency" markers are labels; code kept byte-identical.
public class class_name { public boolean get(final T key, final GetOp op) { if (SHOULD_CHECK) { requireNonNull(key); // depends on control dependency: [if], data = [none] requireNonNull(op); // depends on control dependency: [if], data = [none] checkNotClosed(); // depends on control dependency: [if], data = [none] txn.checkReady(); // depends on control dependency: [if], data = [none] } kv.keyIn(key); final int rc = LIB.mdb_cursor_get(ptrCursor, kv.pointerKey(), kv .pointerVal(), op.getCode()); if (rc == MDB_NOTFOUND) { return false; // depends on control dependency: [if], data = [none] } checkRc(rc); kv.keyOut(); kv.valOut(); return true; } }
public class class_name {
    /**
     * Clears the named property on this entity, then delegates to the superclass
     * so inherited properties are cleared as well.
     * <p>
     * The checks are kept as independent ifs (not else-if) to preserve the
     * original behavior should any two PROP_* constants ever share a value.
     *
     * @param propName the property name to unset
     */
    @Override
    public void unset(String propName) {
        if (propName.equals(PROP_ST)) unsetSt();
        if (propName.equals(PROP_STATE_OR_PROVINCE_NAME)) unsetStateOrProvinceName();
        if (propName.equals(PROP_STREET)) unsetStreet();
        if (propName.equals(PROP_SEE_ALSO)) unsetSeeAlso();
        if (propName.equals(PROP_DESCRIPTION)) unsetDescription();
        super.unset(propName);
    }
}
// NOTE(review): dependency-annotated dataset variant of unset(propName) — inline "// depends on control dependency" markers are labels; code kept byte-identical.
public class class_name { @Override public void unset(String propName) { if (propName.equals(PROP_ST)) { unsetSt(); // depends on control dependency: [if], data = [none] } if (propName.equals(PROP_STATE_OR_PROVINCE_NAME)) { unsetStateOrProvinceName(); // depends on control dependency: [if], data = [none] } if (propName.equals(PROP_STREET)) { unsetStreet(); // depends on control dependency: [if], data = [none] } if (propName.equals(PROP_SEE_ALSO)) { unsetSeeAlso(); // depends on control dependency: [if], data = [none] } if (propName.equals(PROP_DESCRIPTION)) { unsetDescription(); // depends on control dependency: [if], data = [none] } super.unset(propName); } }
public class class_name {
    /**
     * Serializes a {@code StartTimerFailedEventAttributes} instance into the
     * given protocol marshaller.
     *
     * @param startTimerFailedEventAttributes the event attributes to marshall; must not be null
     * @param protocolMarshaller              the target marshaller
     * @throws SdkClientException if the argument is null or any field fails to marshall
     */
    public void marshall(StartTimerFailedEventAttributes startTimerFailedEventAttributes, ProtocolMarshaller protocolMarshaller) {
        if (startTimerFailedEventAttributes == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        final StartTimerFailedEventAttributes attrs = startTimerFailedEventAttributes;
        try {
            // Each field is written against its static binding descriptor.
            protocolMarshaller.marshall(attrs.getTimerId(), TIMERID_BINDING);
            protocolMarshaller.marshall(attrs.getCause(), CAUSE_BINDING);
            protocolMarshaller.marshall(attrs.getDecisionTaskCompletedEventId(), DECISIONTASKCOMPLETEDEVENTID_BINDING);
        } catch (Exception e) {
            // Wrap every failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
// NOTE(review): dependency-annotated dataset variant of marshall(...) — inline "// depends on control dependency" markers are labels; code kept byte-identical.
public class class_name { public void marshall(StartTimerFailedEventAttributes startTimerFailedEventAttributes, ProtocolMarshaller protocolMarshaller) { if (startTimerFailedEventAttributes == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(startTimerFailedEventAttributes.getTimerId(), TIMERID_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(startTimerFailedEventAttributes.getCause(), CAUSE_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(startTimerFailedEventAttributes.getDecisionTaskCompletedEventId(), DECISIONTASKCOMPLETEDEVENTID_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { @Override public XBELInternalAnnotationDefinition convert(AnnotationDefinition t) { if (t == null || t.getURL() != null) { return null; } XBELInternalAnnotationDefinition dest = new XBELInternalAnnotationDefinition(); String description = t.getDescription(); String id = t.getId(); String usage = t.getUsage(); // If null, set equal to an empty string so that the document can be properly converted. if (description == null) description = ""; if (usage == null) usage = ""; dest.setDescription(description); dest.setId(id); dest.setUsage(usage); AnnotationType type = t.getType(); String value = t.getValue(); switch (type) { case ENUMERATION: List<String> enums = t.getEnums(); XBELListAnnotation xla = new XBELListAnnotation(); List<String> xlaval = xla.getListValue(); xlaval.addAll(enums); dest.setListAnnotation(xla); break; case REGULAR_EXPRESSION: dest.setPatternAnnotation(value); break; default: throw new UnsupportedOperationException("unknown type: " + type); } return dest; } }
// NOTE(review): dependency-annotated dataset variant of convert(t) — inline "// depends on control dependency" markers are labels; code kept byte-identical.
public class class_name { @Override public XBELInternalAnnotationDefinition convert(AnnotationDefinition t) { if (t == null || t.getURL() != null) { return null; // depends on control dependency: [if], data = [none] } XBELInternalAnnotationDefinition dest = new XBELInternalAnnotationDefinition(); String description = t.getDescription(); String id = t.getId(); String usage = t.getUsage(); // If null, set equal to an empty string so that the document can be properly converted. if (description == null) description = ""; if (usage == null) usage = ""; dest.setDescription(description); dest.setId(id); dest.setUsage(usage); AnnotationType type = t.getType(); String value = t.getValue(); switch (type) { case ENUMERATION: List<String> enums = t.getEnums(); XBELListAnnotation xla = new XBELListAnnotation(); List<String> xlaval = xla.getListValue(); xlaval.addAll(enums); dest.setListAnnotation(xla); break; case REGULAR_EXPRESSION: dest.setPatternAnnotation(value); break; default: throw new UnsupportedOperationException("unknown type: " + type); } return dest; } }
public class class_name {
    /**
     * Pretty-prints a single javac AST statement to a string.
     *
     * @param context   the compiler context supplying the pretty-printer
     * @param statement the statement to render
     * @return the printed source text of the statement
     */
    private static String printStatement(Context context, JCStatement statement) {
        final StringWriter buffer = new StringWriter();
        try {
            pretty(context, buffer).printStat(statement);
        } catch (IOException impossible) {
            // StringWriter is purely in-memory; the checked IOException cannot occur.
            throw new AssertionError("StringWriter cannot throw IOExceptions");
        }
        return buffer.toString();
    }
}
// NOTE(review): dependency-annotated dataset variant of printStatement(...) — inline "// depends on control dependency" markers are labels; code kept byte-identical.
public class class_name { private static String printStatement(Context context, JCStatement statement) { StringWriter writer = new StringWriter(); try { pretty(context, writer).printStat(statement); // depends on control dependency: [try], data = [none] } catch (IOException e) { throw new AssertionError("StringWriter cannot throw IOExceptions"); } // depends on control dependency: [catch], data = [none] return writer.toString(); } }
public class class_name {
    /**
     * Pushes every registered metric into the shared metrics record, then emits
     * the record. The registry walk is synchronized on this object; the final
     * {@code update()} intentionally happens outside the lock, as in the
     * original implementation.
     *
     * @param unused the metrics context (not consulted here)
     */
    @Override
    public void doUpdates(MetricsContext unused) {
        synchronized (this) {
            for (MetricsBase metric : registry.getMetricsList()) {
                metric.pushMetric(metricsRecord);
            }
        }
        metricsRecord.update();
    }
}
// NOTE(review): dependency-annotated dataset variant of doUpdates(...) — inline "// depends on control dependency" markers are labels; code kept byte-identical.
public class class_name { @Override public void doUpdates(MetricsContext unused) { synchronized (this) { for (MetricsBase m : registry.getMetricsList()) { m.pushMetric(metricsRecord); // depends on control dependency: [for], data = [m] } } metricsRecord.update(); } }
// Purpose: builds a composite query Result from per-index sub-results, then augments the
// ordering until it is total (ties broken by per-direction tallies against available keys).
// NOTE(review): logic is heavily order-dependent (ordering/subResults are recomputed after
// every single-property change) — code kept byte-identical; documentation only. The original
// row was flattened onto few physical lines, so some "//" comments visually swallow trailing text.
public class class_name { private Result buildResult(Filter<S> filter, OrderingList<S> ordering, QueryHints hints) throws SupportException, RepositoryException { List<IndexedQueryAnalyzer<S>.Result> subResults; if (filter == null) { subResults = Collections .singletonList(mIndexAnalyzer.analyze(filter, ordering, hints)); } else { subResults = splitIntoSubResults(filter, ordering, hints); } if (subResults.size() <= 1) { // Total ordering not required. return new Result(subResults); } // If any orderings have an unspecified direction, switch to ASCENDING // or DESCENDING, depending on which is more popular. Then build new // sub-results. for (int pos = 0; pos < ordering.size(); pos++) { OrderedProperty<S> op = ordering.get(pos); if (op.getDirection() != Direction.UNSPECIFIED) { continue; } // Find out which direction is most popular for this property. Tally tally = new Tally(op.getChainedProperty()); for (IndexedQueryAnalyzer<S>.Result result : subResults) { tally.increment(findHandledDirection(result, op)); } ordering = ordering.replace(pos, op.direction(tally.getBestDirection())); // Re-calc with specified direction. Only do one property at a time // since one simple change might alter the query plan. subResults = splitIntoSubResults(filter, ordering, hints); if (subResults.size() <= 1) { // Total ordering no longer required. return new Result(subResults); } } // Gather all the keys available. As ordering properties touch key // properties, they are removed from all key sets. When a key set size // reaches zero, total ordering has been achieved. List<Set<ChainedProperty<S>>> keys = getKeys(); // Check if current ordering is total. for (OrderedProperty<S> op : ordering) { ChainedProperty<S> property = op.getChainedProperty(); if (pruneKeys(keys, property)) { // Found a key which is fully covered, indicating total ordering. return new Result(subResults, ordering); } } // Create a super key which contains all the properties required for // total ordering. 
The goal here is to append these properties to the // ordering in a fashion that takes advantage of each index's natural // ordering. This in turn should cause any sort operation to operate // over smaller groups. Smaller groups means smaller sort buffers. // Smaller sort buffers makes a merge sort happy. // Super key could be stored simply in a set, but a map makes it // convenient for tracking tallies. Map<ChainedProperty<S>, Tally> superKey = new LinkedHashMap<ChainedProperty<S>, Tally>(); for (Set<ChainedProperty<S>> key : keys) { for (ChainedProperty<S> property : key) { if (!superKey.containsKey(property)) { superKey.put(property, new Tally(property)); } } } // Keep looping until total ordering achieved. while (true) { // For each ordering score, iterate over the entire unused ordering // properties and select the next free property. If property is in // the super key increment a tally associated with property // direction. Choose the property with the best tally and augment // the orderings with it and create new sub-results. Remove the // property from the super key and the key set. If any key is now // fully covered, a total ordering has been achieved. 
for (IndexedQueryAnalyzer<S>.Result result : subResults) { OrderingScore<S> score = result.getCompositeScore().getOrderingScore(); OrderingList<S> unused = score.getUnusedOrdering(); if (unused.size() > 0) { for (OrderedProperty<S> prop : unused) { ChainedProperty<S> chainedProp = prop.getChainedProperty(); Tally tally = superKey.get(chainedProp); if (tally != null) { tally.increment(prop.getDirection()); } } } OrderingList<S> free = score.getFreeOrdering(); if (free.size() > 0) { OrderedProperty<S> prop = free.get(0); ChainedProperty<S> chainedProp = prop.getChainedProperty(); Tally tally = superKey.get(chainedProp); if (tally != null) { tally.increment(prop.getDirection()); } } } Tally best = bestTally(superKey.values()); ChainedProperty<S> bestProperty = best.getProperty(); // Now augment the orderings and create new sub-results. ordering = ordering.concat(OrderedProperty.get(bestProperty, best.getBestDirection())); subResults = splitIntoSubResults(filter, ordering, hints); if (subResults.size() <= 1) { // Total ordering no longer required. break; } // Remove property from super key and key set... superKey.remove(bestProperty); if (superKey.size() == 0) { break; } if (pruneKeys(keys, bestProperty)) { break; } // Clear the tallies for the next run. for (Tally tally : superKey.values()) { tally.clear(); } } return new Result(subResults, ordering); } }
// NOTE(review): dependency-annotated dataset variant of buildResult(...) — inline "// depends on control dependency" markers are labels; code kept byte-identical.
public class class_name { private Result buildResult(Filter<S> filter, OrderingList<S> ordering, QueryHints hints) throws SupportException, RepositoryException { List<IndexedQueryAnalyzer<S>.Result> subResults; if (filter == null) { subResults = Collections .singletonList(mIndexAnalyzer.analyze(filter, ordering, hints)); } else { subResults = splitIntoSubResults(filter, ordering, hints); } if (subResults.size() <= 1) { // Total ordering not required. return new Result(subResults); } // If any orderings have an unspecified direction, switch to ASCENDING // or DESCENDING, depending on which is more popular. Then build new // sub-results. for (int pos = 0; pos < ordering.size(); pos++) { OrderedProperty<S> op = ordering.get(pos); if (op.getDirection() != Direction.UNSPECIFIED) { continue; } // Find out which direction is most popular for this property. Tally tally = new Tally(op.getChainedProperty()); for (IndexedQueryAnalyzer<S>.Result result : subResults) { tally.increment(findHandledDirection(result, op)); // depends on control dependency: [for], data = [result] } ordering = ordering.replace(pos, op.direction(tally.getBestDirection())); // Re-calc with specified direction. Only do one property at a time // since one simple change might alter the query plan. subResults = splitIntoSubResults(filter, ordering, hints); if (subResults.size() <= 1) { // Total ordering no longer required. return new Result(subResults); // depends on control dependency: [if], data = [none] } } // Gather all the keys available. As ordering properties touch key // properties, they are removed from all key sets. When a key set size // reaches zero, total ordering has been achieved. List<Set<ChainedProperty<S>>> keys = getKeys(); // Check if current ordering is total. for (OrderedProperty<S> op : ordering) { ChainedProperty<S> property = op.getChainedProperty(); if (pruneKeys(keys, property)) { // Found a key which is fully covered, indicating total ordering. 
return new Result(subResults, ordering); // depends on control dependency: [if], data = [none] } } // Create a super key which contains all the properties required for // total ordering. The goal here is to append these properties to the // ordering in a fashion that takes advantage of each index's natural // ordering. This in turn should cause any sort operation to operate // over smaller groups. Smaller groups means smaller sort buffers. // Smaller sort buffers makes a merge sort happy. // Super key could be stored simply in a set, but a map makes it // convenient for tracking tallies. Map<ChainedProperty<S>, Tally> superKey = new LinkedHashMap<ChainedProperty<S>, Tally>(); for (Set<ChainedProperty<S>> key : keys) { for (ChainedProperty<S> property : key) { if (!superKey.containsKey(property)) { superKey.put(property, new Tally(property)); // depends on control dependency: [if], data = [none] } } } // Keep looping until total ordering achieved. while (true) { // For each ordering score, iterate over the entire unused ordering // properties and select the next free property. If property is in // the super key increment a tally associated with property // direction. Choose the property with the best tally and augment // the orderings with it and create new sub-results. Remove the // property from the super key and the key set. If any key is now // fully covered, a total ordering has been achieved. 
for (IndexedQueryAnalyzer<S>.Result result : subResults) { OrderingScore<S> score = result.getCompositeScore().getOrderingScore(); OrderingList<S> unused = score.getUnusedOrdering(); if (unused.size() > 0) { for (OrderedProperty<S> prop : unused) { ChainedProperty<S> chainedProp = prop.getChainedProperty(); Tally tally = superKey.get(chainedProp); if (tally != null) { tally.increment(prop.getDirection()); // depends on control dependency: [if], data = [none] } } } OrderingList<S> free = score.getFreeOrdering(); if (free.size() > 0) { OrderedProperty<S> prop = free.get(0); ChainedProperty<S> chainedProp = prop.getChainedProperty(); Tally tally = superKey.get(chainedProp); if (tally != null) { tally.increment(prop.getDirection()); // depends on control dependency: [if], data = [none] } } } Tally best = bestTally(superKey.values()); ChainedProperty<S> bestProperty = best.getProperty(); // Now augment the orderings and create new sub-results. ordering = ordering.concat(OrderedProperty.get(bestProperty, best.getBestDirection())); subResults = splitIntoSubResults(filter, ordering, hints); if (subResults.size() <= 1) { // Total ordering no longer required. break; } // Remove property from super key and key set... superKey.remove(bestProperty); if (superKey.size() == 0) { break; } if (pruneKeys(keys, bestProperty)) { break; } // Clear the tallies for the next run. for (Tally tally : superKey.values()) { tally.clear(); // depends on control dependency: [for], data = [tally] } } return new Result(subResults, ordering); } }
// Purpose: sums whole-day work across TimephasedWork blocks that overlap the given date range,
// counting only working days (when a calendar is supplied) and weighting by each block's
// per-day amount. NOTE(review): the do/while advances startDate day-by-day and re-enters for
// the next assignment block — statement order is load-bearing, so code is kept byte-identical.
public class class_name { private Duration getRangeDurationWholeDay(ProjectCalendar projectCalendar, TimescaleUnits rangeUnits, DateRange range, List<TimephasedWork> assignments, int startIndex) { // option 1: // Our date range starts before the start of the TRA at the start index. // We can guarantee that we don't need to look at any earlier TRA blocks so just start here // option 2: // Our date range starts at the same point as the first TRA: do nothing... // option 3: // Our date range starts somewhere inside the first TRA... // if it's option 1 just set the start date to the start of the TRA block // for everything else we just use the start date of our date range. // start counting forwards one day at a time until we reach the end of // the date range, or until we reach the end of the block. // if we have not reached the end of the range, move to the next block and // see if the date range overlaps it. if it does not overlap, then we're // done. // if it does overlap, then move to the next block and repeat int totalDays = 0; double totalWork = 0; TimephasedWork assignment = assignments.get(startIndex); boolean done = false; do { // // Select the correct start date // long startDate = range.getStart().getTime(); long assignmentStart = assignment.getStart().getTime(); if (startDate < assignmentStart) { startDate = assignmentStart; } long rangeEndDate = range.getEnd().getTime(); long traEndDate = assignment.getFinish().getTime(); Calendar cal = DateHelper.popCalendar(startDate); Date calendarDate = cal.getTime(); // // Start counting forwards // while (startDate < rangeEndDate && startDate < traEndDate) { if (projectCalendar == null || projectCalendar.isWorkingDate(calendarDate)) { ++totalDays; } cal.add(Calendar.DAY_OF_YEAR, 1); startDate = cal.getTimeInMillis(); calendarDate = cal.getTime(); } DateHelper.pushCalendar(cal); // // If we still haven't reached the end of our range // check to see if the next TRA can be used. 
// done = true; totalWork += (assignment.getAmountPerDay().getDuration() * totalDays); if (startDate < rangeEndDate) { ++startIndex; if (startIndex < assignments.size()) { assignment = assignments.get(startIndex); totalDays = 0; done = false; } } } while (!done); return Duration.getInstance(totalWork, assignment.getAmountPerDay().getUnits()); } }
// NOTE(review): dependency-annotated dataset variant of getRangeDurationWholeDay(...) — inline "// depends on control dependency" markers are labels; code kept byte-identical.
public class class_name { private Duration getRangeDurationWholeDay(ProjectCalendar projectCalendar, TimescaleUnits rangeUnits, DateRange range, List<TimephasedWork> assignments, int startIndex) { // option 1: // Our date range starts before the start of the TRA at the start index. // We can guarantee that we don't need to look at any earlier TRA blocks so just start here // option 2: // Our date range starts at the same point as the first TRA: do nothing... // option 3: // Our date range starts somewhere inside the first TRA... // if it's option 1 just set the start date to the start of the TRA block // for everything else we just use the start date of our date range. // start counting forwards one day at a time until we reach the end of // the date range, or until we reach the end of the block. // if we have not reached the end of the range, move to the next block and // see if the date range overlaps it. if it does not overlap, then we're // done. // if it does overlap, then move to the next block and repeat int totalDays = 0; double totalWork = 0; TimephasedWork assignment = assignments.get(startIndex); boolean done = false; do { // // Select the correct start date // long startDate = range.getStart().getTime(); long assignmentStart = assignment.getStart().getTime(); if (startDate < assignmentStart) { startDate = assignmentStart; // depends on control dependency: [if], data = [none] } long rangeEndDate = range.getEnd().getTime(); long traEndDate = assignment.getFinish().getTime(); Calendar cal = DateHelper.popCalendar(startDate); Date calendarDate = cal.getTime(); // // Start counting forwards // while (startDate < rangeEndDate && startDate < traEndDate) { if (projectCalendar == null || projectCalendar.isWorkingDate(calendarDate)) { ++totalDays; // depends on control dependency: [if], data = [none] } cal.add(Calendar.DAY_OF_YEAR, 1); // depends on control dependency: [while], data = [none] startDate = cal.getTimeInMillis(); // depends on control dependency: 
[while], data = [none] calendarDate = cal.getTime(); // depends on control dependency: [while], data = [none] } DateHelper.pushCalendar(cal); // // If we still haven't reached the end of our range // check to see if the next TRA can be used. // done = true; totalWork += (assignment.getAmountPerDay().getDuration() * totalDays); if (startDate < rangeEndDate) { ++startIndex; // depends on control dependency: [if], data = [none] if (startIndex < assignments.size()) { assignment = assignments.get(startIndex); // depends on control dependency: [if], data = [(startIndex] totalDays = 0; // depends on control dependency: [if], data = [none] done = false; // depends on control dependency: [if], data = [none] } } } while (!done); return Duration.getInstance(totalWork, assignment.getAmountPerDay().getUnits()); } }
public class class_name {
    /**
     * Draws (or relocates) a drop-position marker while a tab button is being
     * dragged across the tab box.
     * <p>
     * Fixes in this revision: removed the unused local {@code final int xx = 0;}
     * and the redundant {@code else} after the early return. The debug
     * {@code System.out.println} is kept (behavior unchanged) but flagged.
     *
     * @param dragged the tab button currently being dragged
     * @param x       pointer x coordinate, in the box's coordinate space
     * @param y       pointer y coordinate, in the box's coordinate space
     */
    public void drawMarker(final ToggleButton dragged, final double x, final double y) {
        final int draggedIdx = getBox().getChildren().indexOf(dragged);
        final int markerIdx = getBox().getChildren().indexOf(this.marker);
        int idx = 0;
        Node tempHoverNode = null;
        // Find the child under the pointer; idx counts children before it,
        // skipping the marker itself so it never shifts the insertion index.
        for (final Node n : getBox().getChildren()) {
            if (n.getBoundsInParent().contains(x, y)) {
                tempHoverNode = n;
                break;
            }
            if (n != this.marker) {
                idx++;
            }
        }
        if (tempHoverNode == this.hoverNode) {
            // Pointer is still over the same node — nothing to redraw.
            return;
        }
        this.hoverNode = tempHoverNode;
        // TODO(review): leftover debug output — route through a logger or remove.
        System.out.println("marker" + markerIdx + " idx " + idx);
        if (markerIdx != idx) {
            if (this.marker != null) {
                getBox().getChildren().remove(this.marker);
            }
            // No marker is shown when the drop position equals the dragged tab's own slot.
            if (draggedIdx != idx) {
                this.marker = model().object().orientation() == TabbedPaneOrientation.bottom
                        || model().object().orientation() == TabbedPaneOrientation.top
                                ? new Rectangle(10, getBox().getHeight())
                                : new Rectangle(getBox().getWidth(), 4);
                this.marker.setFill(Color.LIGHTGREEN);
                getBox().getChildren().add(idx, this.marker);
            }
        }
    }
}
// NOTE(review): dependency-annotated dataset variant of drawMarker(...) — inline "// depends on control dependency" markers are labels; code kept byte-identical (including the unused local xx retained in the plain row).
public class class_name { public void drawMarker(final ToggleButton dragged, final double x, final double y) { final int draggedIdx = getBox().getChildren().indexOf(dragged); final int markerIdx = getBox().getChildren().indexOf(this.marker); int idx = 0; Node tempHoverNode = null; final int xx = 0; for (final Node n : getBox().getChildren()) { if (n.getBoundsInParent().contains(x, y)) { tempHoverNode = n; // depends on control dependency: [if], data = [none] break; } if (n != this.marker) { idx++; // depends on control dependency: [if], data = [none] } } if (tempHoverNode == this.hoverNode) { return; // depends on control dependency: [if], data = [none] } else { this.hoverNode = tempHoverNode; // depends on control dependency: [if], data = [none] } System.out.println("marker" + markerIdx + " idx " + idx); if (markerIdx != idx) { if (this.marker != null) { getBox().getChildren().remove(this.marker); // depends on control dependency: [if], data = [(this.marker] } if (draggedIdx != idx) { this.marker = model().object().orientation() == TabbedPaneOrientation.bottom || model().object().orientation() == TabbedPaneOrientation.top ? new Rectangle(10, getBox().getHeight()) : new Rectangle(getBox().getWidth(), 4); // depends on control dependency: [if], data = [none] this.marker.setFill(Color.LIGHTGREEN); // depends on control dependency: [if], data = [none] getBox().getChildren().add(idx, this.marker); // depends on control dependency: [if], data = [none] } } } }
public class class_name {
    /**
     * Copies all bytes from the input stream to the output stream via the
     * fluent writer helper.
     *
     * @param is      the source stream
     * @param os      the destination stream
     * @param closeOs when true, the sink is closed once the copy completes
     * @return the number of bytes copied, as reported by the writer
     */
    public static int copy(InputStream is, OutputStream os, boolean closeOs) {
        return closeOs
                ? write(is).ensureCloseSink().to(os)
                : write(is).to(os);
    }
}
// NOTE(review): dependency-annotated dataset variant of copy(...) — inline "// depends on control dependency" markers are labels; code kept byte-identical.
public class class_name { public static int copy(InputStream is, OutputStream os, boolean closeOs) { if (closeOs) { return write(is).ensureCloseSink().to(os); // depends on control dependency: [if], data = [none] } else { return write(is).to(os); // depends on control dependency: [if], data = [none] } } }
// Purpose: first phase of a partition-promotion commit — acquires the single promotion permit,
// bails out (releasing the permit) if this promotion's partition-state version was already
// applied, then submits one asynchronous BeforePromotionOperation per promotion with a shared
// countdown callback. NOTE(review): permit acquire/release pairing and the async callback
// protocol make this order-sensitive; code kept byte-identical, documentation only.
public class class_name { private CallStatus beforePromotion() { NodeEngineImpl nodeEngine = (NodeEngineImpl) getNodeEngine(); InternalOperationService operationService = nodeEngine.getOperationService(); InternalPartitionServiceImpl partitionService = getService(); if (!partitionService.getMigrationManager().acquirePromotionPermit()) { throw new RetryableHazelcastException("Another promotion is being run currently. " + "This is only expected when promotion is retried to an unresponsive destination."); } ILogger logger = getLogger(); int partitionStateVersion = partitionService.getPartitionStateVersion(); if (partitionState.getVersion() <= partitionStateVersion) { logger.warning("Already applied promotions to the partition state. Promotion state version: " + partitionState.getVersion() + ", current version: " + partitionStateVersion); partitionService.getMigrationManager().releasePromotionPermit(); success = true; return CallStatus.DONE_RESPONSE; } partitionService.getInternalMigrationListener().onPromotionStart(MigrationParticipant.DESTINATION, promotions); if (logger.isFineEnabled()) { logger.fine("Submitting BeforePromotionOperations for " + promotions.size() + " promotions. " + "Promotion partition state version: " + partitionState.getVersion() + ", current partition state version: " + partitionStateVersion); } Runnable beforePromotionsCallback = new BeforePromotionOperationCallback(this, new AtomicInteger(promotions.size())); for (MigrationInfo promotion : promotions) { if (logger.isFinestEnabled()) { logger.finest("Submitting BeforePromotionOperation for promotion: " + promotion); } BeforePromotionOperation op = new BeforePromotionOperation(promotion, beforePromotionsCallback); op.setPartitionId(promotion.getPartitionId()).setNodeEngine(nodeEngine).setService(partitionService); operationService.execute(op); } return CallStatus.DONE_VOID; } }
// NOTE(review): dependency-annotated dataset variant of beforePromotion() — inline "// depends on control dependency" markers are labels; code kept byte-identical.
public class class_name { private CallStatus beforePromotion() { NodeEngineImpl nodeEngine = (NodeEngineImpl) getNodeEngine(); InternalOperationService operationService = nodeEngine.getOperationService(); InternalPartitionServiceImpl partitionService = getService(); if (!partitionService.getMigrationManager().acquirePromotionPermit()) { throw new RetryableHazelcastException("Another promotion is being run currently. " + "This is only expected when promotion is retried to an unresponsive destination."); } ILogger logger = getLogger(); int partitionStateVersion = partitionService.getPartitionStateVersion(); if (partitionState.getVersion() <= partitionStateVersion) { logger.warning("Already applied promotions to the partition state. Promotion state version: " + partitionState.getVersion() + ", current version: " + partitionStateVersion); // depends on control dependency: [if], data = [none] partitionService.getMigrationManager().releasePromotionPermit(); // depends on control dependency: [if], data = [none] success = true; // depends on control dependency: [if], data = [none] return CallStatus.DONE_RESPONSE; // depends on control dependency: [if], data = [none] } partitionService.getInternalMigrationListener().onPromotionStart(MigrationParticipant.DESTINATION, promotions); if (logger.isFineEnabled()) { logger.fine("Submitting BeforePromotionOperations for " + promotions.size() + " promotions. 
" + "Promotion partition state version: " + partitionState.getVersion() + ", current partition state version: " + partitionStateVersion); // depends on control dependency: [if], data = [none] } Runnable beforePromotionsCallback = new BeforePromotionOperationCallback(this, new AtomicInteger(promotions.size())); for (MigrationInfo promotion : promotions) { if (logger.isFinestEnabled()) { logger.finest("Submitting BeforePromotionOperation for promotion: " + promotion); // depends on control dependency: [if], data = [none] } BeforePromotionOperation op = new BeforePromotionOperation(promotion, beforePromotionsCallback); op.setPartitionId(promotion.getPartitionId()).setNodeEngine(nodeEngine).setService(partitionService); // depends on control dependency: [for], data = [promotion] operationService.execute(op); // depends on control dependency: [for], data = [none] } return CallStatus.DONE_VOID; } }
public class class_name {
    /**
     * Builds an EMR step that installs Pig.
     *
     * @param pigVersions the Pig versions to install; when absent or empty,
     *                    "latest" is requested
     * @return the configured install-pig step
     */
    public HadoopJarStepConfig newInstallPigStep(String... pigVersions) {
        final boolean versionsGiven = pigVersions != null && pigVersions.length > 0;
        final String versionArg = versionsGiven
                ? StringUtils.join(",", pigVersions)
                : "latest";
        return newHivePigStep("pig", "--install-pig", "--pig-versions", versionArg);
    }
}
// NOTE(review): dependency-annotated dataset variant of newInstallPigStep(...) — inline "// depends on control dependency" marker is a label; code kept byte-identical.
public class class_name { public HadoopJarStepConfig newInstallPigStep(String... pigVersions) { if (pigVersions != null && pigVersions.length > 0) { return newHivePigStep("pig", "--install-pig", "--pig-versions", StringUtils.join(",", pigVersions)); // depends on control dependency: [if], data = [none] } return newHivePigStep("pig", "--install-pig", "--pig-versions", "latest"); } }
public class class_name { public void initBinaryChunks(Base64Variant v, CharArrayBase64Decoder dec, boolean firstChunk) { if (mInputStart < 0) { // non-shared dec.init(v, firstChunk, mCurrentSegment, 0, mCurrentSize, mSegments); } else { // shared dec.init(v, firstChunk, mInputBuffer, mInputStart, mInputLen, null); } } }
// NOTE(review): dependency-annotated dataset variant of initBinaryChunks(...) — inline "// depends on control dependency" markers are labels; code kept byte-identical.
public class class_name { public void initBinaryChunks(Base64Variant v, CharArrayBase64Decoder dec, boolean firstChunk) { if (mInputStart < 0) { // non-shared dec.init(v, firstChunk, mCurrentSegment, 0, mCurrentSize, mSegments); // depends on control dependency: [if], data = [none] } else { // shared dec.init(v, firstChunk, mInputBuffer, mInputStart, mInputLen, null); // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Sets the If-Unmodified-Since precondition for this request.
     *
     * @param ifUnmodifiedSince the timestamp to send, or null to clear the header
     * @return this options object, for chaining
     */
    public JobEnableOptions withIfUnmodifiedSince(DateTime ifUnmodifiedSince) {
        // Wrap in the RFC 1123 form the service expects; null clears the precondition.
        this.ifUnmodifiedSince = (ifUnmodifiedSince == null)
                ? null
                : new DateTimeRfc1123(ifUnmodifiedSince);
        return this;
    }
}
// NOTE(review): dependency-annotated dataset variant of withIfUnmodifiedSince(...) — inline "// depends on control dependency" markers are labels; code kept byte-identical.
public class class_name { public JobEnableOptions withIfUnmodifiedSince(DateTime ifUnmodifiedSince) { if (ifUnmodifiedSince == null) { this.ifUnmodifiedSince = null; // depends on control dependency: [if], data = [none] } else { this.ifUnmodifiedSince = new DateTimeRfc1123(ifUnmodifiedSince); // depends on control dependency: [if], data = [(ifUnmodifiedSince] } return this; } }
// Purpose: emits the Java class/interface declaration header for a generated class — javadoc
// banner, "public class|interface Name extends Base implements ..." — driven by ClassInfo
// record fields, and registers the needed includes. NOTE(review): the emitted text depends on
// the exact concatenation order, so code is kept byte-identical; documentation only.
public class class_name { public void writeClassInterface() { Record recClassInfo = this.getMainRecord(); String strClassName = recClassInfo.getField(ClassInfo.CLASS_NAME).getString(); String strBaseClass = recClassInfo.getField(ClassInfo.BASE_CLASS_NAME).getString(); String strClassDesc = recClassInfo.getField(ClassInfo.CLASS_DESC).getString(); String strClassInterface = recClassInfo.getField(ClassInfo.CLASS_IMPLEMENTS).getString(); String implementsClass = null; if (((ClassInfo)recClassInfo).isARecord(false)) implementsClass = strClassName + "Model"; if ((implementsClass != null) && (implementsClass.length() > 0)) { m_IncludeNameList.addInclude(this.getPackage(CodeType.INTERFACE), null); // Make sure this is included if ((strClassInterface == null) || (strClassInterface.length() == 0)) strClassInterface = implementsClass; else strClassInterface = implementsClass + ", " + strClassInterface; } m_IncludeNameList.addInclude(strBaseClass, null); // Make sure this is included m_StreamOut.writeit("\n/**\n *\t" + strClassName + " - " + strClassDesc + ".\n */\n"); if ((strClassInterface == null) || (strClassInterface.length() == 0)) strClassInterface = ""; else strClassInterface = "\n\t implements " + strClassInterface; String strClassType = "class"; if ("interface".equals(recClassInfo.getField(ClassInfo.CLASS_TYPE).toString())) strClassType = "interface"; String strExtends = " extends "; if (strBaseClass.length() == 0) strExtends = ""; m_StreamOut.writeit("public " + strClassType + " " + strClassName + strExtends + strBaseClass + strClassInterface + "\n{\n"); m_StreamOut.setTabs(+1); } }
public class class_name { public void writeClassInterface() { Record recClassInfo = this.getMainRecord(); String strClassName = recClassInfo.getField(ClassInfo.CLASS_NAME).getString(); String strBaseClass = recClassInfo.getField(ClassInfo.BASE_CLASS_NAME).getString(); String strClassDesc = recClassInfo.getField(ClassInfo.CLASS_DESC).getString(); String strClassInterface = recClassInfo.getField(ClassInfo.CLASS_IMPLEMENTS).getString(); String implementsClass = null; if (((ClassInfo)recClassInfo).isARecord(false)) implementsClass = strClassName + "Model"; if ((implementsClass != null) && (implementsClass.length() > 0)) { m_IncludeNameList.addInclude(this.getPackage(CodeType.INTERFACE), null); // Make sure this is included // depends on control dependency: [if], data = [none] if ((strClassInterface == null) || (strClassInterface.length() == 0)) strClassInterface = implementsClass; else strClassInterface = implementsClass + ", " + strClassInterface; } m_IncludeNameList.addInclude(strBaseClass, null); // Make sure this is included m_StreamOut.writeit("\n/**\n *\t" + strClassName + " - " + strClassDesc + ".\n */\n"); if ((strClassInterface == null) || (strClassInterface.length() == 0)) strClassInterface = ""; else strClassInterface = "\n\t implements " + strClassInterface; String strClassType = "class"; if ("interface".equals(recClassInfo.getField(ClassInfo.CLASS_TYPE).toString())) strClassType = "interface"; String strExtends = " extends "; if (strBaseClass.length() == 0) strExtends = ""; m_StreamOut.writeit("public " + strClassType + " " + strClassName + strExtends + strBaseClass + strClassInterface + "\n{\n"); m_StreamOut.setTabs(+1); } }
public class class_name { public static Collection<String> split(final String value) { if (value == null) { return Collections.emptyList(); } final String[] tokens = value.trim().split("\\s+"); return asList(tokens); } }
public class class_name { public static Collection<String> split(final String value) { if (value == null) { return Collections.emptyList(); // depends on control dependency: [if], data = [none] } final String[] tokens = value.trim().split("\\s+"); return asList(tokens); } }
public class class_name { private String makeMessageForParseException(ParseException exception) { final StringBuilder sb = new StringBuilder("Parse error. Found "); final StringBuilder expected = new StringBuilder(); int maxExpectedTokenSequenceLength = 0; TreeSet<String> sortedOptions = new TreeSet<>(); for (int i = 0; i < exception.expectedTokenSequences.length; i++) { if (maxExpectedTokenSequenceLength < exception.expectedTokenSequences[i].length) { maxExpectedTokenSequenceLength = exception.expectedTokenSequences[i].length; } for (int j = 0; j < exception.expectedTokenSequences[i].length; j++) { sortedOptions.add(exception.tokenImage[exception.expectedTokenSequences[i][j]]); } } for (String option : sortedOptions) { expected.append(" ").append(option); } sb.append(""); Token token = exception.currentToken.next; for (int i = 0; i < maxExpectedTokenSequenceLength; i++) { String tokenText = token.image; String escapedTokenText = ParseException.add_escapes(tokenText); if (i != 0) { sb.append(" "); } if (token.kind == 0) { sb.append(exception.tokenImage[0]); break; } escapedTokenText = "\"" + escapedTokenText + "\""; String image = exception.tokenImage[token.kind]; if (image.equals(escapedTokenText)) { sb.append(image); } else { sb.append(" ") .append(escapedTokenText) .append(" ") .append(image); } token = token.next; } if (exception.expectedTokenSequences.length != 0) { int numExpectedTokens = exception.expectedTokenSequences.length; sb.append(", expected") .append(numExpectedTokens == 1 ? "" : " one of ") .append(expected.toString()); } return sb.toString(); } }
public class class_name { private String makeMessageForParseException(ParseException exception) { final StringBuilder sb = new StringBuilder("Parse error. Found "); final StringBuilder expected = new StringBuilder(); int maxExpectedTokenSequenceLength = 0; TreeSet<String> sortedOptions = new TreeSet<>(); for (int i = 0; i < exception.expectedTokenSequences.length; i++) { if (maxExpectedTokenSequenceLength < exception.expectedTokenSequences[i].length) { maxExpectedTokenSequenceLength = exception.expectedTokenSequences[i].length; // depends on control dependency: [if], data = [none] } for (int j = 0; j < exception.expectedTokenSequences[i].length; j++) { sortedOptions.add(exception.tokenImage[exception.expectedTokenSequences[i][j]]); // depends on control dependency: [for], data = [j] } } for (String option : sortedOptions) { expected.append(" ").append(option); // depends on control dependency: [for], data = [option] } sb.append(""); Token token = exception.currentToken.next; for (int i = 0; i < maxExpectedTokenSequenceLength; i++) { String tokenText = token.image; String escapedTokenText = ParseException.add_escapes(tokenText); if (i != 0) { sb.append(" "); // depends on control dependency: [if], data = [none] } if (token.kind == 0) { sb.append(exception.tokenImage[0]); // depends on control dependency: [if], data = [none] break; } escapedTokenText = "\"" + escapedTokenText + "\""; // depends on control dependency: [for], data = [none] String image = exception.tokenImage[token.kind]; if (image.equals(escapedTokenText)) { sb.append(image); // depends on control dependency: [if], data = [none] } else { sb.append(" ") .append(escapedTokenText) .append(" ") .append(image); // depends on control dependency: [if], data = [none] } token = token.next; // depends on control dependency: [for], data = [none] } if (exception.expectedTokenSequences.length != 0) { int numExpectedTokens = exception.expectedTokenSequences.length; sb.append(", expected") .append(numExpectedTokens 
== 1 ? "" : " one of ") .append(expected.toString()); // depends on control dependency: [if], data = [none] } return sb.toString(); } }
public class class_name { public String toAgentArg() { StringBuilder arg = new StringBuilder(); for (Map.Entry<String,String> entry : options.entrySet()) { String key = entry.getKey(); if (!key.equals("quiet") && !key.equals("verbose")) { arg.append(key).append("=").append(EscapeUtil.escape(entry.getValue(),EscapeUtil.CSV_ESCAPE,",")).append(","); } } return arg.length() > 0 ? arg.substring(0,arg.length() - 1) : ""; } }
public class class_name { public String toAgentArg() { StringBuilder arg = new StringBuilder(); for (Map.Entry<String,String> entry : options.entrySet()) { String key = entry.getKey(); if (!key.equals("quiet") && !key.equals("verbose")) { arg.append(key).append("=").append(EscapeUtil.escape(entry.getValue(),EscapeUtil.CSV_ESCAPE,",")).append(","); // depends on control dependency: [if], data = [none] } } return arg.length() > 0 ? arg.substring(0,arg.length() - 1) : ""; } }
public class class_name { public void removeNodeStateListener(NodeStateListener listener) { stateCheck(State.CREATED, State.RUNNING, State.SHUTTING_DOWN, State.QUEUING); try { stateListeners.remove(listener); nodeListLock.readLock().lock(); for (RiakNode node : nodeList) { node.removeStateListener(listener); } } finally { nodeListLock.readLock().unlock(); } } }
public class class_name { public void removeNodeStateListener(NodeStateListener listener) { stateCheck(State.CREATED, State.RUNNING, State.SHUTTING_DOWN, State.QUEUING); try { stateListeners.remove(listener); // depends on control dependency: [try], data = [none] nodeListLock.readLock().lock(); // depends on control dependency: [try], data = [none] for (RiakNode node : nodeList) { node.removeStateListener(listener); // depends on control dependency: [for], data = [node] } } finally { nodeListLock.readLock().unlock(); } } }
public class class_name { Stream<String> writeTimeGauge(TimeGauge gauge) { Double value = gauge.value(getBaseTimeUnit()); if (Double.isFinite(value)) { return Stream.of(event(gauge.getId(), new Attribute("value", value))); } return Stream.empty(); } }
public class class_name { Stream<String> writeTimeGauge(TimeGauge gauge) { Double value = gauge.value(getBaseTimeUnit()); if (Double.isFinite(value)) { return Stream.of(event(gauge.getId(), new Attribute("value", value))); // depends on control dependency: [if], data = [none] } return Stream.empty(); } }
public class class_name { @Override public void tableChanged(final TableModelEvent e) { if (e.getFirstRow() == TableModelEvent.HEADER_ROW) { return; // Do not respond to changes in the number of columns here, only row and data changes. } // ColumnWidthsResizer requires that the internal Swing TableModelListeners that update the JTable view // run their updates BEFORE it does its thing. Unfortunately, the order in which listeners are notified is // undefined (see https://weblogs.java.net/blog/alexfromsun/archive/2011/06/15/swing-better-world-listeners). // As a work-around, we're going to place the resize operation at the end of the event queue. That way, it'll // be executed after all TableModelListeners have been notified. EventQueue.invokeLater(new Runnable() { @Override public void run() { // Do not cache the value of doFullScan; we need to reevaluate each time because the number of rows in // the table could have changed. boolean doFullScan = table.getRowCount() <= fullScanCutoff; if (e.getColumn() == TableModelEvent.ALL_COLUMNS) { resize(table, doFullScan); // Resize all columns. } else { resize(table, e.getColumn(), doFullScan); // Resize only the affected column. } } }); } }
public class class_name { @Override public void tableChanged(final TableModelEvent e) { if (e.getFirstRow() == TableModelEvent.HEADER_ROW) { return; // Do not respond to changes in the number of columns here, only row and data changes. // depends on control dependency: [if], data = [none] } // ColumnWidthsResizer requires that the internal Swing TableModelListeners that update the JTable view // run their updates BEFORE it does its thing. Unfortunately, the order in which listeners are notified is // undefined (see https://weblogs.java.net/blog/alexfromsun/archive/2011/06/15/swing-better-world-listeners). // As a work-around, we're going to place the resize operation at the end of the event queue. That way, it'll // be executed after all TableModelListeners have been notified. EventQueue.invokeLater(new Runnable() { @Override public void run() { // Do not cache the value of doFullScan; we need to reevaluate each time because the number of rows in // the table could have changed. boolean doFullScan = table.getRowCount() <= fullScanCutoff; if (e.getColumn() == TableModelEvent.ALL_COLUMNS) { resize(table, doFullScan); // Resize all columns. // depends on control dependency: [if], data = [none] } else { resize(table, e.getColumn(), doFullScan); // Resize only the affected column. // depends on control dependency: [if], data = [none] } } }); } }
public class class_name { public final DRL5Expressions.operator_key_return operator_key() throws RecognitionException { DRL5Expressions.operator_key_return retval = new DRL5Expressions.operator_key_return(); retval.start = input.LT(1); Token id=null; try { // src/main/resources/org/drools/compiler/lang/DRL5Expressions.g:771:3: ({...}? =>id= ID ) // src/main/resources/org/drools/compiler/lang/DRL5Expressions.g:771:10: {...}? =>id= ID { if ( !(((helper.isPluggableEvaluator(false)))) ) { if (state.backtracking>0) {state.failed=true; return retval;} throw new FailedPredicateException(input, "operator_key", "(helper.isPluggableEvaluator(false))"); } id=(Token)match(input,ID,FOLLOW_ID_in_operator_key4794); if (state.failed) return retval; if ( state.backtracking==0 ) { helper.emit(id, DroolsEditorType.KEYWORD); } } retval.stop = input.LT(-1); } catch (RecognitionException re) { throw re; } finally { // do for sure before leaving } return retval; } }
public class class_name { public final DRL5Expressions.operator_key_return operator_key() throws RecognitionException { DRL5Expressions.operator_key_return retval = new DRL5Expressions.operator_key_return(); retval.start = input.LT(1); Token id=null; try { // src/main/resources/org/drools/compiler/lang/DRL5Expressions.g:771:3: ({...}? =>id= ID ) // src/main/resources/org/drools/compiler/lang/DRL5Expressions.g:771:10: {...}? =>id= ID { if ( !(((helper.isPluggableEvaluator(false)))) ) { if (state.backtracking>0) {state.failed=true; return retval;} // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] throw new FailedPredicateException(input, "operator_key", "(helper.isPluggableEvaluator(false))"); } id=(Token)match(input,ID,FOLLOW_ID_in_operator_key4794); if (state.failed) return retval; if ( state.backtracking==0 ) { helper.emit(id, DroolsEditorType.KEYWORD); } // depends on control dependency: [if], data = [none] } retval.stop = input.LT(-1); } catch (RecognitionException re) { throw re; } finally { // do for sure before leaving } return retval; } }
public class class_name { private Set<String> getExcludes() { Set<String> result = new HashSet<String>(); for (String exclude : kp.getExcludes()) { String name = exclude + ".class"; String renamed = renames.get(name); result.add((renamed != null) ? renamed : name); } return result; } }
public class class_name { private Set<String> getExcludes() { Set<String> result = new HashSet<String>(); for (String exclude : kp.getExcludes()) { String name = exclude + ".class"; String renamed = renames.get(name); result.add((renamed != null) ? renamed : name); // depends on control dependency: [for], data = [none] } return result; } }
public class class_name { public static <T> Set<T> iteratorToSet(Iterator<T> iterator) { Set<T> set = new HashSet<>(); while (iterator.hasNext()) { set.add(iterator.next()); } return set; } }
public class class_name { public static <T> Set<T> iteratorToSet(Iterator<T> iterator) { Set<T> set = new HashSet<>(); while (iterator.hasNext()) { set.add(iterator.next()); // depends on control dependency: [while], data = [none] } return set; } }
public class class_name { public final void addValue(@NonNull final String value) { Condition.INSTANCE.ensureNotNull(value, "The value may not be null"); if (this.values != null) { if (this.values.add(value)) { if (persistSet(this.values)) { notifyChanged(); } } } else { Set<String> newValues = new HashSet<>(); newValues.add(value); setValues(newValues); } } }
public class class_name { public final void addValue(@NonNull final String value) { Condition.INSTANCE.ensureNotNull(value, "The value may not be null"); if (this.values != null) { if (this.values.add(value)) { if (persistSet(this.values)) { notifyChanged(); // depends on control dependency: [if], data = [none] } } } else { Set<String> newValues = new HashSet<>(); newValues.add(value); // depends on control dependency: [if], data = [none] setValues(newValues); // depends on control dependency: [if], data = [none] } } }
public class class_name { public static boolean validatePath(String path) { if(path == null) { Log.w(TAG, "Uploading path not set"); return false; } try { uriFromString(path); return true; } catch(DataSinkException e) { return false; } } }
public class class_name { public static boolean validatePath(String path) { if(path == null) { Log.w(TAG, "Uploading path not set"); // depends on control dependency: [if], data = [none] return false; // depends on control dependency: [if], data = [none] } try { uriFromString(path); // depends on control dependency: [try], data = [none] return true; // depends on control dependency: [try], data = [none] } catch(DataSinkException e) { return false; } // depends on control dependency: [catch], data = [none] } }
public class class_name { public void marshall(QuietTime quietTime, ProtocolMarshaller protocolMarshaller) { if (quietTime == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(quietTime.getEnd(), END_BINDING); protocolMarshaller.marshall(quietTime.getStart(), START_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(QuietTime quietTime, ProtocolMarshaller protocolMarshaller) { if (quietTime == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(quietTime.getEnd(), END_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(quietTime.getStart(), START_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { static int closestCorner4(Grid g ) { double bestDistance = g.get(0,0).center.normSq(); int bestIdx = 0; double d = g.get(0,g.columns-1).center.normSq(); if( d < bestDistance ) { bestDistance = d; bestIdx = 3; } d = g.get(g.rows-1,g.columns-1).center.normSq(); if( d < bestDistance ) { bestDistance = d; bestIdx = 2; } d = g.get(g.rows-1,0).center.normSq(); if( d < bestDistance ) { bestIdx = 1; } return bestIdx; } }
public class class_name { static int closestCorner4(Grid g ) { double bestDistance = g.get(0,0).center.normSq(); int bestIdx = 0; double d = g.get(0,g.columns-1).center.normSq(); if( d < bestDistance ) { bestDistance = d; // depends on control dependency: [if], data = [none] bestIdx = 3; // depends on control dependency: [if], data = [none] } d = g.get(g.rows-1,g.columns-1).center.normSq(); if( d < bestDistance ) { bestDistance = d; // depends on control dependency: [if], data = [none] bestIdx = 2; // depends on control dependency: [if], data = [none] } d = g.get(g.rows-1,0).center.normSq(); if( d < bestDistance ) { bestIdx = 1; // depends on control dependency: [if], data = [none] } return bestIdx; } }
public class class_name { private void deriveProjectCalendar() { // // Count the number of times each calendar is used // Map<ProjectCalendar, Integer> map = new HashMap<ProjectCalendar, Integer>(); for (Task task : m_project.getTasks()) { ProjectCalendar calendar = task.getCalendar(); Integer count = map.get(calendar); if (count == null) { count = Integer.valueOf(1); } else { count = Integer.valueOf(count.intValue() + 1); } map.put(calendar, count); } // // Find the most frequently used calendar // int maxCount = 0; ProjectCalendar defaultCalendar = null; for (Entry<ProjectCalendar, Integer> entry : map.entrySet()) { if (entry.getValue().intValue() > maxCount) { maxCount = entry.getValue().intValue(); defaultCalendar = entry.getKey(); } } // // Set the default calendar for the project // and remove it's use as a task-specific calendar. // if (defaultCalendar != null) { m_project.setDefaultCalendar(defaultCalendar); for (Task task : m_project.getTasks()) { if (task.getCalendar() == defaultCalendar) { task.setCalendar(null); } } } } }
public class class_name { private void deriveProjectCalendar() { // // Count the number of times each calendar is used // Map<ProjectCalendar, Integer> map = new HashMap<ProjectCalendar, Integer>(); for (Task task : m_project.getTasks()) { ProjectCalendar calendar = task.getCalendar(); Integer count = map.get(calendar); if (count == null) { count = Integer.valueOf(1); // depends on control dependency: [if], data = [none] } else { count = Integer.valueOf(count.intValue() + 1); // depends on control dependency: [if], data = [(count] } map.put(calendar, count); // depends on control dependency: [for], data = [none] } // // Find the most frequently used calendar // int maxCount = 0; ProjectCalendar defaultCalendar = null; for (Entry<ProjectCalendar, Integer> entry : map.entrySet()) { if (entry.getValue().intValue() > maxCount) { maxCount = entry.getValue().intValue(); // depends on control dependency: [if], data = [none] defaultCalendar = entry.getKey(); // depends on control dependency: [if], data = [none] } } // // Set the default calendar for the project // and remove it's use as a task-specific calendar. // if (defaultCalendar != null) { m_project.setDefaultCalendar(defaultCalendar); // depends on control dependency: [if], data = [(defaultCalendar] for (Task task : m_project.getTasks()) { if (task.getCalendar() == defaultCalendar) { task.setCalendar(null); // depends on control dependency: [if], data = [none] } } } } }
public class class_name { public Observable<ServiceResponse<TransparentDataEncryptionInner>> createOrUpdateWithServiceResponseAsync(String resourceGroupName, String serverName, String databaseName) { if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (serverName == null) { throw new IllegalArgumentException("Parameter serverName is required and cannot be null."); } if (databaseName == null) { throw new IllegalArgumentException("Parameter databaseName is required and cannot be null."); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null."); } final String transparentDataEncryptionName = "current"; final TransparentDataEncryptionStatus status = null; TransparentDataEncryptionInner parameters = new TransparentDataEncryptionInner(); parameters.withStatus(null); return service.createOrUpdate(this.client.subscriptionId(), resourceGroupName, serverName, databaseName, transparentDataEncryptionName, this.client.apiVersion(), this.client.acceptLanguage(), parameters, this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<TransparentDataEncryptionInner>>>() { @Override public Observable<ServiceResponse<TransparentDataEncryptionInner>> call(Response<ResponseBody> response) { try { ServiceResponse<TransparentDataEncryptionInner> clientResponse = createOrUpdateDelegate(response); return Observable.just(clientResponse); } catch (Throwable t) { return Observable.error(t); } } }); } }
public class class_name { public Observable<ServiceResponse<TransparentDataEncryptionInner>> createOrUpdateWithServiceResponseAsync(String resourceGroupName, String serverName, String databaseName) { if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (serverName == null) { throw new IllegalArgumentException("Parameter serverName is required and cannot be null."); } if (databaseName == null) { throw new IllegalArgumentException("Parameter databaseName is required and cannot be null."); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null."); } final String transparentDataEncryptionName = "current"; final TransparentDataEncryptionStatus status = null; TransparentDataEncryptionInner parameters = new TransparentDataEncryptionInner(); parameters.withStatus(null); return service.createOrUpdate(this.client.subscriptionId(), resourceGroupName, serverName, databaseName, transparentDataEncryptionName, this.client.apiVersion(), this.client.acceptLanguage(), parameters, this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<TransparentDataEncryptionInner>>>() { @Override public Observable<ServiceResponse<TransparentDataEncryptionInner>> call(Response<ResponseBody> response) { try { ServiceResponse<TransparentDataEncryptionInner> clientResponse = createOrUpdateDelegate(response); return Observable.just(clientResponse); // depends on control dependency: [try], data = [none] } catch (Throwable t) { return Observable.error(t); } // depends on control dependency: [catch], data = [none] } }); } }
public class class_name { public static void loadLibrary( String libraryName, String ... dependentLibraryNames) { logger.log(level, "Loading library: " + libraryName); // First, try to load the specified library as a file // that is visible in the default search path Throwable throwableFromFile; try { logger.log(level, "Loading library as a file"); System.loadLibrary(libraryName); logger.log(level, "Loading library as a file DONE"); return; } catch (Throwable t) { logger.log(level, "Loading library as a file FAILED"); throwableFromFile = t; } // Now try to load the library by extracting the // corresponding resource from the JAR file try { logger.log(level, "Loading library as a resource"); loadLibraryResource(LIBRARY_PATH_IN_JAR, libraryName, "", dependentLibraryNames); logger.log(level, "Loading library as a resource DONE"); return; } catch (Throwable throwableFromResource) { logger.log(level, "Loading library as a resource FAILED", throwableFromResource); StringWriter sw = new StringWriter(); PrintWriter pw = new PrintWriter(sw); pw.println("Error while loading native library \"" + libraryName + "\""); pw.println("Operating system name: "+ System.getProperty("os.name")); pw.println("Architecture : "+ System.getProperty("os.arch")); pw.println("Architecture bit size: "+ System.getProperty("sun.arch.data.model")); pw.println("---(start of nested stack traces)---"); pw.println("Stack trace from the attempt to " + "load the library as a file:"); throwableFromFile.printStackTrace(pw); pw.println("Stack trace from the attempt to " + "load the library as a resource:"); throwableFromResource.printStackTrace(pw); pw.println("---(end of nested stack traces)---"); pw.close(); throw new UnsatisfiedLinkError(sw.toString()); } } }
public class class_name { public static void loadLibrary( String libraryName, String ... dependentLibraryNames) { logger.log(level, "Loading library: " + libraryName); // First, try to load the specified library as a file // that is visible in the default search path Throwable throwableFromFile; try { logger.log(level, "Loading library as a file"); // depends on control dependency: [try], data = [none] System.loadLibrary(libraryName); // depends on control dependency: [try], data = [none] logger.log(level, "Loading library as a file DONE"); // depends on control dependency: [try], data = [none] return; // depends on control dependency: [try], data = [none] } catch (Throwable t) { logger.log(level, "Loading library as a file FAILED"); throwableFromFile = t; } // depends on control dependency: [catch], data = [none] // Now try to load the library by extracting the // corresponding resource from the JAR file try { logger.log(level, "Loading library as a resource"); loadLibraryResource(LIBRARY_PATH_IN_JAR, libraryName, "", dependentLibraryNames); logger.log(level, "Loading library as a resource DONE"); return; } catch (Throwable throwableFromResource) { logger.log(level, "Loading library as a resource FAILED", throwableFromResource); StringWriter sw = new StringWriter(); PrintWriter pw = new PrintWriter(sw); pw.println("Error while loading native library \"" + libraryName + "\""); pw.println("Operating system name: "+ System.getProperty("os.name")); pw.println("Architecture : "+ System.getProperty("os.arch")); pw.println("Architecture bit size: "+ System.getProperty("sun.arch.data.model")); pw.println("---(start of nested stack traces)---"); pw.println("Stack trace from the attempt to " + "load the library as a file:"); throwableFromFile.printStackTrace(pw); pw.println("Stack trace from the attempt to " + "load the library as a resource:"); throwableFromResource.printStackTrace(pw); pw.println("---(end of nested stack traces)---"); pw.close(); throw new 
UnsatisfiedLinkError(sw.toString()); } } }
public class class_name { private Class<?> getTemplateClass() { String fqName = getTargetPackage() + "." + getName(); try { mTemplateClass = getCompiler().loadClass(fqName); } catch (ClassNotFoundException nx) { try { mTemplateClass = getCompiler().loadClass(getName()); // Try standard path as a last resort } catch (ClassNotFoundException nx2) { return null; } } return mTemplateClass; } }
public class class_name { private Class<?> getTemplateClass() { String fqName = getTargetPackage() + "." + getName(); try { mTemplateClass = getCompiler().loadClass(fqName); } catch (ClassNotFoundException nx) { try { mTemplateClass = getCompiler().loadClass(getName()); // Try standard path as a last resort // depends on control dependency: [try], data = [none] } catch (ClassNotFoundException nx2) { return null; } // depends on control dependency: [catch], data = [none] } return mTemplateClass; } }
public class class_name { private void setLastOpenedGallery(CmsGallerySearchBean searchObject) { if ((searchObject.getGalleries() != null) && (searchObject.getGalleries().size() <= 1) // if the size is 0, the user has actively deselected the galleries, so we want to handle this case too && searchObject.haveGalleriesChanged()) { String galleryPath = searchObject.getGalleries().isEmpty() ? null : searchObject.getGalleries().get(0); CmsWorkplaceSettings settings = getWorkplaceSettings(); if (searchObject.getGalleryMode() == GalleryMode.adeView) { settings.setLastUsedGallery("" + GalleryMode.adeView, galleryPath); } else { String referencePath = searchObject.getReferencePath(); String referenceTypeName = ""; try { CmsObject cms = getCmsObject(); CmsResource referenceResource = cms.readResource(referencePath); I_CmsResourceType referenceType = OpenCms.getResourceManager().getResourceType(referenceResource); referenceTypeName = referenceType.getTypeName(); } catch (CmsException e) { LOG.error(e.getLocalizedMessage(), e); } settings.setLastUsedGallery( CmsGallerySearchBean.getGalleryStorageKey( searchObject.getGalleryStoragePrefix(), referenceTypeName), galleryPath); } } } }
public class class_name { private void setLastOpenedGallery(CmsGallerySearchBean searchObject) { if ((searchObject.getGalleries() != null) && (searchObject.getGalleries().size() <= 1) // if the size is 0, the user has actively deselected the galleries, so we want to handle this case too && searchObject.haveGalleriesChanged()) { String galleryPath = searchObject.getGalleries().isEmpty() ? null : searchObject.getGalleries().get(0); CmsWorkplaceSettings settings = getWorkplaceSettings(); if (searchObject.getGalleryMode() == GalleryMode.adeView) { settings.setLastUsedGallery("" + GalleryMode.adeView, galleryPath); // depends on control dependency: [if], data = [none] } else { String referencePath = searchObject.getReferencePath(); String referenceTypeName = ""; try { CmsObject cms = getCmsObject(); CmsResource referenceResource = cms.readResource(referencePath); I_CmsResourceType referenceType = OpenCms.getResourceManager().getResourceType(referenceResource); referenceTypeName = referenceType.getTypeName(); // depends on control dependency: [try], data = [none] } catch (CmsException e) { LOG.error(e.getLocalizedMessage(), e); } // depends on control dependency: [catch], data = [none] settings.setLastUsedGallery( CmsGallerySearchBean.getGalleryStorageKey( searchObject.getGalleryStoragePrefix(), referenceTypeName), galleryPath); // depends on control dependency: [if], data = [none] } } } }
public class class_name { public void getPersistentData(ObjectOutputStream oos) { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(tc, "getPersistentData", oos); try { HashMap hm = new HashMap(); hm.put("tick", Long.valueOf(_tick)); oos.writeObject(hm); } catch (IOException e) { FFDCFilter.processException( e, "com.ibm.ws.sib.processor.impl.store.items.AICompletedPrefixItem.getPersistentData", "1:129:1.18", this); SIErrorException e2 = new SIErrorException(e); SibTr.exception(tc, e2); if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "getPersistentData"); throw e2; } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "getPersistentData"); } }
public class class_name { public void getPersistentData(ObjectOutputStream oos) { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(tc, "getPersistentData", oos); try { HashMap hm = new HashMap(); hm.put("tick", Long.valueOf(_tick)); // depends on control dependency: [try], data = [none] oos.writeObject(hm); // depends on control dependency: [try], data = [none] } catch (IOException e) { FFDCFilter.processException( e, "com.ibm.ws.sib.processor.impl.store.items.AICompletedPrefixItem.getPersistentData", "1:129:1.18", this); SIErrorException e2 = new SIErrorException(e); SibTr.exception(tc, e2); if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "getPersistentData"); throw e2; } // depends on control dependency: [catch], data = [none] if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "getPersistentData"); } }
public class class_name { public byte[] getObjectContent(GetObjectRequest request) { BosObjectInputStream content = this.getObject(request).getObjectContent(); try { return IOUtils.toByteArray(content); } catch (IOException e) { try { content.close(); } catch (IOException e1) { // ignore, throw e not e1. } throw new BceClientException("Fail read object content", e); } finally { try { content.close(); } catch (IOException e) { // ignore } } } }
public class class_name {
    // Reads the full content of the requested object into a byte array,
    // closing the underlying stream in every code path (note: on the failure
    // path the stream is closed twice — once in the catch, once in finally).
    public byte[] getObjectContent(GetObjectRequest request) {
        BosObjectInputStream content = this.getObject(request).getObjectContent();
        try {
            return IOUtils.toByteArray(content); // depends on control dependency: [try], data = [none]
        } catch (IOException e) {
            try {
                content.close(); // depends on control dependency: [try], data = [none]
            } catch (IOException e1) {
                // ignore, throw e not e1.
            } // depends on control dependency: [catch], data = [none]
            throw new BceClientException("Fail read object content", e);
        } finally { // depends on control dependency: [catch], data = [none]
            try {
                content.close(); // depends on control dependency: [try], data = [none]
            } catch (IOException e) {
                // ignore
            } // depends on control dependency: [catch], data = [none]
        }
    }
}
public class class_name { public final int getFirstAttribute(int nodeHandle) { int nodeID = makeNodeIdentity(nodeHandle); if (nodeID == DTM.NULL) return DTM.NULL; int type = _type2(nodeID); if (DTM.ELEMENT_NODE == type) { // Assume that attributes and namespaces immediately follow the element. while (true) { nodeID++; // Assume this can not be null. type = _type2(nodeID); if (type == DTM.ATTRIBUTE_NODE) { return makeNodeHandle(nodeID); } else if (DTM.NAMESPACE_NODE != type) { break; } } } return DTM.NULL; } }
public class class_name {
    // Returns the handle of the first attribute of the given element node, or
    // DTM.NULL if the handle is null, the node is not an element, or the
    // element has no attributes. Relies on the DTM layout in which attribute
    // and namespace nodes directly follow their owning element.
    public final int getFirstAttribute(int nodeHandle) {
        int nodeID = makeNodeIdentity(nodeHandle);
        if (nodeID == DTM.NULL)
            return DTM.NULL;
        int type = _type2(nodeID);
        if (DTM.ELEMENT_NODE == type) {
            // Assume that attributes and namespaces immediately follow the element.
            while (true) {
                nodeID++; // depends on control dependency: [while], data = [none]
                // Assume this can not be null.
                type = _type2(nodeID); // depends on control dependency: [while], data = [none]
                if (type == DTM.ATTRIBUTE_NODE) {
                    return makeNodeHandle(nodeID); // depends on control dependency: [if], data = [none]
                } else if (DTM.NAMESPACE_NODE != type) {
                    break;
                }
            }
        }
        return DTM.NULL;
    }
}
public class class_name {
    /**
     * Shrinks the internal cache until it is within the configured bound by
     * evicting randomly sampled entries, never evicting {@code excluded}.
     * (Method name typo "Neccessary" kept for compatibility with callers.)
     *
     * @param excluded context that must survive the cleanup
     */
    private void cleanupIfNeccessary(JsonGetterContext excluded) {
        for (int size = internalCache.size(); size > maxContexts; size = internalCache.size()) {
            // Sample at least the configured minimum, plus one so the excluded
            // entry being drawn cannot stall progress.
            int sampleCount = Math.max(size - maxContexts, cleanupRemoveAtLeastItems) + 1;
            for (SamplingEntry candidate : internalCache.getRandomSamples(sampleCount)) {
                if (candidate.getEntryValue() != excluded) {
                    internalCache.remove(candidate.getEntryKey());
                }
            }
        }
    }
}
public class class_name {
    // Shrinks the internal cache until it is within the configured bound by
    // evicting randomly sampled entries, never evicting the excluded context.
    private void cleanupIfNeccessary(JsonGetterContext excluded) {
        int cacheCount;
        while ((cacheCount = internalCache.size()) > maxContexts) {
            // sample at least the configured minimum, plus one so the excluded
            // entry being drawn cannot stall progress
            int sampleCount = Math.max(cacheCount - maxContexts, cleanupRemoveAtLeastItems) + 1;
            for (SamplingEntry sample: internalCache.getRandomSamples(sampleCount)) {
                if (excluded != sample.getEntryValue()) {
                    internalCache.remove(sample.getEntryKey()); // depends on control dependency: [if], data = [none]
                }
            }
        }
    }
}
public class class_name {
    /**
     * Propagates the given failure to every callback registered on the request.
     *
     * @param asyncRequest request whose pending thunks are notified
     * @param throwable    failure to deliver to each callback
     * @deprecated retained for backward compatibility
     */
    @Deprecated
    protected void onFailure(AsyncRequest<D, RQ> asyncRequest, Throwable throwable) {
        for (AsyncRequest.Thunk pending : asyncRequest.getThunks()) {
            pending.callback.onFailure(throwable);
        }
    }
}
public class class_name {
    // Propagates the given failure to every callback registered on the request.
    @Deprecated
    protected void onFailure(AsyncRequest<D, RQ> asyncRequest, Throwable throwable) {
        for (AsyncRequest.Thunk thunk: asyncRequest.getThunks()) {
            thunk.callback.onFailure(throwable); // depends on control dependency: [for], data = [thunk]
        }
    }
}
public class class_name {
    /**
     * Instantiates a custom global processor for each (key, class-name) pair and
     * registers it, wrapped in a {@code CustomGlobalProcessorChainedWrapper},
     * under its key.
     *
     * @param keysClassNames map from processor key to fully-qualified class name
     */
    @SuppressWarnings("unchecked")
    @Override
    public void setCustomGlobalProcessors(Map<String, String> keysClassNames) {
        for (Entry<String, String> entry : keysClassNames.entrySet()) {
            // Entry is already typed <String, String>; the original's (String)
            // casts on getKey()/getValue() were redundant and have been removed.
            String key = entry.getKey();
            GlobalProcessor<T> customGlobalPreprocessor = (GlobalProcessor<T>) ClassLoaderResourceUtils
                    .buildObjectInstance(entry.getValue());
            customPostprocessors.put(key, new CustomGlobalProcessorChainedWrapper<>(key, customGlobalPreprocessor));
        }
    }
}
public class class_name {
    // Instantiates a custom global processor for each (key, class-name) pair
    // and registers it, wrapped in a CustomGlobalProcessorChainedWrapper,
    // under its key.
    @SuppressWarnings("unchecked")
    @Override
    public void setCustomGlobalProcessors(Map<String, String> keysClassNames) {
        for (Entry<String, String> entry : keysClassNames.entrySet()) {
            GlobalProcessor<T> customGlobalPreprocessor = (GlobalProcessor<T>) ClassLoaderResourceUtils
                    .buildObjectInstance((String) entry.getValue());
            String key = (String) entry.getKey();
            customPostprocessors.put(key, new CustomGlobalProcessorChainedWrapper<>(key, customGlobalPreprocessor)); // depends on control dependency: [for], data = [none]
        }
    }
}
public class class_name {
    /**
     * Blocks until every outstanding stream has been written and answered,
     * removing each entry from the pending map as its response arrives.
     * Throws if any write or response times out or fails.
     * NOTE(review): reports progress via System.out — consider a logger.
     *
     * @param timeout maximum wait per write/response
     * @param unit    unit of {@code timeout}
     */
    public void awaitResponses(long timeout, TimeUnit unit) {
        Iterator<Entry<Integer, Entry<ChannelFuture, ChannelPromise>>> it =
                streamidPromiseMap.entrySet().iterator();
        while (it.hasNext()) {
            Entry<Integer, Entry<ChannelFuture, ChannelPromise>> current = it.next();
            Integer streamId = current.getKey();
            ChannelFuture write = current.getValue().getKey();
            if (!write.awaitUninterruptibly(timeout, unit)) {
                throw new IllegalStateException("Timed out waiting to write for stream id " + streamId);
            }
            if (!write.isSuccess()) {
                throw new RuntimeException(write.cause());
            }
            ChannelPromise response = current.getValue().getValue();
            if (!response.awaitUninterruptibly(timeout, unit)) {
                throw new IllegalStateException("Timed out waiting for response on stream id " + streamId);
            }
            if (!response.isSuccess()) {
                throw new RuntimeException(response.cause());
            }
            System.out.println("---Stream id: " + streamId + " received---");
            // Iterator.remove avoids ConcurrentModificationException while draining.
            it.remove();
        }
    }
}
public class class_name {
    // Blocks until every outstanding stream has been written and answered,
    // removing each entry from the pending map as its response arrives.
    // Throws if any write or response times out or fails.
    public void awaitResponses(long timeout, TimeUnit unit) {
        Iterator<Entry<Integer, Entry<ChannelFuture, ChannelPromise>>> itr = streamidPromiseMap.entrySet().iterator();
        while (itr.hasNext()) {
            Entry<Integer, Entry<ChannelFuture, ChannelPromise>> entry = itr.next();
            ChannelFuture writeFuture = entry.getValue().getKey();
            if (!writeFuture.awaitUninterruptibly(timeout, unit)) {
                throw new IllegalStateException("Timed out waiting to write for stream id " + entry.getKey());
            }
            if (!writeFuture.isSuccess()) {
                throw new RuntimeException(writeFuture.cause());
            }
            ChannelPromise promise = entry.getValue().getValue();
            if (!promise.awaitUninterruptibly(timeout, unit)) {
                throw new IllegalStateException("Timed out waiting for response on stream id " + entry.getKey());
            }
            if (!promise.isSuccess()) {
                throw new RuntimeException(promise.cause());
            }
            System.out.println("---Stream id: " + entry.getKey() + " received---"); // depends on control dependency: [while], data = [none]
            // Iterator.remove avoids ConcurrentModificationException while draining
            itr.remove(); // depends on control dependency: [while], data = [none]
        }
    }
}
public class class_name {
    /**
     * Swaps the case of every character in the string: upper case and title
     * case become lower case, lower case becomes upper case; everything else
     * passes through unchanged.
     *
     * @param str input string, may be null
     * @return the case-swapped string, or the input itself when null or empty
     */
    public static String swapCase(String str) {
        if (str == null || str.isEmpty()) {
            return str;
        }
        final int length = str.length();
        StringBuilder out = new StringBuilder(length);
        for (int i = 0; i < length; i++) {
            char c = str.charAt(i);
            // Upper and title case both map to lower case.
            if (Character.isUpperCase(c) || Character.isTitleCase(c)) {
                c = Character.toLowerCase(c);
            } else if (Character.isLowerCase(c)) {
                c = Character.toUpperCase(c);
            }
            out.append(c);
        }
        return out.toString();
    }
}
public class class_name {
    // Swaps the case of every character in the string: upper case and title
    // case become lower case, lower case becomes upper case; everything else
    // passes through unchanged. Null and empty inputs are returned as-is.
    public static String swapCase(String str) {
        int strLen;
        if (str == null || (strLen = str.length()) == 0) {
            return str; // depends on control dependency: [if], data = [none]
        }
        StringBuilder buffer = new StringBuilder( strLen );
        char ch = 0;
        for (int i = 0; i < strLen; i++) {
            ch = str.charAt(i); // depends on control dependency: [for], data = [i]
            if (Character.isUpperCase(ch)) {
                ch = Character.toLowerCase(ch); // depends on control dependency: [if], data = [none]
            } else if (Character.isTitleCase(ch)) {
                ch = Character.toLowerCase(ch); // depends on control dependency: [if], data = [none]
            } else if (Character.isLowerCase(ch)) {
                ch = Character.toUpperCase(ch); // depends on control dependency: [if], data = [none]
            }
            buffer.append(ch); // depends on control dependency: [for], data = [none]
        }
        return buffer.toString();
    }
}
public class class_name {
    /**
     * Normalizes a path so that it begins and ends with exactly one '/' and
     * contains no runs of consecutive slashes. The empty string normalizes to
     * "/".
     *
     * @param path path to normalize, must not be null
     * @return the normalized path
     * @throws NullPointerException if {@code path} is null
     */
    public static String normalizePath(String path) {
        java.util.Objects.requireNonNull(path, "path");
        if (path.isEmpty()) {
            return "/";
        }
        StringBuilder bounded = new StringBuilder(path.length() + 2);
        if (path.charAt(0) != '/') {
            bounded.append('/');
        }
        bounded.append(path);
        if (path.charAt(path.length() - 1) != '/') {
            bounded.append('/');
        }
        // Collapse any run of slashes into a single one.
        return bounded.toString().replaceAll("//+", "/");
    }
}
public class class_name {
    // Normalizes a path so that it begins and ends with exactly one '/' and
    // contains no runs of consecutive slashes. The empty string normalizes
    // to "/"; a null path is rejected with NullPointerException.
    public static String normalizePath(String path) {
        requireNonNull(path, "path");
        if (path.isEmpty()) {
            return "/"; // depends on control dependency: [if], data = [none]
        }
        if (!path.startsWith("/")) {
            path = '/' + path; // depends on control dependency: [if], data = [none]
        }
        if (!path.endsWith("/")) {
            path += '/'; // depends on control dependency: [if], data = [none]
        }
        // collapse any run of slashes into a single one
        return path.replaceAll("//+", "/");
    }
}
public class class_name {
    // Lays out the contiguous feature id space. Each section gets a base
    // offset (k*InFeaturespace), a NIL sentinel id (kNil*) and advances
    // kFeatureSpaceEnd by the section's size. The section order (forms,
    // postags, deprels, distance, valency, cluster4, cluster6, cluster) must
    // match the feature extraction code — do not reorder.
    void build_feature_space() {
        // word forms occupy [0, |forms|)
        kFormInFeaturespace = 0;
        kNilForm = forms_alphabet.idOf(SpecialOption.NIL);
        kFeatureSpaceEnd = forms_alphabet.size();
        // POS tags
        kPostagInFeaturespace = kFeatureSpaceEnd;
        kNilPostag = kFeatureSpaceEnd + postags_alphabet.idOf(SpecialOption.NIL);
        kFeatureSpaceEnd += postags_alphabet.size();
        // dependency relations
        kDeprelInFeaturespace = kFeatureSpaceEnd;
        kNilDeprel = kFeatureSpaceEnd + deprels_alphabet.idOf(SpecialOption.NIL);
        kFeatureSpaceEnd += deprels_alphabet.size();
        // distance bucket: 8 value slots + 1 NIL slot when enabled, else empty
        // (NOTE(review): bucket semantics assumed from the 8/9 constants — confirm)
        kDistanceInFeaturespace = kFeatureSpaceEnd;
        kNilDistance = kFeatureSpaceEnd + (use_distance ? 8 : 0);
        kFeatureSpaceEnd += (use_distance ? 9 : 0);
        // valency bucket: same 8+1 layout as distance when enabled
        kValencyInFeaturespace = kFeatureSpaceEnd;
        kNilValency = kFeatureSpaceEnd + (use_valency ? 8 : 0);
        kFeatureSpaceEnd += (use_valency ? 9 : 0);
        // 4-bit cluster prefixes (only consume space when clusters are enabled)
        kCluster4InFeaturespace = kFeatureSpaceEnd;
        if (use_cluster) {
            kNilCluster4 = kFeatureSpaceEnd + cluster4_types_alphabet.idOf(SpecialOption.NIL);
            kFeatureSpaceEnd += cluster4_types_alphabet.size();
        } else {
            kNilCluster4 = kFeatureSpaceEnd;
        }
        // 6-bit cluster prefixes
        kCluster6InFeaturespace = kFeatureSpaceEnd;
        if (use_cluster) {
            kNilCluster6 = kFeatureSpaceEnd + cluster6_types_alphabet.idOf(SpecialOption.NIL);
            kFeatureSpaceEnd += cluster6_types_alphabet.size();
        } else {
            kNilCluster6 = kFeatureSpaceEnd;
        }
        // full cluster ids
        kClusterInFeaturespace = kFeatureSpaceEnd;
        if (use_cluster) {
            kNilCluster = kFeatureSpaceEnd + cluster_types_alphabet.idOf(SpecialOption.NIL);
            kFeatureSpaceEnd += cluster_types_alphabet.size();
        } else {
            kNilCluster = kFeatureSpaceEnd;
        }
    }
}
public class class_name {
    // Lays out the contiguous feature id space: each section gets a base
    // offset (k*InFeaturespace), a NIL sentinel id (kNil*) and advances
    // kFeatureSpaceEnd by the section's size. Section order must match the
    // feature extraction code — do not reorder.
    void build_feature_space() {
        kFormInFeaturespace = 0;
        kNilForm = forms_alphabet.idOf(SpecialOption.NIL);
        kFeatureSpaceEnd = forms_alphabet.size();
        kPostagInFeaturespace = kFeatureSpaceEnd;
        kNilPostag = kFeatureSpaceEnd + postags_alphabet.idOf(SpecialOption.NIL);
        kFeatureSpaceEnd += postags_alphabet.size();
        kDeprelInFeaturespace = kFeatureSpaceEnd;
        kNilDeprel = kFeatureSpaceEnd + deprels_alphabet.idOf(SpecialOption.NIL);
        kFeatureSpaceEnd += deprels_alphabet.size();
        // optional distance section: 8 value slots + 1 NIL slot when enabled
        kDistanceInFeaturespace = kFeatureSpaceEnd;
        kNilDistance = kFeatureSpaceEnd + (use_distance ? 8 : 0);
        kFeatureSpaceEnd += (use_distance ? 9 : 0);
        // optional valency section: same 8+1 layout
        kValencyInFeaturespace = kFeatureSpaceEnd;
        kNilValency = kFeatureSpaceEnd + (use_valency ? 8 : 0);
        kFeatureSpaceEnd += (use_valency ? 9 : 0);
        kCluster4InFeaturespace = kFeatureSpaceEnd;
        if (use_cluster) {
            kNilCluster4 = kFeatureSpaceEnd + cluster4_types_alphabet.idOf(SpecialOption.NIL); // depends on control dependency: [if], data = [none]
            kFeatureSpaceEnd += cluster4_types_alphabet.size(); // depends on control dependency: [if], data = [none]
        } else {
            kNilCluster4 = kFeatureSpaceEnd; // depends on control dependency: [if], data = [none]
        }
        kCluster6InFeaturespace = kFeatureSpaceEnd;
        if (use_cluster) {
            kNilCluster6 = kFeatureSpaceEnd + cluster6_types_alphabet.idOf(SpecialOption.NIL); // depends on control dependency: [if], data = [none]
            kFeatureSpaceEnd += cluster6_types_alphabet.size(); // depends on control dependency: [if], data = [none]
        } else {
            kNilCluster6 = kFeatureSpaceEnd; // depends on control dependency: [if], data = [none]
        }
        kClusterInFeaturespace = kFeatureSpaceEnd;
        if (use_cluster) {
            kNilCluster = kFeatureSpaceEnd + cluster_types_alphabet.idOf(SpecialOption.NIL); // depends on control dependency: [if], data = [none]
            kFeatureSpaceEnd += cluster_types_alphabet.size(); // depends on control dependency: [if], data = [none]
        } else {
            kNilCluster = kFeatureSpaceEnd; // depends on control dependency: [if], data = [none]
        }
    }
}