code stringlengths 130 281k | code_dependency stringlengths 182 306k |
|---|---|
public class class_name {
public void addOrderBy(String fieldName, boolean sortAscending)
{
    // A null field name is silently ignored; otherwise record the sort field
    // together with its direction.
    if (fieldName == null)
    {
        return;
    }
    m_orderby.add(new FieldHelper(fieldName, sortAscending));
} } | public class class_name {
public void addOrderBy(String fieldName, boolean sortAscending)
{
if (fieldName != null)
{
m_orderby.add(new FieldHelper(fieldName, sortAscending));
// depends on control dependency: [if], data = [(fieldName]
}
} } |
public class class_name {
// Records one site's view of a mesh failure: for each known hsid it marks,
// attributed to reportingHsid, whether that hsid was seen dead, alive, or
// explicitly reported failed. Earlier claims by the same reporter are
// removed from m_dead/m_alive before the new view is added.
public void add(long reportingHsid, SiteFailureMessage sfm) {
// skip if the reporting site did not belong to the pre
// failure mesh, or the reporting site is reporting itself
// dead, or none of the sites in the safe transaction map
// are among the known hsids
if ( !m_hsids.contains(reportingHsid)
|| !sfm.m_survivors.contains(reportingHsid)) return;
Set<Long> survivors = sfm.m_survivors;
// If none of the observed failures involve known hsids, treat the whole
// pre-failure mesh as surviving from this reporter's point of view.
if (Sets.filter(sfm.getObservedFailedSites(), in(m_hsids)).isEmpty()) {
survivors = m_hsids;
}
// dead = pre failure mesh - survivors
Set<Long> dead = Sets.difference(m_hsids, survivors);
// Drop this reporter's previous death claims before re-adding them.
removeValue(m_dead, reportingHsid);
// add dead graph nodes
for (long w: dead) {
if (!m_hsids.contains(w)) continue;
m_dead.put(w,reportingHsid);
}
// Remove all what the reporting site thought
// was alive before this invocation
removeValue(m_alive, reportingHsid);
// add alive graph nodes
for (long s: survivors) {
if (!m_hsids.contains(s)) continue;
m_alive.put(s, reportingHsid);
}
// Track which known sites this reporter explicitly declared failed.
for (long s: sfm.getFailedSites()) {
if (!m_hsids.contains(s)) continue;
m_reported.put(s, reportingHsid);
}
} } | public class class_name {
public void add(long reportingHsid, SiteFailureMessage sfm) {
// skip if the reporting site did not belong to the pre
// failure mesh, or the reporting site is reporting itself
// dead, or none of the sites in the safe transaction map
// are among the known hsids
if ( !m_hsids.contains(reportingHsid)
|| !sfm.m_survivors.contains(reportingHsid)) return;
Set<Long> survivors = sfm.m_survivors;
if (Sets.filter(sfm.getObservedFailedSites(), in(m_hsids)).isEmpty()) {
survivors = m_hsids; // depends on control dependency: [if], data = [none]
}
// dead = pre failure mesh - survivors
Set<Long> dead = Sets.difference(m_hsids, survivors);
removeValue(m_dead, reportingHsid);
// add dead graph nodes
for (long w: dead) {
if (!m_hsids.contains(w)) continue;
m_dead.put(w,reportingHsid); // depends on control dependency: [for], data = [w]
}
// Remove all what the reporting site thought
// was alive before this invocation
removeValue(m_alive, reportingHsid);
// add alive graph nodes
for (long s: survivors) {
if (!m_hsids.contains(s)) continue;
m_alive.put(s, reportingHsid); // depends on control dependency: [for], data = [s]
}
for (long s: sfm.getFailedSites()) {
if (!m_hsids.contains(s)) continue;
m_reported.put(s, reportingHsid); // depends on control dependency: [for], data = [s]
}
} } |
public class class_name {
public void setListeners(java.util.Collection<Listener> listeners) {
    // Null means "unset"; any other collection is defensively copied into
    // the SDK's internal list type.
    this.listeners = (listeners == null)
            ? null
            : new com.amazonaws.internal.SdkInternalList<Listener>(listeners);
} } | public class class_name {
public void setListeners(java.util.Collection<Listener> listeners) {
if (listeners == null) {
this.listeners = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.listeners = new com.amazonaws.internal.SdkInternalList<Listener>(listeners);
} } |
public class class_name {
@Override
protected void updateStackingContexts()
{
    super.updateStackingContexts();
    // Nothing to register without a stacking parent.
    if (stackingParent == null)
        return;
    //all the positioned boxes are considered as separate stacking contexts
    if (!formsStackingContext())
        return;
    stackingParent.getStackingContext().registerChildContext(this);
    if (scontext != null) //clear this context if it exists (remove old children)
        scontext.clear();
} } | public class class_name {
@Override
protected void updateStackingContexts()
{
super.updateStackingContexts();
if (stackingParent != null)
{
if (formsStackingContext()) //all the positioned boxes are considered as separate stacking contexts
{
stackingParent.getStackingContext().registerChildContext(this); // depends on control dependency: [if], data = [none]
if (scontext != null) //clear this context if it exists (remove old children)
scontext.clear();
}
}
} } |
public class class_name {
/**
 * Dispatches the given operation to the reconciliation routine matching its
 * type; unsupported operation types yield a failed future.
 */
private CompletableFuture<WriterFlushResult> reconcileOperation(StorageOperation op, SegmentProperties storageInfo, TimeoutTimer timer) {
    if (isAppendOperation(op)) {
        return reconcileAppendOperation((AggregatedAppendOperation) op, storageInfo, timer);
    }
    if (op instanceof MergeSegmentOperation) {
        return reconcileMergeOperation((MergeSegmentOperation) op, storageInfo, timer);
    }
    if (op instanceof StreamSegmentSealOperation) {
        return reconcileSealOperation(storageInfo, timer.getRemaining());
    }
    if (isTruncateOperation(op)) {
        // Nothing to reconcile here.
        updateStatePostTruncate();
        return CompletableFuture.completedFuture(new WriterFlushResult());
    }
    return Futures.failedFuture(new ReconciliationFailureException(String.format("Operation '%s' is not supported for reconciliation.", op), this.metadata, storageInfo));
} } | public class class_name {
private CompletableFuture<WriterFlushResult> reconcileOperation(StorageOperation op, SegmentProperties storageInfo, TimeoutTimer timer) {
CompletableFuture<WriterFlushResult> result;
if (isAppendOperation(op)) {
result = reconcileAppendOperation((AggregatedAppendOperation) op, storageInfo, timer); // depends on control dependency: [if], data = [none]
} else if (op instanceof MergeSegmentOperation) {
result = reconcileMergeOperation((MergeSegmentOperation) op, storageInfo, timer); // depends on control dependency: [if], data = [none]
} else if (op instanceof StreamSegmentSealOperation) {
result = reconcileSealOperation(storageInfo, timer.getRemaining()); // depends on control dependency: [if], data = [none]
} else if (isTruncateOperation(op)) {
// Nothing to reconcile here.
updateStatePostTruncate(); // depends on control dependency: [if], data = [none]
result = CompletableFuture.completedFuture(new WriterFlushResult()); // depends on control dependency: [if], data = [none]
} else {
result = Futures.failedFuture(new ReconciliationFailureException(String.format("Operation '%s' is not supported for reconciliation.", op), this.metadata, storageInfo)); // depends on control dependency: [if], data = [none]
}
return result;
} } |
public class class_name {
// Loads a DataSet for the given record metadata, lazily initializing the
// underlying iterator from the first record when needed.
public DataSet loadFromMetaData(List<RecordMetaData> list) throws IOException {
if (underlying == null) {
// Lazily initialize using the first record's sequence data.
SequenceRecord r = recordReader.loadSequenceFromMetaData(list.get(0));
initializeUnderlying(r);
}
//Two cases: single vs. multiple reader...
List<RecordMetaData> l = new ArrayList<>(list.size());
if (singleSequenceReaderMode) {
// Single reader: wrap each metadata entry under the lone reader key.
for (RecordMetaData m : list) {
l.add(new RecordMetaDataComposableMap(Collections.singletonMap(READER_KEY, m)));
}
} else {
// Two readers: split the composable metadata — getMeta()[0] goes under
// READER_KEY, getMeta()[1] under READER_KEY_LABEL.
for (RecordMetaData m : list) {
RecordMetaDataComposable rmdc = (RecordMetaDataComposable) m;
Map<String, RecordMetaData> map = new HashMap<>(2);
map.put(READER_KEY, rmdc.getMeta()[0]);
map.put(READER_KEY_LABEL, rmdc.getMeta()[1]);
l.add(new RecordMetaDataComposableMap(map));
}
}
return mdsToDataSet(underlying.loadFromMetaData(l));
} } | public class class_name {
public DataSet loadFromMetaData(List<RecordMetaData> list) throws IOException {
if (underlying == null) {
SequenceRecord r = recordReader.loadSequenceFromMetaData(list.get(0));
initializeUnderlying(r);
}
//Two cases: single vs. multiple reader...
List<RecordMetaData> l = new ArrayList<>(list.size());
if (singleSequenceReaderMode) {
for (RecordMetaData m : list) {
l.add(new RecordMetaDataComposableMap(Collections.singletonMap(READER_KEY, m))); // depends on control dependency: [for], data = [m]
}
} else {
for (RecordMetaData m : list) {
RecordMetaDataComposable rmdc = (RecordMetaDataComposable) m;
Map<String, RecordMetaData> map = new HashMap<>(2);
map.put(READER_KEY, rmdc.getMeta()[0]); // depends on control dependency: [for], data = [m]
map.put(READER_KEY_LABEL, rmdc.getMeta()[1]); // depends on control dependency: [for], data = [m]
l.add(new RecordMetaDataComposableMap(map)); // depends on control dependency: [for], data = [m]
}
}
return mdsToDataSet(underlying.loadFromMetaData(l));
} } |
public class class_name {
/**
 * Emits a raw (unformatted) error diagnostic at the given position, capped
 * at MaxErrors, and always flushes the error writer.
 */
public void rawError(int pos, String msg) {
    PrintWriter errorOut = writers.get(WriterKind.ERROR);
    boolean belowCap = nerrors < MaxErrors;
    if (belowCap && shouldReport(currentSourceFile(), pos)) {
        printRawDiag(errorOut, "error: ", pos, msg);
        prompt();
        ++nerrors;
    }
    errorOut.flush();
} } | public class class_name {
public void rawError(int pos, String msg) {
PrintWriter errWriter = writers.get(WriterKind.ERROR);
if (nerrors < MaxErrors && shouldReport(currentSourceFile(), pos)) {
printRawDiag(errWriter, "error: ", pos, msg); // depends on control dependency: [if], data = [none]
prompt(); // depends on control dependency: [if], data = [none]
nerrors++; // depends on control dependency: [if], data = [none]
}
errWriter.flush();
} } |
public class class_name {
public EEnum getIfcDataOriginEnum() {
    // Already resolved on a previous call.
    if (ifcDataOriginEnumEEnum != null) {
        return ifcDataOriginEnumEEnum;
    }
    // Lazily look up the enum from the registered Ifc2x3tc1 package;
    // classifier index 808 presumably matches the generated package
    // metadata — keep in sync with it.
    EPackage ifcPackage = EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI);
    ifcDataOriginEnumEEnum = (EEnum) ifcPackage.getEClassifiers().get(808);
    return ifcDataOriginEnumEEnum;
} } | public class class_name {
public EEnum getIfcDataOriginEnum() {
if (ifcDataOriginEnumEEnum == null) {
ifcDataOriginEnumEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI)
.getEClassifiers().get(808);
// depends on control dependency: [if], data = [none]
}
return ifcDataOriginEnumEEnum;
} } |
public class class_name {
@Override
public T convert(S value) {
    // Only a null input with a configured default is convertible here;
    // every other combination is a conversion failure.
    T fallback = getDefaultValue();
    if (value != null || fallback == null) {
        throw newConversionException("Cannot convert [%1$s] to [%2$s]",
            value, getTargetType().map(Class::getName).orElse(Constants.UNKNOWN));
    }
    return fallback;
} } | public class class_name {
@Override
public T convert(S value) {
T defaultValue = getDefaultValue();
if (value == null && defaultValue != null) {
return defaultValue; // depends on control dependency: [if], data = [none]
}
throw newConversionException("Cannot convert [%1$s] to [%2$s]",
value, getTargetType().map(Class::getName).orElse(Constants.UNKNOWN));
} } |
public class class_name {
@Override
public void execute(FunctionContext<Object> functionContext)
{
    try
    {
        // Validate the region-name argument before touching the cache.
        String[] args = (String[]) functionContext.getArguments();
        if (args == null || args.length == 0) {
            throw new IllegalArgumentException("region argument required");
        }
        String regionName = args[0];
        if (regionName == null || regionName.length() == 0) {
            throw new IllegalArgumentException("region name argument required");
        }
        Region<Serializable, Object> region = CacheFactory.getAnyInstance().getRegion(regionName);
        if (region == null) {
            throw new IllegalArgumentException("region:" + regionName + " not found");
        }
        // Send the checksum map for the region back to the caller.
        functionContext.getResultSender().lastResult(buildCheckSumMap(region));
    }
    catch (Exception e)
    {
        // Log the full stack trace and propagate it inside the
        // FunctionException message.
        String stack = Debugger.stackTrace(e);
        LogManager.getLogger(getClass()).error(stack);
        throw new FunctionException(stack);
    }
} } | public class class_name {
@Override
public void execute(FunctionContext<Object> functionContext)
{
try
{
String[] args = (String[])functionContext.getArguments();
if(args == null || args.length == 0)
throw new IllegalArgumentException("region argument required");
String regionName = args[0];
if(regionName == null || regionName.length() == 0)
throw new IllegalArgumentException("region name argument required");
Region<Serializable,Object> region = CacheFactory.getAnyInstance().getRegion(regionName);
if(region == null)
throw new IllegalArgumentException("region:"+regionName+" not found");
functionContext.getResultSender().lastResult(buildCheckSumMap(region)); // depends on control dependency: [try], data = [none]
}
catch (Exception e)
{
String stack = Debugger.stackTrace(e);
LogManager.getLogger(getClass()).error(stack);
throw new FunctionException(stack);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public TypeDescriptor narrow(Object value) {
    // With no value to inspect, the descriptor cannot be narrowed further;
    // otherwise rebuild it around the value's runtime class.
    return (value == null)
            ? this
            : new TypeDescriptor(value.getClass(), this.elementTypeDescriptor,
                    this.mapKeyTypeDescriptor, this.mapValueTypeDescriptor, this.annotations);
} } | public class class_name {
public TypeDescriptor narrow(Object value) {
if (value == null) {
return this; // depends on control dependency: [if], data = [none]
}
return new TypeDescriptor(value.getClass(), this.elementTypeDescriptor,
this.mapKeyTypeDescriptor, this.mapValueTypeDescriptor, this.annotations);
} } |
public class class_name {
// REST endpoint: returns, as JSON (optionally JSONP-wrapped), the attributes
// and operations of every MBean whose ObjectName matches the given pattern.
// Any failure is logged and returned as a 500 with the exception message.
@GET
@Path("/mbeans")
public Response getMBeansByRegex(
@QueryParam("objNameRegex") String objNameRegex,
@QueryParam("jsonp") @DefaultValue("") String jsonp) {
try {
ObjectName objNameForRegex = new ObjectName(objNameRegex);
MBeanServer mBeanServer = ManagementFactory
.getPlatformMBeanServer();
Set<ObjectName> objs = mBeanServer
.queryNames(objNameForRegex, null);
JSONObject result = new JSONObject();
for (ObjectName objName : objs) {
JSONObject json = new JSONObject();
// NOTE(review): the catch below retries the exact same call; if the
// first emitAttributes attempt threw, the retry will typically throw
// too and escape to the outer handler. Confirm whether a fallback
// value was intended here instead.
try {
json.put("attributes", emitAttributes(objName));
} catch (Exception e) {
json.put("attributes", emitAttributes(objName));
}
json.put("operations", emitOperations(objName));
result.put(objName.getCanonicalName(), json);
}
StringWriter out = new StringWriter();
if (jsonp.isEmpty()) {
result.write(out);
} else {
// Wrap the payload in the requested JSONP callback.
out.append(jsonp).append("(");
result.write(out);
out.append(");");
}
return Response.ok(out.toString()).type(MediaType.APPLICATION_JSON)
.build();
} catch (Exception e) {
LOG.error(
"Error while retrieving mbeans for regex:" + objNameRegex,
e);
return Response.serverError().entity(e.getMessage()).build();
}
} } | public class class_name {
@GET
@Path("/mbeans")
public Response getMBeansByRegex(
@QueryParam("objNameRegex") String objNameRegex,
@QueryParam("jsonp") @DefaultValue("") String jsonp) {
try {
ObjectName objNameForRegex = new ObjectName(objNameRegex);
MBeanServer mBeanServer = ManagementFactory
.getPlatformMBeanServer();
Set<ObjectName> objs = mBeanServer
.queryNames(objNameForRegex, null);
JSONObject result = new JSONObject();
for (ObjectName objName : objs) {
JSONObject json = new JSONObject();
try {
json.put("attributes", emitAttributes(objName)); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
json.put("attributes", emitAttributes(objName));
} // depends on control dependency: [catch], data = [none]
json.put("operations", emitOperations(objName));
result.put(objName.getCanonicalName(), json);
}
StringWriter out = new StringWriter();
if (jsonp.isEmpty()) {
result.write(out);
} else {
out.append(jsonp).append("(");
result.write(out);
out.append(");");
}
return Response.ok(out.toString()).type(MediaType.APPLICATION_JSON)
.build();
} catch (Exception e) {
LOG.error(
"Error while retrieving mbeans for regex:" + objNameRegex,
e);
return Response.serverError().entity(e.getMessage()).build();
}
} } |
public class class_name {
// Rebuilds instanceTypeDescriptionMap from the currently registered hosts:
// for each available instance type it counts matching hosts, derives a
// pessimistic (minimum across hosts) hardware description, and credits each
// type with the instances that can be accommodated inside larger types.
// (Note: method name "Instace" is a historical typo; renaming would break
// callers, so it is kept.)
private void updateInstaceTypeDescriptionMap() {
// this.registeredHosts.values().iterator()
this.instanceTypeDescriptionMap.clear();
final List<InstanceTypeDescription> instanceTypeDescriptionList = new ArrayList<InstanceTypeDescription>();
// initialize array which stores the availability counter for each instance type
final int[] numberOfInstances = new int[this.availableInstanceTypes.length];
for (int i = 0; i < numberOfInstances.length; i++) {
numberOfInstances[i] = 0;
}
// Shuffle through instance types
for (int i = 0; i < this.availableInstanceTypes.length; i++) {
final InstanceType currentInstanceType = this.availableInstanceTypes[i];
int numberOfMatchingInstances = 0;
// Minima across all hosts of this type; MAX_VALUE sentinels mean
// "no matching host seen yet".
int minNumberOfCPUCores = Integer.MAX_VALUE;
long minSizeOfPhysicalMemory = Long.MAX_VALUE;
long minSizeOfFreeMemory = Long.MAX_VALUE;
final Iterator<ClusterInstance> it = this.registeredHosts.values().iterator();
while (it.hasNext()) {
final ClusterInstance clusterInstance = it.next();
if (clusterInstance.getType().equals(currentInstanceType)) {
++numberOfMatchingInstances;
final HardwareDescription hardwareDescription = clusterInstance.getHardwareDescription();
minNumberOfCPUCores = Math.min(minNumberOfCPUCores, hardwareDescription.getNumberOfCPUCores());
minSizeOfPhysicalMemory = Math.min(minSizeOfPhysicalMemory,
hardwareDescription.getSizeOfPhysicalMemory());
minSizeOfFreeMemory = Math.min(minSizeOfFreeMemory, hardwareDescription.getSizeOfFreeMemory());
}
}
// Update number of instances
int highestAccommodationNumber = -1;
int highestAccommodationIndex = -1;
for (int j = 0; j < this.availableInstanceTypes.length; j++) {
final int accommodationNumber = canBeAccommodated(j, i);
// LOG.debug(this.availableInstanceTypes[j].getIdentifier() + " fits into "
// + this.availableInstanceTypes[i].getIdentifier() + " " + accommodationNumber + " times");
if (accommodationNumber > 0) {
numberOfInstances[j] += numberOfMatchingInstances * accommodationNumber;
if (accommodationNumber > highestAccommodationNumber) {
highestAccommodationNumber = accommodationNumber;
highestAccommodationIndex = j;
}
}
}
// Calculate hardware description
HardwareDescription pessimisticHardwareDescription = null;
if (minNumberOfCPUCores < Integer.MAX_VALUE && minSizeOfPhysicalMemory < Long.MAX_VALUE
&& minSizeOfFreeMemory < Long.MAX_VALUE) {
// At least one matching host was seen: use the observed minima.
pessimisticHardwareDescription = HardwareDescriptionFactory.construct(minNumberOfCPUCores,
minSizeOfPhysicalMemory, minSizeOfFreeMemory);
} else {
if (highestAccommodationIndex < i) { // Since highestAccommodationIndex smaller than my index, the
// target instance must be more powerful
// NOTE(review): if no type accommodated any instances,
// highestAccommodationIndex is still -1 here and get(-1) would
// throw IndexOutOfBoundsException — confirm this cannot occur.
final InstanceTypeDescription descriptionOfLargerInstanceType = instanceTypeDescriptionList
.get(highestAccommodationIndex);
if (descriptionOfLargerInstanceType.getHardwareDescription() != null) {
// Scale the larger type's hardware down by how many of this
// type fit inside it.
final HardwareDescription hardwareDescriptionOfLargerInstanceType = descriptionOfLargerInstanceType
.getHardwareDescription();
final int numCores = hardwareDescriptionOfLargerInstanceType.getNumberOfCPUCores()
/ highestAccommodationNumber;
final long physMem = hardwareDescriptionOfLargerInstanceType.getSizeOfPhysicalMemory()
/ highestAccommodationNumber;
final long freeMem = hardwareDescriptionOfLargerInstanceType.getSizeOfFreeMemory()
/ highestAccommodationNumber;
pessimisticHardwareDescription = HardwareDescriptionFactory.construct(numCores, physMem,
freeMem);
}
}
}
instanceTypeDescriptionList.add(InstanceTypeDescriptionFactory.construct(currentInstanceType,
pessimisticHardwareDescription, numberOfInstances[i]));
}
// Publish the computed descriptions into the map.
final Iterator<InstanceTypeDescription> it = instanceTypeDescriptionList.iterator();
while (it.hasNext()) {
final InstanceTypeDescription itd = it.next();
this.instanceTypeDescriptionMap.put(itd.getInstanceType(), itd);
}
} } | public class class_name {
private void updateInstaceTypeDescriptionMap() {
// this.registeredHosts.values().iterator()
this.instanceTypeDescriptionMap.clear();
final List<InstanceTypeDescription> instanceTypeDescriptionList = new ArrayList<InstanceTypeDescription>();
// initialize array which stores the availability counter for each instance type
final int[] numberOfInstances = new int[this.availableInstanceTypes.length];
for (int i = 0; i < numberOfInstances.length; i++) {
numberOfInstances[i] = 0; // depends on control dependency: [for], data = [i]
}
// Shuffle through instance types
for (int i = 0; i < this.availableInstanceTypes.length; i++) {
final InstanceType currentInstanceType = this.availableInstanceTypes[i];
int numberOfMatchingInstances = 0;
int minNumberOfCPUCores = Integer.MAX_VALUE;
long minSizeOfPhysicalMemory = Long.MAX_VALUE;
long minSizeOfFreeMemory = Long.MAX_VALUE;
final Iterator<ClusterInstance> it = this.registeredHosts.values().iterator();
while (it.hasNext()) {
final ClusterInstance clusterInstance = it.next();
if (clusterInstance.getType().equals(currentInstanceType)) {
++numberOfMatchingInstances; // depends on control dependency: [if], data = [none]
final HardwareDescription hardwareDescription = clusterInstance.getHardwareDescription();
minNumberOfCPUCores = Math.min(minNumberOfCPUCores, hardwareDescription.getNumberOfCPUCores()); // depends on control dependency: [if], data = [none]
minSizeOfPhysicalMemory = Math.min(minSizeOfPhysicalMemory,
hardwareDescription.getSizeOfPhysicalMemory()); // depends on control dependency: [if], data = [none]
minSizeOfFreeMemory = Math.min(minSizeOfFreeMemory, hardwareDescription.getSizeOfFreeMemory()); // depends on control dependency: [if], data = [none]
}
}
// Update number of instances
int highestAccommodationNumber = -1;
int highestAccommodationIndex = -1;
for (int j = 0; j < this.availableInstanceTypes.length; j++) {
final int accommodationNumber = canBeAccommodated(j, i);
// LOG.debug(this.availableInstanceTypes[j].getIdentifier() + " fits into "
// + this.availableInstanceTypes[i].getIdentifier() + " " + accommodationNumber + " times");
if (accommodationNumber > 0) {
numberOfInstances[j] += numberOfMatchingInstances * accommodationNumber; // depends on control dependency: [if], data = [none]
if (accommodationNumber > highestAccommodationNumber) {
highestAccommodationNumber = accommodationNumber; // depends on control dependency: [if], data = [none]
highestAccommodationIndex = j; // depends on control dependency: [if], data = [none]
}
}
}
// Calculate hardware description
HardwareDescription pessimisticHardwareDescription = null;
if (minNumberOfCPUCores < Integer.MAX_VALUE && minSizeOfPhysicalMemory < Long.MAX_VALUE
&& minSizeOfFreeMemory < Long.MAX_VALUE) {
pessimisticHardwareDescription = HardwareDescriptionFactory.construct(minNumberOfCPUCores,
minSizeOfPhysicalMemory, minSizeOfFreeMemory); // depends on control dependency: [if], data = [none]
} else {
if (highestAccommodationIndex < i) { // Since highestAccommodationIndex smaller than my index, the
// target instance must be more powerful
final InstanceTypeDescription descriptionOfLargerInstanceType = instanceTypeDescriptionList
.get(highestAccommodationIndex);
if (descriptionOfLargerInstanceType.getHardwareDescription() != null) {
final HardwareDescription hardwareDescriptionOfLargerInstanceType = descriptionOfLargerInstanceType
.getHardwareDescription();
final int numCores = hardwareDescriptionOfLargerInstanceType.getNumberOfCPUCores()
/ highestAccommodationNumber;
final long physMem = hardwareDescriptionOfLargerInstanceType.getSizeOfPhysicalMemory()
/ highestAccommodationNumber;
final long freeMem = hardwareDescriptionOfLargerInstanceType.getSizeOfFreeMemory()
/ highestAccommodationNumber;
pessimisticHardwareDescription = HardwareDescriptionFactory.construct(numCores, physMem,
freeMem); // depends on control dependency: [if], data = [none]
}
}
}
instanceTypeDescriptionList.add(InstanceTypeDescriptionFactory.construct(currentInstanceType,
pessimisticHardwareDescription, numberOfInstances[i])); // depends on control dependency: [for], data = [none]
}
final Iterator<InstanceTypeDescription> it = instanceTypeDescriptionList.iterator();
while (it.hasNext()) {
final InstanceTypeDescription itd = it.next();
this.instanceTypeDescriptionMap.put(itd.getInstanceType(), itd); // depends on control dependency: [while], data = [none]
}
} } |
public class class_name {
/**
 * Collects all {@code <json-path>} child elements of the given element into
 * the supplied map, keyed by the {@code expression} attribute with the
 * {@code value} attribute as the value. Entries with a blank expression are
 * skipped. (The original redundant {@code size() > 0} guard and raw
 * Iterator loop are replaced by a plain enhanced for-loop — an empty list
 * simply iterates zero times.)
 */
public static void parseJsonPathElements(Element validateElement, Map<String, Object> validateJsonPathExpressions) {
    List<?> jsonPathElements = DomUtils.getChildElementsByTagName(validateElement, "json-path");
    for (Object element : jsonPathElements) {
        Element jsonPathElement = (Element) element;
        String expression = jsonPathElement.getAttribute("expression");
        if (StringUtils.hasText(expression)) {
            validateJsonPathExpressions.put(expression, jsonPathElement.getAttribute("value"));
        }
    }
} } | public class class_name {
public static void parseJsonPathElements(Element validateElement, Map<String, Object> validateJsonPathExpressions) {
List<?> jsonPathElements = DomUtils.getChildElementsByTagName(validateElement, "json-path");
if (jsonPathElements.size() > 0) {
for (Iterator<?> jsonPathIterator = jsonPathElements.iterator(); jsonPathIterator.hasNext();) {
Element jsonPathElement = (Element) jsonPathIterator.next();
String expression = jsonPathElement.getAttribute("expression");
if (StringUtils.hasText(expression)) {
validateJsonPathExpressions.put(expression, jsonPathElement.getAttribute("value")); // depends on control dependency: [if], data = [none]
}
}
}
} } |
public class class_name {
final public boolean save() {
    // Run the save inside a transaction; the transaction is always
    // finished, even when save(t) throws.
    final Transaction txn = new Transaction();
    try {
        boolean saved = save(txn);
        txn.setSuccessful(saved);
    } finally {
        txn.finish();
    }
    return txn.isSuccessful();
} } | public class class_name {
final public boolean save() {
Transaction t = new Transaction();
try {
t.setSuccessful(save(t)); // depends on control dependency: [try], data = [none]
} finally {
t.finish();
}
return t.isSuccessful();
} } |
public class class_name {
@Override
public void cleanUpNullReferences() {
    // Two passes: first collect keys whose values are gone (null, or a
    // Soft/Weak reference whose referent was collected), then evict them.
    // This avoids mutating the map while iterating over it.
    List<K> staleKeys = new LinkedList<>();
    for (Map.Entry<K, V> entry : map.entrySet()) {
        V value = entry.getValue();
        boolean stale = null == value
                || (value instanceof SoftReference && null == ((SoftReference) value).get())
                || (value instanceof WeakReference && null == ((WeakReference) value).get());
        if (stale) {
            staleKeys.add(entry.getKey());
        }
    }
    for (K staleKey : staleKeys) {
        map.remove(staleKey);
    }
} } | public class class_name {
@Override
public void cleanUpNullReferences() {
List<K> keys = new LinkedList<>();
for (Map.Entry<K, V> entry : map.entrySet()) {
K key = entry.getKey();
V value = entry.getValue();
if (null == value
|| (value instanceof SoftReference && null == ((SoftReference) value).get())
|| (value instanceof WeakReference && null == ((WeakReference) value).get())) {
keys.add(key); // depends on control dependency: [if], data = [none]
}
}
for (K key : keys) {
map.remove(key); // depends on control dependency: [for], data = [key]
}
} } |
public class class_name {
@Nullable
public static String getVariantForView(@NonNull View view) {
    // Scan every registered variant's binding table for this view's id.
    for (Map.Entry<String, HashMap<Integer, String>> binding : bindings.entrySet()) {
        HashMap<Integer, String> idBindings = binding.getValue();
        if (idBindings.containsKey(view.getId())) {
            return binding.getKey();
        }
    }
    // No variant is bound to this view.
    return null;
} } | public class class_name {
@Nullable
public static String getVariantForView(@NonNull View view) {
for (Map.Entry<String, HashMap<Integer, String>> entry : bindings.entrySet()) {
if (entry.getValue().containsKey(view.getId())) {
return entry.getKey(); // depends on control dependency: [if], data = [none]
}
}
return null;
} } |
public class class_name {
// Disconnects this input from the output it spends, whichever of the two
// connection styles was used (full transaction vs. single UTXO). Returns
// true only when a connected output was actually marked unspent.
public boolean disconnect() {
TransactionOutput connectedOutput;
if (outpoint.fromTx != null) {
// The outpoint is connected using a "standard" wallet, disconnect it.
connectedOutput = outpoint.fromTx.getOutput((int) outpoint.getIndex());
outpoint.fromTx = null;
} else if (outpoint.connectedOutput != null) {
// The outpoint is connected using a UTXO based wallet, disconnect it.
connectedOutput = outpoint.connectedOutput;
outpoint.connectedOutput = null;
} else {
// The outpoint is not connected, do nothing.
return false;
}
// Only unmark the output if it was spent by this very input.
if (connectedOutput != null && connectedOutput.getSpentBy() == this) {
// The outpoint was connected to an output, disconnect the output.
connectedOutput.markAsUnspent();
return true;
} else {
return false;
}
} } | public class class_name {
public boolean disconnect() {
TransactionOutput connectedOutput;
if (outpoint.fromTx != null) {
// The outpoint is connected using a "standard" wallet, disconnect it.
connectedOutput = outpoint.fromTx.getOutput((int) outpoint.getIndex()); // depends on control dependency: [if], data = [none]
outpoint.fromTx = null; // depends on control dependency: [if], data = [none]
} else if (outpoint.connectedOutput != null) {
// The outpoint is connected using a UTXO based wallet, disconnect it.
connectedOutput = outpoint.connectedOutput; // depends on control dependency: [if], data = [none]
outpoint.connectedOutput = null; // depends on control dependency: [if], data = [none]
} else {
// The outpoint is not connected, do nothing.
return false; // depends on control dependency: [if], data = [none]
}
if (connectedOutput != null && connectedOutput.getSpentBy() == this) {
// The outpoint was connected to an output, disconnect the output.
connectedOutput.markAsUnspent(); // depends on control dependency: [if], data = [none]
return true; // depends on control dependency: [if], data = [none]
} else {
return false; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
// Serializes the given map as a micro-XML document onto the stream,
// returning the write outcome. The stream is always closed, even when
// writing fails, honoring the @WillClose contract.
@Nonnull
public static ESuccess writeMap (@Nonnull final Map <String, String> aMap, @Nonnull @WillClose final OutputStream aOS)
{
ValueEnforcer.notNull (aMap, "Map");
ValueEnforcer.notNull (aOS, "OutputStream");
try
{
final IMicroDocument aDoc = createMapDocument (aMap);
return MicroWriter.writeToStream (aDoc, aOS, XMLWriterSettings.DEFAULT_XML_SETTINGS);
}
finally
{
// Close unconditionally, regardless of write success.
StreamHelper.close (aOS);
}
} } | public class class_name {
@Nonnull
public static ESuccess writeMap (@Nonnull final Map <String, String> aMap, @Nonnull @WillClose final OutputStream aOS)
{
ValueEnforcer.notNull (aMap, "Map");
ValueEnforcer.notNull (aOS, "OutputStream");
try
{
final IMicroDocument aDoc = createMapDocument (aMap);
return MicroWriter.writeToStream (aDoc, aOS, XMLWriterSettings.DEFAULT_XML_SETTINGS); // depends on control dependency: [try], data = [none]
}
finally
{
StreamHelper.close (aOS);
}
} } |
public class class_name {
// Opens the selector, binds the internal listener socket (on the configured
// interface, or all interfaces when none is configured) and registers every
// listener socket with the selector for OP_ACCEPT.
private void doBind() throws Exception {
LOG.debug("Creating listener socket");
try {
m_selector = Selector.open();
} catch (IOException e) {
throw new RuntimeException(e);
}
ServerSocketChannel listenerSocket = ServerSocketChannel.open();
InetSocketAddress inetsockaddr;
if ((m_internalInterface == null) || (m_internalInterface.length() == 0)) {
// No interface configured: bind the port on all interfaces.
inetsockaddr = new InetSocketAddress(m_internalPort);
}
else {
inetsockaddr = new InetSocketAddress(m_internalInterface, m_internalPort);
}
try {
hostLog.info("Attempting to bind to internal ip " + inetsockaddr);
listenerSocket.socket().bind(inetsockaddr);
listenerSocket.configureBlocking(false);
m_listenerSockets.add(listenerSocket);
} catch (Exception e) {
/*
* If we bound to the leader address, the internal interface address might not
* bind if it is all interfaces
*/
// Only fatal if no listener socket was bound at all.
if (m_listenerSockets.isEmpty()) {
LOG.fatal("Failed to bind to " + inetsockaddr);
CoreUtils.printPortsInUse(hostLog);
throw e;
}
}
for (ServerSocketChannel ssc : m_listenerSockets) {
ssc.register(m_selector, SelectionKey.OP_ACCEPT);
}
if (LOG.isDebugEnabled()) {
LOG.debug("Non-Primary Listening on:" + inetsockaddr.toString());
}
} } | public class class_name {
private void doBind() throws Exception {
LOG.debug("Creating listener socket");
try {
m_selector = Selector.open();
} catch (IOException e) {
throw new RuntimeException(e);
}
ServerSocketChannel listenerSocket = ServerSocketChannel.open();
InetSocketAddress inetsockaddr;
if ((m_internalInterface == null) || (m_internalInterface.length() == 0)) {
inetsockaddr = new InetSocketAddress(m_internalPort);
}
else {
inetsockaddr = new InetSocketAddress(m_internalInterface, m_internalPort);
}
try {
hostLog.info("Attempting to bind to internal ip " + inetsockaddr);
listenerSocket.socket().bind(inetsockaddr);
listenerSocket.configureBlocking(false);
m_listenerSockets.add(listenerSocket);
} catch (Exception e) {
/*
* If we bound to the leader address, the internal interface address might not
* bind if it is all interfaces
*/
if (m_listenerSockets.isEmpty()) {
LOG.fatal("Failed to bind to " + inetsockaddr); // depends on control dependency: [if], data = [none]
CoreUtils.printPortsInUse(hostLog); // depends on control dependency: [if], data = [none]
throw e;
}
}
for (ServerSocketChannel ssc : m_listenerSockets) {
ssc.register(m_selector, SelectionKey.OP_ACCEPT);
}
if (LOG.isDebugEnabled()) {
LOG.debug("Non-Primary Listening on:" + inetsockaddr.toString());
}
} } |
public class class_name {
/**
 * Removes the mapping for {@code key} and returns the referent held by its
 * weak entry, or {@code null} when there was no mapping or the referent was
 * already collected. Also purges any entries whose referents are gone.
 */
public final Object remove(Object key)
{
    Object referent = null;
    final WeakEntry entry = (WeakEntry) super.remove(key);
    if (entry != null) {
        referent = entry.get();
        entry.clear();
    }
    clearUnreferencedEntries();
    return referent;
} } | public class class_name {
// Dependency-annotated variant of the preceding snippet; the trailing
// "// depends on control dependency: ..." comments are generated labels — keep them verbatim.
public final Object remove(Object key)
{
WeakEntry weakEntry = (WeakEntry) super.remove(key);
Object value = null;
if (weakEntry != null) {
value = weakEntry.get(); // depends on control dependency: [if], data = [none]
weakEntry.clear(); // depends on control dependency: [if], data = [none]
} // if (weakEntry != null).
clearUnreferencedEntries();
return value;
} } |
public class class_name {
/**
 * Lazily resolves and caches the peer address: a random client port when this
 * side is the server, otherwise the server port guessed from the session
 * protocol and authority. Always on {@code NetUtil.LOCALHOST}.
 */
protected final InetSocketAddress remoteAddress() {
    if (remoteAddress == null) {
        final int port = server
                ? randomClientPort()
                : guessServerPort(sessionProtocol, authority);
        remoteAddress = new InetSocketAddress(NetUtil.LOCALHOST, port);
    }
    return remoteAddress;
} } | public class class_name {
// Dependency-annotated variant of the preceding snippet; the trailing
// "// depends on control dependency: ..." comments are generated labels — keep them verbatim.
protected final InetSocketAddress remoteAddress() {
if (remoteAddress == null) {
if (server) {
remoteAddress = new InetSocketAddress(NetUtil.LOCALHOST, randomClientPort()); // depends on control dependency: [if], data = [none]
} else {
remoteAddress = new InetSocketAddress(NetUtil.LOCALHOST,
guessServerPort(sessionProtocol, authority)); // depends on control dependency: [if], data = [none]
}
}
return remoteAddress;
} } |
public class class_name {
/**
 * Applies the com.ibm.ssl.performURLHostNameVerification global property.
 * When the property is unset, "false", or "no", URL host name verification is
 * disabled by installing an accept-everything default HostnameVerifier; the
 * CWPKI0027I notice is emitted only on first initialization.
 */
public synchronized void checkURLHostNameVerificationProperty(boolean reinitialize) {
    final String prop = getGlobalProperty(Constants.SSLPROP_URL_HOSTNAME_VERIFICATION);
    final boolean verificationDisabled = (prop == null)
            || "false".equalsIgnoreCase(prop)
            || "no".equalsIgnoreCase(prop);
    if (!verificationDisabled) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            Tr.debug(tc, "com.ibm.ssl.performURLHostNameVerification enabled");
        return;
    }
    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
        Tr.debug(tc, "com.ibm.ssl.performURLHostNameVerification disabled");
    // Verification is intentionally off: accept every host.
    HttpsURLConnection.setDefaultHostnameVerifier(new HostnameVerifier() {
        @Override
        public boolean verify(String urlHostname, SSLSession session) {
            return true;
        }
    });
    if (!reinitialize) {
        Tr.info(tc, "ssl.disable.url.hostname.verification.CWPKI0027I");
    }
} } | public class class_name {
// Dependency-annotated variant of the preceding snippet; the trailing
// "// depends on control dependency: ..." comments are generated labels — keep them verbatim.
public synchronized void checkURLHostNameVerificationProperty(boolean reinitialize) {
// enable/disable hostname verification
String urlHostNameVerification = getGlobalProperty(Constants.SSLPROP_URL_HOSTNAME_VERIFICATION);
if (urlHostNameVerification == null || urlHostNameVerification.equalsIgnoreCase("false") || urlHostNameVerification.equalsIgnoreCase("no")) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
Tr.debug(tc, "com.ibm.ssl.performURLHostNameVerification disabled");
HostnameVerifier verifier = new HostnameVerifier() {
@Override
public boolean verify(String urlHostname, SSLSession session) {
return true;
}
};
HttpsURLConnection.setDefaultHostnameVerifier(verifier); // depends on control dependency: [if], data = [none]
if (!reinitialize) {
Tr.info(tc, "ssl.disable.url.hostname.verification.CWPKI0027I"); // depends on control dependency: [if], data = [none]
}
} else {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
Tr.debug(tc, "com.ibm.ssl.performURLHostNameVerification enabled");
}
} } |
public class class_name {
/**
 * Action entry point: dispatches the normalized jsTree request against a
 * Commons VFS tree rooted at rootPath and stores the outcome in responseData.
 * Supported operations: get-children, remove, create, rename, move/copy.
 * Always returns SUCCESS; per-operation failures are logged and reported
 * through the JsTreeResult status instead of being propagated.
 * NOTE(review): all resolved FileObjects are closed in the finally block via
 * VfsUtility.close, so every branch must assign file/referenceFile rather
 * than use extra locals.
 */
public String execute(){
normalizeTreeRequest();
JsTreeResult result = new JsTreeResult();
final AllFileSelector ALL_FILES = new AllFileSelector();
FileSystemManager fsManager = null;
FileObject rootFile = null;
FileObject file = null;
FileObject referenceFile = null;
try{
fsManager = VfsUtility.getManager();
rootFile = fsManager.resolveFile(rootPath, fsOptions);
// List the children of the requested node; optionally wrap them in a
// synthetic open root node when this is the root request.
if (JsTreeRequest.OP_GET_CHILDREN.equalsIgnoreCase(requestData.getOperation())){
String parentPath = requestData.getId();
List<JsTreeNodeData> nodes = null;
try{
nodes = getChildNodes(rootFile, parentPath);
if (requestData.isRoot() && rootNodeName != null){ // add root node
JsTreeNodeData rootNode = new JsTreeNodeData();
rootNode.setData(rootNodeName);
Map<String, Object> attr = new HashMap<String, Object>();
rootNode.setAttr(attr);
attr.put("id", ".");
attr.put("rel", "root");
attr.put("fileType", FileType.FOLDER.toString());
rootNode.setChildren(nodes);
rootNode.setState(JsTreeNodeData.STATE_OPEN);
nodes = new LinkedList<JsTreeNodeData>();
nodes.add(rootNode);
}
}catch(Exception e){
// Degrade to an empty listing rather than failing the request.
log.error("Cannot get child nodes for: " + parentPath, e);
nodes = new LinkedList<JsTreeNodeData>();
}
responseData = nodes;
// Delete a node: plain delete for files, recursive delete for folders.
} else if (JsTreeRequest.OP_REMOVE_NODE.equalsIgnoreCase(requestData.getOperation())){
String path = requestData.getId();
try{
file = rootFile.resolveFile(path, NameScope.DESCENDENT);
boolean wasDeleted = false;
if (file.getType() == FileType.FILE){
wasDeleted = file.delete();
}else{
wasDeleted = file.delete(ALL_FILES) > 0;
}
result.setStatus(wasDeleted);
} catch (Exception e){
result.setStatus(false);
log.error("Cannot delete: " + path, e);
}
responseData = result;
// Create a new folder under the reference node and report its root-relative id.
} else if (JsTreeRequest.OP_CREATE_NODE.equalsIgnoreCase(requestData.getOperation())){
String parentPath = requestData.getReferenceId();
String name = requestData.getTitle();
try{
referenceFile = rootFile.resolveFile(parentPath, NameScope.DESCENDENT_OR_SELF);
file = referenceFile.resolveFile(name, NameScope.CHILD);
file.createFolder();
result.setStatus(true);
result.setId(rootFile.getName().getRelativeName(file.getName()));
} catch (Exception e){
result.setStatus(false);
log.error("Cannot create folder '" + name + "' under '" + parentPath + "'", e);
}
responseData = result;
// Rename: move the node to a sibling path with the new title.
} else if (JsTreeRequest.OP_RENAME_NODE.equalsIgnoreCase(requestData.getOperation())){
String path = requestData.getId();
String name = requestData.getTitle();
try{
referenceFile = rootFile.resolveFile(path, NameScope.DESCENDENT);
file = referenceFile.getParent().resolveFile(name, NameScope.CHILD);
referenceFile.moveTo(file);
result.setStatus(true);
}catch(Exception e){
result.setStatus(false);
log.error("Cannot rename '" + path + "' to '" + name + "'", e);
}
responseData = result;
// Move or copy the node under a new parent, keeping its base name.
} else if (JsTreeRequest.OP_MOVE_NODE.equalsIgnoreCase(requestData.getOperation())){
String newParentPath = requestData.getReferenceId();
String originalPath = requestData.getId();
try{
referenceFile = rootFile.resolveFile(originalPath, NameScope.DESCENDENT);
file = rootFile.resolveFile(newParentPath, NameScope.DESCENDENT_OR_SELF)
.resolveFile(referenceFile.getName().getBaseName(), NameScope.CHILD);
if (requestData.isCopy()){
file.copyFrom(referenceFile, ALL_FILES);
}else{
referenceFile.moveTo(file);
}
result.setStatus(true);
}catch(Exception e){
result.setStatus(false);
log.error("Cannot move '" + originalPath + "' to '" + newParentPath + "'", e);
}
responseData = result;
}
} catch (FileSystemException e) {
log.error("Cannot perform file operation.", e);
}finally{
VfsUtility.close(fsManager, file, referenceFile, rootFile);
}
return SUCCESS;
} } | public class class_name {
// Dependency-annotated variant of the preceding snippet; the standalone and trailing
// "// depends on control dependency: ..." comments are generated labels — keep them verbatim.
public String execute(){
normalizeTreeRequest();
JsTreeResult result = new JsTreeResult();
final AllFileSelector ALL_FILES = new AllFileSelector();
FileSystemManager fsManager = null;
FileObject rootFile = null;
FileObject file = null;
FileObject referenceFile = null;
try{
fsManager = VfsUtility.getManager();
rootFile = fsManager.resolveFile(rootPath, fsOptions);
if (JsTreeRequest.OP_GET_CHILDREN.equalsIgnoreCase(requestData.getOperation())){
String parentPath = requestData.getId();
List<JsTreeNodeData> nodes = null;
try{
nodes = getChildNodes(rootFile, parentPath);
// depends on control dependency: [try], data = [none]
if (requestData.isRoot() && rootNodeName != null){ // add root node
JsTreeNodeData rootNode = new JsTreeNodeData();
rootNode.setData(rootNodeName);
// depends on control dependency: [if], data = [none]
Map<String, Object> attr = new HashMap<String, Object>();
rootNode.setAttr(attr);
// depends on control dependency: [if], data = [none]
attr.put("id", ".");
// depends on control dependency: [if], data = [none]
attr.put("rel", "root");
// depends on control dependency: [if], data = [none]
attr.put("fileType", FileType.FOLDER.toString());
// depends on control dependency: [if], data = [none]
rootNode.setChildren(nodes);
// depends on control dependency: [if], data = [none]
rootNode.setState(JsTreeNodeData.STATE_OPEN);
// depends on control dependency: [if], data = [none]
nodes = new LinkedList<JsTreeNodeData>();
// depends on control dependency: [if], data = [none]
nodes.add(rootNode);
// depends on control dependency: [if], data = [none]
}
}catch(Exception e){
log.error("Cannot get child nodes for: " + parentPath, e);
nodes = new LinkedList<JsTreeNodeData>();
}
// depends on control dependency: [catch], data = [none]
responseData = nodes;
} else if (JsTreeRequest.OP_REMOVE_NODE.equalsIgnoreCase(requestData.getOperation())){
String path = requestData.getId();
try{
file = rootFile.resolveFile(path, NameScope.DESCENDENT);
boolean wasDeleted = false;
if (file.getType() == FileType.FILE){
wasDeleted = file.delete();
// depends on control dependency: [if], data = [none]
}else{
wasDeleted = file.delete(ALL_FILES) > 0;
// depends on control dependency: [if], data = [none]
}
result.setStatus(wasDeleted);
} catch (Exception e){
result.setStatus(false);
log.error("Cannot delete: " + path, e);
}
responseData = result;
} else if (JsTreeRequest.OP_CREATE_NODE.equalsIgnoreCase(requestData.getOperation())){
String parentPath = requestData.getReferenceId();
String name = requestData.getTitle();
try{
referenceFile = rootFile.resolveFile(parentPath, NameScope.DESCENDENT_OR_SELF);
file = referenceFile.resolveFile(name, NameScope.CHILD);
file.createFolder();
result.setStatus(true);
result.setId(rootFile.getName().getRelativeName(file.getName()));
} catch (Exception e){
result.setStatus(false);
log.error("Cannot create folder '" + name + "' under '" + parentPath + "'", e);
}
responseData = result;
} else if (JsTreeRequest.OP_RENAME_NODE.equalsIgnoreCase(requestData.getOperation())){
String path = requestData.getId();
String name = requestData.getTitle();
try{
referenceFile = rootFile.resolveFile(path, NameScope.DESCENDENT);
file = referenceFile.getParent().resolveFile(name, NameScope.CHILD);
referenceFile.moveTo(file);
result.setStatus(true);
}catch(Exception e){
result.setStatus(false);
log.error("Cannot rename '" + path + "' to '" + name + "'", e);
}
responseData = result;
} else if (JsTreeRequest.OP_MOVE_NODE.equalsIgnoreCase(requestData.getOperation())){
String newParentPath = requestData.getReferenceId();
String originalPath = requestData.getId();
try{
referenceFile = rootFile.resolveFile(originalPath, NameScope.DESCENDENT);
file = rootFile.resolveFile(newParentPath, NameScope.DESCENDENT_OR_SELF)
.resolveFile(referenceFile.getName().getBaseName(), NameScope.CHILD);
if (requestData.isCopy()){
file.copyFrom(referenceFile, ALL_FILES);
}else{
referenceFile.moveTo(file);
}
result.setStatus(true);
}catch(Exception e){
result.setStatus(false);
log.error("Cannot move '" + originalPath + "' to '" + newParentPath + "'", e);
}
responseData = result;
}
} catch (FileSystemException e) {
log.error("Cannot perform file operation.", e);
}finally{
VfsUtility.close(fsManager, file, referenceFile, rootFile);
}
return SUCCESS;
} } |
public class class_name {
/**
 * Starts the flush scheduler. Idempotent: calling start() while already
 * running only logs and returns. If the background thread cannot be created,
 * falls back to inline flushing via initializeFlushing().
 */
public synchronized void start() {
    if (running) {
        log.atInfo().log("%s is already started", this);
        return;
    }
    log.atInfo().log("starting %s", this);
    this.stopped = false;
    this.running = true;
    this.reportStopwatch.reset().start();
    try {
        final Runnable flushLoop = new Runnable() {
            @Override
            public void run() {
                scheduleFlushes();
            }
        };
        schedulerThread = threads.newThread(flushLoop);
        schedulerThread.start();
    } catch (RuntimeException e) {
        // Thread creation failed; run flushes without a scheduler thread.
        log.atInfo().log(BACKGROUND_THREAD_ERROR);
        schedulerThread = null;
        initializeFlushing();
    }
} } | public class class_name {
// Dependency-annotated variant of the preceding snippet; the trailing
// "// depends on control dependency: ..." comments are generated labels — keep them verbatim.
public synchronized void start() {
if (running) {
log.atInfo().log("%s is already started", this); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
log.atInfo().log("starting %s", this);
this.stopped = false;
this.running = true;
this.reportStopwatch.reset().start();
try {
schedulerThread = threads.newThread(new Runnable() {
@Override
public void run() {
scheduleFlushes();
}
}); // depends on control dependency: [try], data = [none]
schedulerThread.start(); // depends on control dependency: [try], data = [none]
} catch (RuntimeException e) {
log.atInfo().log(BACKGROUND_THREAD_ERROR);
schedulerThread = null;
initializeFlushing();
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
/**
 * Binary search over the inclusive index range [low, high] using
 * {@code compare(mid)} as the probe. Returns the matching index, or
 * {@code -(insertionPoint + 1)} when absent — the same encoding as
 * {@code java.util.Arrays#binarySearch}.
 */
public int find(int low, int high) {
    int lo = low;
    int hi = high;
    while (lo <= hi) {
        final int mid = (lo + hi) >>> 1; // overflow-safe midpoint
        final int cmp = compare(mid);
        if (cmp == 0) {
            return mid;
        }
        if (cmp < 0) {
            lo = mid + 1;
        } else {
            hi = mid - 1;
        }
    }
    // Not found: encode the insertion point.
    return -(lo + 1);
} } | public class class_name {
// Dependency-annotated variant of the preceding snippet; the trailing
// "// depends on control dependency: ..." comments are generated labels — keep them verbatim.
public int find(int low, int high) {
while (low <= high) {
int mid = (low + high) >>> 1;
int delta = compare(mid);
if (delta < 0) {
low = mid + 1; // depends on control dependency: [if], data = [none]
} else if (delta > 0) {
high = mid - 1; // depends on control dependency: [if], data = [none]
} else {
return mid; // depends on control dependency: [if], data = [none]
}
}
// not found
return -(low + 1);
} } |
public class class_name {
/**
 * Resolves the Java type for a schema node, in priority order: explicit
 * objects (or nodes with non-empty "properties"), an "existingJavaType"
 * override (primitive or resolved class), the JSON primitives
 * string/number/integer/boolean, arrays, and finally java.lang.Object.
 * When no explicit Java type was requested, a "format" rule — or, for
 * strings, a "media" rule — may refine the resolved type.
 */
@Override
public JType apply(String nodeName, JsonNode node, JsonNode parent, JClassContainer jClassContainer, Schema schema) {
String propertyTypeName = getTypeName(node);
JType type;
// NOTE(review): the object check runs before existingJavaType, so a node with
// properties wins even if it also carries an existingJavaType — confirm intended.
if (propertyTypeName.equals("object") || node.has("properties") && node.path("properties").size() > 0) {
type = ruleFactory.getObjectRule().apply(nodeName, node, parent, jClassContainer.getPackage(), schema);
} else if (node.has("existingJavaType")) {
String typeName = node.path("existingJavaType").asText();
if (isPrimitive(typeName, jClassContainer.owner())) {
type = primitiveType(typeName, jClassContainer.owner());
} else {
type = resolveType(jClassContainer, typeName);
}
} else if (propertyTypeName.equals("string")) {
type = jClassContainer.owner().ref(String.class);
} else if (propertyTypeName.equals("number")) {
type = getNumberType(jClassContainer.owner(), ruleFactory.getGenerationConfig());
} else if (propertyTypeName.equals("integer")) {
type = getIntegerType(jClassContainer.owner(), node, ruleFactory.getGenerationConfig());
} else if (propertyTypeName.equals("boolean")) {
type = unboxIfNecessary(jClassContainer.owner().ref(Boolean.class), ruleFactory.getGenerationConfig());
} else if (propertyTypeName.equals("array")) {
type = ruleFactory.getArrayRule().apply(nodeName, node, parent, jClassContainer.getPackage(), schema);
} else {
type = jClassContainer.owner().ref(Object.class);
}
// Refinements only apply when the user did not pin a Java type explicitly.
if (!node.has("javaType") && !node.has("existingJavaType") && node.has("format")) {
type = ruleFactory.getFormatRule().apply(nodeName, node.get("format"), node, type, schema);
} else if (!node.has("javaType") && !node.has("existingJavaType") && propertyTypeName.equals("string") && node.has("media")) {
type = ruleFactory.getMediaRule().apply(nodeName, node.get("media"), node, type, schema);
}
return type;
} } | public class class_name {
// Dependency-annotated variant of the preceding snippet; the trailing
// "// depends on control dependency: ..." comments are generated labels — keep them verbatim.
@Override
public JType apply(String nodeName, JsonNode node, JsonNode parent, JClassContainer jClassContainer, Schema schema) {
String propertyTypeName = getTypeName(node);
JType type;
if (propertyTypeName.equals("object") || node.has("properties") && node.path("properties").size() > 0) {
type = ruleFactory.getObjectRule().apply(nodeName, node, parent, jClassContainer.getPackage(), schema); // depends on control dependency: [if], data = [none]
} else if (node.has("existingJavaType")) {
String typeName = node.path("existingJavaType").asText();
if (isPrimitive(typeName, jClassContainer.owner())) {
type = primitiveType(typeName, jClassContainer.owner()); // depends on control dependency: [if], data = [none]
} else {
type = resolveType(jClassContainer, typeName); // depends on control dependency: [if], data = [none]
}
} else if (propertyTypeName.equals("string")) {
type = jClassContainer.owner().ref(String.class); // depends on control dependency: [if], data = [none]
} else if (propertyTypeName.equals("number")) {
type = getNumberType(jClassContainer.owner(), ruleFactory.getGenerationConfig()); // depends on control dependency: [if], data = [none]
} else if (propertyTypeName.equals("integer")) {
type = getIntegerType(jClassContainer.owner(), node, ruleFactory.getGenerationConfig()); // depends on control dependency: [if], data = [none]
} else if (propertyTypeName.equals("boolean")) {
type = unboxIfNecessary(jClassContainer.owner().ref(Boolean.class), ruleFactory.getGenerationConfig()); // depends on control dependency: [if], data = [none]
} else if (propertyTypeName.equals("array")) {
type = ruleFactory.getArrayRule().apply(nodeName, node, parent, jClassContainer.getPackage(), schema); // depends on control dependency: [if], data = [none]
} else {
type = jClassContainer.owner().ref(Object.class); // depends on control dependency: [if], data = [none]
}
if (!node.has("javaType") && !node.has("existingJavaType") && node.has("format")) {
type = ruleFactory.getFormatRule().apply(nodeName, node.get("format"), node, type, schema); // depends on control dependency: [if], data = [none]
} else if (!node.has("javaType") && !node.has("existingJavaType") && propertyTypeName.equals("string") && node.has("media")) {
type = ruleFactory.getMediaRule().apply(nodeName, node.get("media"), node, type, schema); // depends on control dependency: [if], data = [none]
}
return type;
} } |
public class class_name {
/**
 * Replaces the stored SSH public key metadata with a defensive copy of the
 * given collection; a {@code null} argument clears the field entirely.
 */
public void setSSHPublicKeys(java.util.Collection<SSHPublicKeyMetadata> sSHPublicKeys) {
    this.sSHPublicKeys = (sSHPublicKeys == null)
            ? null
            : new com.amazonaws.internal.SdkInternalList<SSHPublicKeyMetadata>(sSHPublicKeys);
} } | public class class_name {
// Dependency-annotated variant of the preceding snippet; the trailing
// "// depends on control dependency: ..." comments are generated labels — keep them verbatim.
public void setSSHPublicKeys(java.util.Collection<SSHPublicKeyMetadata> sSHPublicKeys) {
if (sSHPublicKeys == null) {
this.sSHPublicKeys = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.sSHPublicKeys = new com.amazonaws.internal.SdkInternalList<SSHPublicKeyMetadata>(sSHPublicKeys);
} } |
public class class_name {
/**
 * Stores {@code value} under {@code tagType}. The first write for a tag also
 * records a Tag in the ordered defined-tag list; later writes silently
 * overwrite the mapped value.
 * @throws NullPointerException if {@code value} is null
 */
@java.lang.SuppressWarnings( { "ConstantConditions", "UnnecessaryBoxing" })
public void setObject(int tagType, @NotNull Object value)
{
    if (value == null)
        throw new NullPointerException("cannot set a null object");
    final boolean firstWriteForTag = !_tagMap.containsKey(Integer.valueOf(tagType));
    if (firstWriteForTag) {
        _definedTagList.add(new Tag(tagType, this));
    }
    _tagMap.put(tagType, value);
} } | public class class_name {
// Dependency-annotated variant of the preceding snippet; the trailing
// "// depends on control dependency: ..." comments are generated labels — keep them verbatim.
@java.lang.SuppressWarnings( { "ConstantConditions", "UnnecessaryBoxing" })
public void setObject(int tagType, @NotNull Object value)
{
if (value == null)
throw new NullPointerException("cannot set a null object");
if (!_tagMap.containsKey(Integer.valueOf(tagType))) {
_definedTagList.add(new Tag(tagType, this)); // depends on control dependency: [if], data = [none]
}
// else {
// final Object oldValue = _tagMap.get(tagType);
// if (!oldValue.equals(value))
// addError(String.format("Overwritten tag 0x%s (%s). Old=%s, New=%s", Integer.toHexString(tagType), getTagName(tagType), oldValue, value));
// }
_tagMap.put(tagType, value);
} } |
public class class_name {
/**
 * Determines the web reply for a request. If the initial checks already yield
 * a definitive reply it is audited and returned; otherwise the request is
 * authenticated and the authenticated overload decides the reply.
 */
public WebReply determineWebReply(Subject receivedSubject, String uriName, WebRequest webRequest) {
    final WebReply earlyReply = performInitialChecks(webRequest, uriName);
    if (earlyReply == null) {
        final AuthenticationResult authResult = authenticateRequest(webRequest);
        return determineWebReply(receivedSubject, uriName, webRequest, authResult);
    }
    logAuditEntriesBeforeAuthn(earlyReply, receivedSubject, uriName, webRequest);
    return earlyReply;
} } | public class class_name {
// Dependency-annotated variant of the preceding snippet; the trailing
// "// depends on control dependency: ..." comments are generated labels — keep them verbatim.
public WebReply determineWebReply(Subject receivedSubject, String uriName, WebRequest webRequest) {
WebReply webReply = performInitialChecks(webRequest, uriName);
if (webReply != null) {
logAuditEntriesBeforeAuthn(webReply, receivedSubject, uriName, webRequest); // depends on control dependency: [if], data = [(webReply]
return webReply; // depends on control dependency: [if], data = [none]
}
AuthenticationResult authResult = authenticateRequest(webRequest);
return determineWebReply(receivedSubject, uriName, webRequest, authResult);
} } |
public class class_name {
/**
 * Returns the header value at {@code position}, or {@code null} when the
 * headers cannot be read (IOExceptions are swallowed by design to honor the
 * HttpURLConnection contract).
 */
@Override public final String getHeaderField(int position) {
    try {
        return getHeaders().value(position);
    } catch (IOException ignored) {
        return null;
    }
} } | public class class_name {
// Dependency-annotated variant of the preceding snippet; the trailing
// "// depends on control dependency: ..." comments are generated labels — keep them verbatim.
@Override public final String getHeaderField(int position) {
try {
return getHeaders().value(position); // depends on control dependency: [try], data = [none]
} catch (IOException e) {
return null;
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
/**
 * Collects, depth-first, every key in {@code keysRefMap} whose value chain
 * leads back to {@code key}: direct children are keys mapped to {@code key},
 * and each child that is itself referenced as a value is expanded recursively.
 */
private List<String> getKeysList(final String key, final Map<String, String> keysRefMap) {
    final List<String> result = new ArrayList<>();
    for (Entry<String, String> entry : keysRefMap.entrySet()) {
        if (!entry.getValue().equals(key)) {
            continue;
        }
        final String childKey = entry.getKey();
        result.add(childKey);
        // The child is referenced as a value too: recurse for deeper levels.
        if (keysRefMap.containsValue(childKey)) {
            result.addAll(getKeysList(childKey, keysRefMap));
        }
    }
    return result;
} } | public class class_name {
// Dependency-annotated variant of the preceding snippet; the trailing
// "// depends on control dependency: ..." comments are generated labels — keep them verbatim.
private List<String> getKeysList(final String key, final Map<String, String> keysRefMap) {
final List<String> list = new ArrayList<>();
// Iterate the map to look for multi-level keys
for (Entry<String, String> entry : keysRefMap.entrySet()) {
// Multi-level key found
if (entry.getValue().equals(key)) {
// add key into the list
final String entryKey = entry.getKey();
list.add(entryKey); // depends on control dependency: [if], data = [none]
// still have multi-level keys
if (keysRefMap.containsValue(entryKey)) {
// rescuive point
final List<String> tempList = getKeysList(entryKey, keysRefMap);
list.addAll(tempList); // depends on control dependency: [if], data = [none]
}
}
}
return list;
} } |
public class class_name {
/**
 * Attaches a collector editor to the given editable element, positioning it
 * only when the parent element actually has dimensions, and records the
 * editable-to-editor mapping.
 */
private void addListCollectorEditorButtons(Element editable) {
    final CmsListCollectorEditor editor = new CmsListCollectorEditor(editable, m_clientId);
    final Element parent = editable.getParentElement();
    add(editor, parent);
    final boolean parentHasDimensions = CmsDomUtil.hasDimension(parent);
    editor.setParentHasDimensions(parentHasDimensions);
    if (parentHasDimensions) {
        editor.setPosition(CmsDomUtil.getEditablePosition(editable), getElement());
    }
    m_editables.put(editable, editor);
} } | public class class_name {
// Dependency-annotated variant of the preceding snippet; the trailing
// "// depends on control dependency: ..." comments are generated labels — keep them verbatim.
private void addListCollectorEditorButtons(Element editable) {
CmsListCollectorEditor editor = new CmsListCollectorEditor(editable, m_clientId);
add(editor, editable.getParentElement());
if (CmsDomUtil.hasDimension(editable.getParentElement())) {
editor.setParentHasDimensions(true); // depends on control dependency: [if], data = [none]
editor.setPosition(CmsDomUtil.getEditablePosition(editable), getElement()); // depends on control dependency: [if], data = [none]
} else {
editor.setParentHasDimensions(false); // depends on control dependency: [if], data = [none]
}
m_editables.put(editable, editor);
} } |
public class class_name {
/**
 * Fluent setter: appends the given filters, lazily creating the backing list
 * (sized to the varargs array) on first use. Returns {@code this} for chaining.
 */
public ListDocumentsRequest withFilters(DocumentKeyValuesFilter... filters) {
    if (this.filters == null) {
        setFilters(new com.amazonaws.internal.SdkInternalList<DocumentKeyValuesFilter>(filters.length));
    }
    for (int i = 0; i < filters.length; i++) {
        this.filters.add(filters[i]);
    }
    return this;
} } | public class class_name {
// Dependency-annotated variant of the preceding snippet; the trailing
// "// depends on control dependency: ..." comments are generated labels — keep them verbatim.
public ListDocumentsRequest withFilters(DocumentKeyValuesFilter... filters) {
if (this.filters == null) {
setFilters(new com.amazonaws.internal.SdkInternalList<DocumentKeyValuesFilter>(filters.length)); // depends on control dependency: [if], data = [none]
}
for (DocumentKeyValuesFilter ele : filters) {
this.filters.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} } |
public class class_name {
/**
 * Core RNG step (java.util.Random#next contract): advances the cellular
 * automaton by rewriting four neighboring cells through the RNG_RULE lookup
 * table under the lock, then returns the top {@code bits} bits of the word
 * assembled from the cells at {@code cellA}.
 * NOTE(review): the cell updates must run in this exact order — each write
 * feeds the next lookup; do not reorder.
 */
@Override
public int next(int bits)
{
int result;
try
{
// Lock guards the shared cell array and currentCellIndex.
lock.lock();
// Set cell addresses using address of current cell.
int cellC = currentCellIndex - 1;
int cellB = cellC - 1;
int cellA = cellB - 1;
// Update cell states using rule table.
cells[currentCellIndex] = RNG_RULE[cells[cellC] + cells[currentCellIndex]];
cells[cellC] = RNG_RULE[cells[cellB] + cells[cellC]];
cells[cellB] = RNG_RULE[cells[cellA] + cells[cellB]];
// Update the state of cellA and shift current cell to the left by 4 bytes.
if (cellA == 0)
{
// Reached the start of the automaton: wrap back to the last cell.
cells[cellA] = RNG_RULE[cells[cellA]];
currentCellIndex = AUTOMATON_LENGTH - 1;
}
else
{
cells[cellA] = RNG_RULE[cells[cellA - 1] + cells[cellA]];
currentCellIndex -= 4;
}
result = convertCellsToInt(cells, cellA);
}
finally
{
lock.unlock();
}
// Keep only the requested number of high bits.
return result >>> (32 - bits);
} } | public class class_name {
// Dependency-annotated variant of the preceding snippet; the trailing
// "// depends on control dependency: ..." comments are generated labels — keep them verbatim.
@Override
public int next(int bits)
{
int result;
try
{
lock.lock(); // depends on control dependency: [try], data = [none]
// Set cell addresses using address of current cell.
int cellC = currentCellIndex - 1;
int cellB = cellC - 1;
int cellA = cellB - 1;
// Update cell states using rule table.
cells[currentCellIndex] = RNG_RULE[cells[cellC] + cells[currentCellIndex]]; // depends on control dependency: [try], data = [none]
cells[cellC] = RNG_RULE[cells[cellB] + cells[cellC]]; // depends on control dependency: [try], data = [none]
cells[cellB] = RNG_RULE[cells[cellA] + cells[cellB]]; // depends on control dependency: [try], data = [none]
// Update the state of cellA and shift current cell to the left by 4 bytes.
if (cellA == 0)
{
cells[cellA] = RNG_RULE[cells[cellA]]; // depends on control dependency: [if], data = [none]
currentCellIndex = AUTOMATON_LENGTH - 1; // depends on control dependency: [if], data = [none]
}
else
{
cells[cellA] = RNG_RULE[cells[cellA - 1] + cells[cellA]]; // depends on control dependency: [if], data = [none]
currentCellIndex -= 4; // depends on control dependency: [if], data = [none]
}
result = convertCellsToInt(cells, cellA); // depends on control dependency: [try], data = [none]
}
finally
{
lock.unlock();
}
return result >>> (32 - bits);
} } |
public class class_name {
/**
 * Appends {@code columns.size()} new columns to every row of {@code data}.
 * Body rows are filled with a single shared {@code defaultValue} cell; row 0
 * (the header row) has its new cells overwritten with the column names, which
 * are also registered in {@code columnLookup} with their absolute indices.
 * NOTE(review): header cells are first set to the default value and then
 * replaced in the i == 0 branch — a harmless double write, but confirm.
 */
public void addColumns(ArrayList<String> columns, String defaultValue) {
// One shared cell instance for every default-valued slot.
CompactCharSequence dv = new CompactCharSequence(defaultValue);
for (int i = 0; i < data.length; i++) {
CompactCharSequence[] row = data[i];
int oldrowlength = data[i].length;
// Grow the row in place to make room for the new columns.
data[i] = (CompactCharSequence[]) resizeArray(row, oldrowlength + columns.size());
for (int c = 0; c < columns.size(); c++) {
data[i][oldrowlength + c] = dv;
}
if (i == 0) {
// Header row: write the column names and index them for lookup.
for (int c = 0; c < columns.size(); c++) {
String column = columns.get(c);
data[0][oldrowlength + c] = new CompactCharSequence(column);
columnLookup.put(column, new HeaderInfo(oldrowlength + c));
}
}
}
// columnLookup.get("ZNF30");
// int startIndex = columnLookup.size() + 1;
// for (String column : columns) {
// if(column.equals("ttr")){
// int dummy = 1;
// }
// columnLookup.put(column, new HeaderInfo(startIndex));
// startIndex++;
// }
} } | public class class_name {
// Dependency-annotated variant of the preceding snippet; the trailing
// "// depends on control dependency: ..." comments are generated labels — keep them verbatim.
public void addColumns(ArrayList<String> columns, String defaultValue) {
CompactCharSequence dv = new CompactCharSequence(defaultValue);
for (int i = 0; i < data.length; i++) {
CompactCharSequence[] row = data[i];
int oldrowlength = data[i].length;
data[i] = (CompactCharSequence[]) resizeArray(row, oldrowlength + columns.size()); // depends on control dependency: [for], data = [i]
for (int c = 0; c < columns.size(); c++) {
data[i][oldrowlength + c] = dv; // depends on control dependency: [for], data = [c]
}
if (i == 0) {
for (int c = 0; c < columns.size(); c++) {
String column = columns.get(c);
data[0][oldrowlength + c] = new CompactCharSequence(column); // depends on control dependency: [for], data = [c]
columnLookup.put(column, new HeaderInfo(oldrowlength + c)); // depends on control dependency: [for], data = [c]
}
}
}
// columnLookup.get("ZNF30");
// int startIndex = columnLookup.size() + 1;
// for (String column : columns) {
// if(column.equals("ttr")){
// int dummy = 1;
// }
// columnLookup.put(column, new HeaderInfo(startIndex));
// startIndex++;
// }
} } |
public class class_name {
/**
 * Stamps refresh and expiration times onto the token and caches it.
 * The refresh time is the expiration time minus a configurable fraction
 * ({@code iamRefreshOffset}) of the token's lifespan, so callers refresh
 * before the token actually expires. Malformed numeric fields degrade to 0.
 * Fix: the lifespan was previously parsed with Integer.parseInt, which
 * collapsed lifespans beyond Integer.MAX_VALUE seconds to 0; both fields now
 * share one long-based parser (also removes the duplicated try/catch).
 */
protected synchronized void cacheToken(final Token token) {
    log.debug("OAuthTokenManager.cacheToken");
    // Lifespan in seconds and absolute expiration time, tolerant of bad input.
    final long tokenExpiresInSecs = parseLongOrZero(token.getExpires_in());
    final long tokenExpirationTime = parseLongOrZero(token.getExpiration());
    // Refresh this fraction of the lifespan ahead of actual expiry.
    final long refreshBeforeExpirySecs = (long) (tokenExpiresInSecs * this.iamRefreshOffset);
    token.setRefreshTime(tokenExpirationTime - refreshBeforeExpirySecs);
    token.setExpirationTime(tokenExpirationTime);
    setTokenCache(token);
}

/** Parses {@code value} as a long, returning 0 when it is null or not a valid number. */
private static long parseLongOrZero(final String value) {
    try {
        return Long.parseLong(value);
    } catch (NumberFormatException exception) {
        return 0;
    }
} } | public class class_name {
// Dependency-annotated variant of the preceding snippet; the trailing
// "// depends on control dependency: ..." comments are generated labels — keep them verbatim.
protected synchronized void cacheToken(final Token token) {
log.debug("OAuthTokenManager.cacheToken");
// Parse token expires in seconds.
int tokenExpiresInSecs;
try {
tokenExpiresInSecs = Integer.parseInt(token.getExpires_in()); // depends on control dependency: [try], data = [none]
} catch (NumberFormatException exception) {
tokenExpiresInSecs = 0;
} // depends on control dependency: [catch], data = [none]
// Parse token expiration time
long tokenExpirationTime;
try {
tokenExpirationTime = Long.parseLong(token.getExpiration()); // depends on control dependency: [try], data = [none]
} catch (NumberFormatException exception) {
tokenExpirationTime = 0;
} // depends on control dependency: [catch], data = [none]
// Calculate token refresh time based on lifespan percentage offset.
long refreshBeforeExpirySecs = (long) (tokenExpiresInSecs * this.iamRefreshOffset);
long tokenRefreshTime = tokenExpirationTime - refreshBeforeExpirySecs;
token.setRefreshTime(tokenRefreshTime);
token.setExpirationTime(tokenExpirationTime);
setTokenCache(token);
} } |
public class class_name {
/**
 * Ends the application after crash handling. For an uncaught exception, when
 * configuration asks to also report to the Android framework and a default
 * handler exists, the exception is handed to that handler (which shows the
 * normal force-close dialog); otherwise the process finisher ends the app.
 */
private void endApplication(@Nullable Thread uncaughtExceptionThread, Throwable th) {
    final boolean letDefaultHandlerEndApplication = config.alsoReportToAndroidFramework();
    final boolean handlingUncaughtException = uncaughtExceptionThread != null;
    if (handlingUncaughtException && letDefaultHandlerEndApplication && defaultExceptionHandler != null) {
        // Delegate so the system shows its usual force-close dialog.
        if (ACRA.DEV_LOGGING) ACRA.log.d(LOG_TAG, "Handing Exception on to default ExceptionHandler");
        defaultExceptionHandler.uncaughtException(uncaughtExceptionThread, th);
        return;
    }
    processFinisher.endApplication();
} } | public class class_name {
// Dependency-annotated variant of the preceding snippet; the trailing
// "// depends on control dependency: ..." comments are generated labels — keep them verbatim.
private void endApplication(@Nullable Thread uncaughtExceptionThread, Throwable th) {
final boolean letDefaultHandlerEndApplication = config.alsoReportToAndroidFramework();
final boolean handlingUncaughtException = uncaughtExceptionThread != null;
if (handlingUncaughtException && letDefaultHandlerEndApplication && defaultExceptionHandler != null) {
// Let the system default handler do it's job and display the force close dialog.
if (ACRA.DEV_LOGGING) ACRA.log.d(LOG_TAG, "Handing Exception on to default ExceptionHandler");
defaultExceptionHandler.uncaughtException(uncaughtExceptionThread, th); // depends on control dependency: [if], data = [none]
} else {
processFinisher.endApplication(); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
private static byte[] getQualifier(final long start_time) {
if (start_time < 1) {
throw new IllegalArgumentException("The start timestamp has not been set");
}
final long base_time;
final byte[] qualifier;
long timestamp = start_time;
// downsample to seconds to save space AND prevent duplicates if the time
// is on a second boundary (e.g. if someone posts at 1328140800 with value A
// and 1328140800000L with value B)
if (timestamp % 1000 == 0) {
timestamp = timestamp / 1000;
}
if ((timestamp & Const.SECOND_MASK) != 0) {
// drop the ms timestamp to seconds to calculate the base timestamp
base_time = ((timestamp / 1000) -
((timestamp / 1000) % Const.MAX_TIMESPAN));
qualifier = new byte[5];
final int offset = (int) (timestamp - (base_time * 1000));
System.arraycopy(Bytes.fromInt(offset), 0, qualifier, 1, 4);
} else {
base_time = (timestamp - (timestamp % Const.MAX_TIMESPAN));
qualifier = new byte[3];
final short offset = (short) (timestamp - base_time);
System.arraycopy(Bytes.fromShort(offset), 0, qualifier, 1, 2);
}
qualifier[0] = PREFIX;
return qualifier;
} } | public class class_name {
private static byte[] getQualifier(final long start_time) {
if (start_time < 1) {
throw new IllegalArgumentException("The start timestamp has not been set");
}
final long base_time;
final byte[] qualifier;
long timestamp = start_time;
// downsample to seconds to save space AND prevent duplicates if the time
// is on a second boundary (e.g. if someone posts at 1328140800 with value A
// and 1328140800000L with value B)
if (timestamp % 1000 == 0) {
timestamp = timestamp / 1000; // depends on control dependency: [if], data = [none]
}
if ((timestamp & Const.SECOND_MASK) != 0) {
// drop the ms timestamp to seconds to calculate the base timestamp
base_time = ((timestamp / 1000) -
((timestamp / 1000) % Const.MAX_TIMESPAN)); // depends on control dependency: [if], data = [0)]
qualifier = new byte[5]; // depends on control dependency: [if], data = [none]
final int offset = (int) (timestamp - (base_time * 1000));
System.arraycopy(Bytes.fromInt(offset), 0, qualifier, 1, 4); // depends on control dependency: [if], data = [none]
} else {
base_time = (timestamp - (timestamp % Const.MAX_TIMESPAN)); // depends on control dependency: [if], data = [none]
qualifier = new byte[3]; // depends on control dependency: [if], data = [none]
final short offset = (short) (timestamp - base_time);
System.arraycopy(Bytes.fromShort(offset), 0, qualifier, 1, 2); // depends on control dependency: [if], data = [none]
}
qualifier[0] = PREFIX;
return qualifier;
} } |
public class class_name {
public final boolean encloses(HString other) {
if (other == null) {
return false;
}
return (document() != null && other.document() != null) &&
(document() == other.document())
&& super.encloses(other);
} } | public class class_name {
public final boolean encloses(HString other) {
if (other == null) {
return false; // depends on control dependency: [if], data = [none]
}
return (document() != null && other.document() != null) &&
(document() == other.document())
&& super.encloses(other);
} } |
public class class_name {
public TafResp validate(Taf.LifeForm reading, HttpServletRequest req, HttpServletResponse resp) {
// See if Request implements BasicCred (aka CadiWrap or other), and if User/Pass has already been set separately
if(req instanceof BasicCred) {
BasicCred bc = (BasicCred)req;
if(bc.getUser()!=null) { // CadiWrap, if set, makes sure User & Password are both valid, or both null
if(DenialOfServiceTaf.isDeniedID(bc.getUser())!=null) {
return DenialOfServiceTaf.respDenyID(access,bc.getUser());
}
CachedBasicPrincipal bp = new CachedBasicPrincipal(this,bc,realm,timeToLive);
// ONLY FOR Last Ditch DEBUGGING...
// access.log(Level.WARN,bp.getName() + ":" + new String(bp.getCred()));
if(rbac.validate(bp.getName(),Type.PASSWORD,bp.getCred())) {
return new BasicHttpTafResp(access,bp,bp.getName()+" authenticated by password",RESP.IS_AUTHENTICATED,resp,realm,false);
} else {
//TODO may need timed retries in a given time period
return new BasicHttpTafResp(access,null,buildMsg(bp,req,"User/Pass combo invalid for ",bc.getUser()),
RESP.TRY_AUTHENTICATING,resp,realm,true);
}
}
}
// Get User/Password from Authorization Header value
String authz = req.getHeader("Authorization");
if(authz != null && authz.startsWith("Basic ")) {
if(warn&&!req.isSecure()) {
access.log(Level.WARN,"WARNING! BasicAuth has been used over an insecure channel");
}
try {
CachedBasicPrincipal ba = new CachedBasicPrincipal(this,authz,realm,timeToLive);
if(DenialOfServiceTaf.isDeniedID(ba.getName())!=null) {
return DenialOfServiceTaf.respDenyID(access,ba.getName());
}
// ONLY FOR Last Ditch DEBUGGING...
// access.log(Level.WARN,ba.getName() + ":" + new String(ba.getCred()));
if(rbac.validate(ba.getName(), Type.PASSWORD, ba.getCred())) {
return new BasicHttpTafResp(access,ba, ba.getName()+" authenticated by BasicAuth password",RESP.IS_AUTHENTICATED,resp,realm,false);
} else {
//TODO may need timed retries in a given time period
return new BasicHttpTafResp(access,null,buildMsg(ba,req,"User/Pass combo invalid"),
RESP.TRY_AUTHENTICATING,resp,realm,true);
}
} catch (IOException e) {
String msg = buildMsg(null,req,"Failed HTTP Basic Authorization (", e.getMessage(), ')');
access.log(Level.INFO,msg);
return new BasicHttpTafResp(access,null,msg, RESP.TRY_AUTHENTICATING, resp, realm,true);
}
}
return new BasicHttpTafResp(access,null,"Requesting HTTP Basic Authorization",RESP.TRY_AUTHENTICATING,resp,realm,false);
} } | public class class_name {
public TafResp validate(Taf.LifeForm reading, HttpServletRequest req, HttpServletResponse resp) {
// See if Request implements BasicCred (aka CadiWrap or other), and if User/Pass has already been set separately
if(req instanceof BasicCred) {
BasicCred bc = (BasicCred)req;
if(bc.getUser()!=null) { // CadiWrap, if set, makes sure User & Password are both valid, or both null
if(DenialOfServiceTaf.isDeniedID(bc.getUser())!=null) {
return DenialOfServiceTaf.respDenyID(access,bc.getUser()); // depends on control dependency: [if], data = [none]
}
CachedBasicPrincipal bp = new CachedBasicPrincipal(this,bc,realm,timeToLive);
// ONLY FOR Last Ditch DEBUGGING...
// access.log(Level.WARN,bp.getName() + ":" + new String(bp.getCred()));
if(rbac.validate(bp.getName(),Type.PASSWORD,bp.getCred())) {
return new BasicHttpTafResp(access,bp,bp.getName()+" authenticated by password",RESP.IS_AUTHENTICATED,resp,realm,false); // depends on control dependency: [if], data = [none]
} else {
//TODO may need timed retries in a given time period
return new BasicHttpTafResp(access,null,buildMsg(bp,req,"User/Pass combo invalid for ",bc.getUser()),
RESP.TRY_AUTHENTICATING,resp,realm,true); // depends on control dependency: [if], data = [none]
}
}
}
// Get User/Password from Authorization Header value
String authz = req.getHeader("Authorization");
if(authz != null && authz.startsWith("Basic ")) {
if(warn&&!req.isSecure()) {
access.log(Level.WARN,"WARNING! BasicAuth has been used over an insecure channel"); // depends on control dependency: [if], data = [none]
}
try {
CachedBasicPrincipal ba = new CachedBasicPrincipal(this,authz,realm,timeToLive);
if(DenialOfServiceTaf.isDeniedID(ba.getName())!=null) {
return DenialOfServiceTaf.respDenyID(access,ba.getName()); // depends on control dependency: [if], data = [none]
}
// ONLY FOR Last Ditch DEBUGGING...
// access.log(Level.WARN,ba.getName() + ":" + new String(ba.getCred()));
if(rbac.validate(ba.getName(), Type.PASSWORD, ba.getCred())) {
return new BasicHttpTafResp(access,ba, ba.getName()+" authenticated by BasicAuth password",RESP.IS_AUTHENTICATED,resp,realm,false); // depends on control dependency: [if], data = [none]
} else {
//TODO may need timed retries in a given time period
return new BasicHttpTafResp(access,null,buildMsg(ba,req,"User/Pass combo invalid"),
RESP.TRY_AUTHENTICATING,resp,realm,true); // depends on control dependency: [if], data = [none]
}
} catch (IOException e) {
String msg = buildMsg(null,req,"Failed HTTP Basic Authorization (", e.getMessage(), ')');
access.log(Level.INFO,msg);
return new BasicHttpTafResp(access,null,msg, RESP.TRY_AUTHENTICATING, resp, realm,true);
} // depends on control dependency: [catch], data = [none]
}
return new BasicHttpTafResp(access,null,"Requesting HTTP Basic Authorization",RESP.TRY_AUTHENTICATING,resp,realm,false);
} } |
public class class_name {
public static Map<RouteDefinition, Method> get(Class clazz) {
Map<RouteDefinition, Method> out = new HashMap<>();
Map<RouteDefinition, Method> candidates = collect(clazz);
// Final check if definitions are OK
for (RouteDefinition definition : candidates.keySet()) {
if (definition.getMethod() == null) { // skip non REST methods
continue;
}
Method method = candidates.get(definition);
Assert.notNull(definition.getRoutePath(), getClassMethod(clazz, method) + " - Missing route @Path!");
int bodyParamCount = 0;
for (MethodParameter param : definition.getParameters()) {
if (bodyParamCount > 0 && (ParameterType.body.equals(param.getType()) || ParameterType.unknown.equals(param.getType()))) {
// OK we have to body params ...
throw new IllegalArgumentException(getClassMethod(clazz, method) + " - to many body arguments given. " +
"Missing argument annotation (@PathParam, @QueryParam, @FormParam, @HeaderParam, @CookieParam or @Context) for: " +
param.getType() + " " + param.getName() + "!");
}
if (ParameterType.unknown.equals(param.getType())) { // proclaim as body param
// check if method allows for a body param
Assert.isTrue(definition.requestHasBody(), getClassMethod(clazz, method) + " - " +
"Missing argument annotation (@PathParam, @QueryParam, @FormParam, @HeaderParam, @CookieParam or @Context) for: " +
param.getName() + "!");
param.setType(ParameterType.body);
}
if (ParameterType.body.equals(param.getType())) {
bodyParamCount++;
}
}
out.put(definition, method);
}
return out;
} } | public class class_name {
public static Map<RouteDefinition, Method> get(Class clazz) {
Map<RouteDefinition, Method> out = new HashMap<>();
Map<RouteDefinition, Method> candidates = collect(clazz);
// Final check if definitions are OK
for (RouteDefinition definition : candidates.keySet()) {
if (definition.getMethod() == null) { // skip non REST methods
continue;
}
Method method = candidates.get(definition);
Assert.notNull(definition.getRoutePath(), getClassMethod(clazz, method) + " - Missing route @Path!"); // depends on control dependency: [for], data = [definition]
int bodyParamCount = 0;
for (MethodParameter param : definition.getParameters()) {
if (bodyParamCount > 0 && (ParameterType.body.equals(param.getType()) || ParameterType.unknown.equals(param.getType()))) {
// OK we have to body params ...
throw new IllegalArgumentException(getClassMethod(clazz, method) + " - to many body arguments given. " +
"Missing argument annotation (@PathParam, @QueryParam, @FormParam, @HeaderParam, @CookieParam or @Context) for: " +
param.getType() + " " + param.getName() + "!");
}
if (ParameterType.unknown.equals(param.getType())) { // proclaim as body param
// check if method allows for a body param
Assert.isTrue(definition.requestHasBody(), getClassMethod(clazz, method) + " - " +
"Missing argument annotation (@PathParam, @QueryParam, @FormParam, @HeaderParam, @CookieParam or @Context) for: " +
param.getName() + "!"); // depends on control dependency: [if], data = [none]
param.setType(ParameterType.body); // depends on control dependency: [if], data = [none]
}
if (ParameterType.body.equals(param.getType())) {
bodyParamCount++; // depends on control dependency: [if], data = [none]
}
}
out.put(definition, method); // depends on control dependency: [for], data = [definition]
}
return out;
} } |
public class class_name {
public static ByteBuffer asByteBuffer(java.util.UUID uuid) {
if (uuid == null) {
return null;
}
return ByteBuffer.wrap(asByteArray(uuid));
} } | public class class_name {
public static ByteBuffer asByteBuffer(java.util.UUID uuid) {
if (uuid == null) {
return null; // depends on control dependency: [if], data = [none]
}
return ByteBuffer.wrap(asByteArray(uuid));
} } |
public class class_name {
public void checkin(AdminClient client) {
if (isClosed.get()) {
throw new IllegalStateException("Pool is closing");
}
if (client == null) {
throw new IllegalArgumentException("client is null");
}
boolean isCheckedIn = clientCache.offer(client);
if (!isCheckedIn) {
// Cache is already full, close this AdminClient
client.close();
}
} } | public class class_name {
public void checkin(AdminClient client) {
if (isClosed.get()) {
throw new IllegalStateException("Pool is closing");
}
if (client == null) {
throw new IllegalArgumentException("client is null");
}
boolean isCheckedIn = clientCache.offer(client);
if (!isCheckedIn) {
// Cache is already full, close this AdminClient
client.close(); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@Override
public void run() {
Throwable thrown = null;
try {
if (state == COMPLETELY_CLOSED)
return;
try {
LOG.error("{} is not closed manually, cleaned up from Cleaner",
chronicleHashIdentityString);
} catch (Throwable t) {
thrown = t;
} finally {
synchronized (this) {
if (state == COMPLETELY_CLOSED) {
LOG.error("Somebody closed {} while it is processed by Cleaner, " +
"this should be impossible", chronicleHashIdentityString);
} else {
thrown = Throwables.returnOrSuppress(thrown, releaseEverything(true));
}
}
if (thrown != null) {
try {
LOG.error("Error on releasing resources of " + chronicleHashIdentityString,
thrown);
} catch (Throwable t) {
// This may occur if we are in shutdown hooks, and the log service has
// already been shut down. Try to fall back to printStackTrace().
thrown.addSuppressed(t);
thrown.printStackTrace();
}
}
}
} catch (Throwable ignore) {
// Just don't fail anyway. We will have another attempt to close this ChronicleMap from
// ChronicleHashCloseOnExitHook.
}
} } | public class class_name {
@Override
public void run() {
Throwable thrown = null;
try {
if (state == COMPLETELY_CLOSED)
return;
try {
LOG.error("{} is not closed manually, cleaned up from Cleaner",
chronicleHashIdentityString); // depends on control dependency: [try], data = [none]
} catch (Throwable t) {
thrown = t;
} finally { // depends on control dependency: [catch], data = [none]
synchronized (this) {
if (state == COMPLETELY_CLOSED) {
LOG.error("Somebody closed {} while it is processed by Cleaner, " +
"this should be impossible", chronicleHashIdentityString); // depends on control dependency: [if], data = [none]
} else {
thrown = Throwables.returnOrSuppress(thrown, releaseEverything(true)); // depends on control dependency: [if], data = [none]
}
}
if (thrown != null) {
try {
LOG.error("Error on releasing resources of " + chronicleHashIdentityString,
thrown); // depends on control dependency: [try], data = [none]
} catch (Throwable t) {
// This may occur if we are in shutdown hooks, and the log service has
// already been shut down. Try to fall back to printStackTrace().
thrown.addSuppressed(t);
thrown.printStackTrace();
} // depends on control dependency: [catch], data = [none]
}
}
} catch (Throwable ignore) {
// Just don't fail anyway. We will have another attempt to close this ChronicleMap from
// ChronicleHashCloseOnExitHook.
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
protected JsonObject silentlyCreateObjectFromString(String json) {
JsonObject errorObject = null;
// We need to swallow exceptions here because it's possible to get a legit
// Facebook response that contains illegal JSON (e.g.
// users.getLoggedInUser returning 1240077) - we're only interested in
// whether or not there's an error_code field present.
try {
errorObject = Json.parse(json).asObject();
} catch (ParseException e) {
// do nothing here
}
return errorObject;
} } | public class class_name {
protected JsonObject silentlyCreateObjectFromString(String json) {
JsonObject errorObject = null;
// We need to swallow exceptions here because it's possible to get a legit
// Facebook response that contains illegal JSON (e.g.
// users.getLoggedInUser returning 1240077) - we're only interested in
// whether or not there's an error_code field present.
try {
errorObject = Json.parse(json).asObject(); // depends on control dependency: [try], data = [none]
} catch (ParseException e) {
// do nothing here
} // depends on control dependency: [catch], data = [none]
return errorObject;
} } |
public class class_name {
private void processQueue() {
CachedEvent cachedEvent;
try {
IRTMPEvent event = null;
RTMPMessage message = null;
// get first event in the queue
cachedEvent = queue.poll();
if (cachedEvent != null) {
// get the data type
final byte dataType = cachedEvent.getDataType();
// get the data
IoBuffer buffer = cachedEvent.getData();
// get the current size of the buffer / data
int bufferLimit = buffer.limit();
if (bufferLimit > 0) {
// create new RTMP message and push to the consumer
switch (dataType) {
case Constants.TYPE_AGGREGATE:
event = new Aggregate(buffer);
event.setTimestamp(cachedEvent.getTimestamp());
message = RTMPMessage.build(event);
break;
case Constants.TYPE_AUDIO_DATA:
event = new AudioData(buffer);
event.setTimestamp(cachedEvent.getTimestamp());
message = RTMPMessage.build(event);
break;
case Constants.TYPE_VIDEO_DATA:
event = new VideoData(buffer);
event.setTimestamp(cachedEvent.getTimestamp());
message = RTMPMessage.build(event);
break;
default:
event = new Notify(buffer);
event.setTimestamp(cachedEvent.getTimestamp());
message = RTMPMessage.build(event);
break;
}
// push it down to the recorder
recordingConsumer.pushMessage(null, message);
} else if (bufferLimit == 0 && dataType == Constants.TYPE_AUDIO_DATA) {
log.debug("Stream data size was 0, sending empty audio message");
// allow for 0 byte audio packets
event = new AudioData(IoBuffer.allocate(0));
event.setTimestamp(cachedEvent.getTimestamp());
message = RTMPMessage.build(event);
// push it down to the recorder
recordingConsumer.pushMessage(null, message);
} else {
log.debug("Stream data size was 0, recording pipe will not be notified");
}
}
} catch (Exception e) {
log.warn("Exception while pushing to consumer", e);
}
} } | public class class_name {
private void processQueue() {
CachedEvent cachedEvent;
try {
IRTMPEvent event = null;
RTMPMessage message = null;
// get first event in the queue
cachedEvent = queue.poll();
// depends on control dependency: [try], data = [none]
if (cachedEvent != null) {
// get the data type
final byte dataType = cachedEvent.getDataType();
// get the data
IoBuffer buffer = cachedEvent.getData();
// get the current size of the buffer / data
int bufferLimit = buffer.limit();
if (bufferLimit > 0) {
// create new RTMP message and push to the consumer
switch (dataType) {
case Constants.TYPE_AGGREGATE:
event = new Aggregate(buffer);
event.setTimestamp(cachedEvent.getTimestamp());
message = RTMPMessage.build(event);
break;
case Constants.TYPE_AUDIO_DATA:
event = new AudioData(buffer);
event.setTimestamp(cachedEvent.getTimestamp());
message = RTMPMessage.build(event);
break;
case Constants.TYPE_VIDEO_DATA:
event = new VideoData(buffer);
event.setTimestamp(cachedEvent.getTimestamp());
message = RTMPMessage.build(event);
break;
default:
event = new Notify(buffer);
event.setTimestamp(cachedEvent.getTimestamp());
message = RTMPMessage.build(event);
break;
}
// push it down to the recorder
recordingConsumer.pushMessage(null, message);
// depends on control dependency: [if], data = [none]
} else if (bufferLimit == 0 && dataType == Constants.TYPE_AUDIO_DATA) {
log.debug("Stream data size was 0, sending empty audio message");
// depends on control dependency: [if], data = [none]
// allow for 0 byte audio packets
event = new AudioData(IoBuffer.allocate(0));
// depends on control dependency: [if], data = [none]
event.setTimestamp(cachedEvent.getTimestamp());
// depends on control dependency: [if], data = [none]
message = RTMPMessage.build(event);
// depends on control dependency: [if], data = [none]
// push it down to the recorder
recordingConsumer.pushMessage(null, message);
// depends on control dependency: [if], data = [none]
} else {
log.debug("Stream data size was 0, recording pipe will not be notified");
// depends on control dependency: [if], data = [none]
}
}
} catch (Exception e) {
log.warn("Exception while pushing to consumer", e);
}
// depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public void decodeHeader() throws IOException {
if(!headerDecoded) {
headerDecoded = true;
int m = getMarker();
if(m != 0xD8) {
throw new IOException("no SOI");
}
m = getMarker();
while(m != 0xC0 && m != 0xC1) { // SOF
processMarker(m);
m = getMarker();
while(m == MARKER_NONE) {
m = getMarker();
}
}
processSOF();
}
} } | public class class_name {
public void decodeHeader() throws IOException {
if(!headerDecoded) {
headerDecoded = true;
int m = getMarker();
if(m != 0xD8) {
throw new IOException("no SOI");
}
m = getMarker();
while(m != 0xC0 && m != 0xC1) { // SOF
processMarker(m);
m = getMarker();
while(m == MARKER_NONE) {
m = getMarker(); // depends on control dependency: [while], data = [none]
}
}
processSOF();
}
} } |
public class class_name {
@Nullable
private static <A extends Annotation> A getAnnotation(PropertyDescriptor descriptor,
Class<A> type) {
A a = null;
if (descriptor.getWriteMethod() != null) {
a = descriptor.getWriteMethod().getAnnotation(type);
}
if (a == null && descriptor.getReadMethod() != null) {
a = descriptor.getReadMethod().getAnnotation(type);
}
return a;
} } | public class class_name {
@Nullable
private static <A extends Annotation> A getAnnotation(PropertyDescriptor descriptor,
Class<A> type) {
A a = null;
if (descriptor.getWriteMethod() != null) {
a = descriptor.getWriteMethod().getAnnotation(type); // depends on control dependency: [if], data = [none]
}
if (a == null && descriptor.getReadMethod() != null) {
a = descriptor.getReadMethod().getAnnotation(type); // depends on control dependency: [if], data = [none]
}
return a;
} } |
public class class_name {
private boolean isFirstUse(int reg) {
LocalVariableTable lvt = getMethod().getLocalVariableTable();
if (lvt == null) {
return true;
}
LocalVariable lv = lvt.getLocalVariable(reg, getPC());
return lv == null;
} } | public class class_name {
private boolean isFirstUse(int reg) {
LocalVariableTable lvt = getMethod().getLocalVariableTable();
if (lvt == null) {
return true; // depends on control dependency: [if], data = [none]
}
LocalVariable lv = lvt.getLocalVariable(reg, getPC());
return lv == null;
} } |
public class class_name {
private List<CacheObject> sort() {
List<CacheObject> cacheObjectList = new ArrayList<CacheObject>(cache.values());
if (config.getSortLength() != 0) {
int end = config.getSortLength();
if (cacheObjectList.size() < config.getSortLength()) {
end = cacheObjectList.size();
}
cacheObjectList = cacheObjectList.subList(0, end);
}
Collections.sort(cacheObjectList, config.getEvictionPolicy());
return cacheObjectList;
} } | public class class_name {
private List<CacheObject> sort() {
List<CacheObject> cacheObjectList = new ArrayList<CacheObject>(cache.values());
if (config.getSortLength() != 0) {
int end = config.getSortLength();
if (cacheObjectList.size() < config.getSortLength()) {
end = cacheObjectList.size();
// depends on control dependency: [if], data = [none]
}
cacheObjectList = cacheObjectList.subList(0, end);
// depends on control dependency: [if], data = [none]
}
Collections.sort(cacheObjectList, config.getEvictionPolicy());
return cacheObjectList;
} } |
public class class_name {
protected void debugFw(LaJobRuntime runtime, String msg, Object... args) {
if (runtime.isFrameworkDebug() && logger.isInfoEnabled()) {
logger.info("#job #fw " + msg, args); // info level for production
}
} } | public class class_name {
protected void debugFw(LaJobRuntime runtime, String msg, Object... args) {
if (runtime.isFrameworkDebug() && logger.isInfoEnabled()) {
logger.info("#job #fw " + msg, args); // info level for production // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
protected void addContents(Character uc, Collection<? extends Element> memberlist,
Content contentTree) {
addHeading(uc, contentTree);
// Display the list only if there are elements to be displayed.
if (!memberlist.isEmpty()) {
Content dl = new HtmlTree(HtmlTag.DL);
for (Element element : memberlist) {
addDescription(dl, element);
}
contentTree.addContent(dl);
}
} } | public class class_name {
protected void addContents(Character uc, Collection<? extends Element> memberlist,
Content contentTree) {
addHeading(uc, contentTree);
// Display the list only if there are elements to be displayed.
if (!memberlist.isEmpty()) {
Content dl = new HtmlTree(HtmlTag.DL);
for (Element element : memberlist) {
addDescription(dl, element); // depends on control dependency: [for], data = [element]
}
contentTree.addContent(dl); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public void addGroupedRow(int rowIndex) {
int[][] newRowGroups = getRowGroups();
// Create a group if none exists.
if (newRowGroups.length == 0) {
newRowGroups = new int[][]{{rowIndex}};
} else {
int lastGroupIndex = newRowGroups.length - 1;
int[] lastGroup = newRowGroups[lastGroupIndex];
int groupSize = lastGroup.length;
int[] newLastGroup = new int[groupSize + 1];
System.arraycopy(lastGroup, 0, newLastGroup, 0, groupSize);
newLastGroup[groupSize] = rowIndex;
newRowGroups[lastGroupIndex] = newLastGroup;
}
setRowGroups(newRowGroups);
} } | public class class_name {
public void addGroupedRow(int rowIndex) {
int[][] newRowGroups = getRowGroups();
// Create a group if none exists.
if (newRowGroups.length == 0) {
newRowGroups = new int[][]{{rowIndex}}; // depends on control dependency: [if], data = [none]
} else {
int lastGroupIndex = newRowGroups.length - 1;
int[] lastGroup = newRowGroups[lastGroupIndex];
int groupSize = lastGroup.length;
int[] newLastGroup = new int[groupSize + 1];
System.arraycopy(lastGroup, 0, newLastGroup, 0, groupSize); // depends on control dependency: [if], data = [none]
newLastGroup[groupSize] = rowIndex; // depends on control dependency: [if], data = [none]
newRowGroups[lastGroupIndex] = newLastGroup; // depends on control dependency: [if], data = [none]
}
setRowGroups(newRowGroups);
} } |
public class class_name {
public List<SecurityRoleRefType<EntityBeanType<T>>> getAllSecurityRoleRef()
{
List<SecurityRoleRefType<EntityBeanType<T>>> list = new ArrayList<SecurityRoleRefType<EntityBeanType<T>>>();
List<Node> nodeList = childNode.get("security-role-ref");
for(Node node: nodeList)
{
SecurityRoleRefType<EntityBeanType<T>> type = new SecurityRoleRefTypeImpl<EntityBeanType<T>>(this, "security-role-ref", childNode, node);
list.add(type);
}
return list;
} } | public class class_name {
public List<SecurityRoleRefType<EntityBeanType<T>>> getAllSecurityRoleRef()
{
List<SecurityRoleRefType<EntityBeanType<T>>> list = new ArrayList<SecurityRoleRefType<EntityBeanType<T>>>();
List<Node> nodeList = childNode.get("security-role-ref");
for(Node node: nodeList)
{
SecurityRoleRefType<EntityBeanType<T>> type = new SecurityRoleRefTypeImpl<EntityBeanType<T>>(this, "security-role-ref", childNode, node);
list.add(type); // depends on control dependency: [for], data = [none]
}
return list;
} } |
public class class_name {
protected Set<Class<?>> loadAllClass(File jarFile, ScriptClassLoader loader) throws Exception {
Set<Class<?>> clzzs = new HashSet<Class<?>>();
JarFile jf = new JarFile(jarFile);
try {
loader.addURL(jarFile.toURI().toURL());// 添加文件到加载器
Enumeration<JarEntry> it = jf.entries();
while (it.hasMoreElements()) {
JarEntry jarEntry = it.nextElement();
if (jarEntry.getName().endsWith(".class")) {
String className = jarEntry.getName().replace("/", ".").replaceAll(".class", "");
clzzs.add(loader.findClass(className));
}
}
} finally {
jf.close();
}
return clzzs;
} } | public class class_name {
protected Set<Class<?>> loadAllClass(File jarFile, ScriptClassLoader loader) throws Exception {
Set<Class<?>> clzzs = new HashSet<Class<?>>();
JarFile jf = new JarFile(jarFile);
try {
loader.addURL(jarFile.toURI().toURL());// 添加文件到加载器 // depends on control dependency: [try], data = [none]
Enumeration<JarEntry> it = jf.entries();
while (it.hasMoreElements()) {
JarEntry jarEntry = it.nextElement();
if (jarEntry.getName().endsWith(".class")) {
String className = jarEntry.getName().replace("/", ".").replaceAll(".class", "");
clzzs.add(loader.findClass(className)); // depends on control dependency: [if], data = [none]
}
}
} finally {
jf.close();
}
return clzzs;
} } |
public class class_name {
@SuppressWarnings("unchecked")
public static <T> T asType(Collection col, Class<T> clazz) {
if (col.getClass() == clazz) {
return (T) col;
}
if (clazz == List.class) {
return (T) asList((Iterable) col);
}
if (clazz == Set.class) {
if (col instanceof Set) return (T) col;
return (T) new LinkedHashSet(col);
}
if (clazz == SortedSet.class) {
if (col instanceof SortedSet) return (T) col;
return (T) new TreeSet(col);
}
if (clazz == Queue.class) {
if (col instanceof Queue) return (T) col;
return (T) new LinkedList(col);
}
if (clazz == Stack.class) {
if (col instanceof Stack) return (T) col;
final Stack stack = new Stack();
stack.addAll(col);
return (T) stack;
}
if (clazz!=String[].class && ReflectionCache.isArray(clazz)) {
try {
return (T) asArrayType(col, clazz);
} catch (GroovyCastException e) {
/* ignore */
}
}
Object[] args = {col};
try {
return (T) InvokerHelper.invokeConstructorOf(clazz, args);
} catch (Exception e) {
// ignore, the constructor that takes a Collection as an argument may not exist
}
if (Collection.class.isAssignableFrom(clazz)) {
try {
Collection result = (Collection) InvokerHelper.invokeConstructorOf(clazz, null);
result.addAll(col);
return (T)result;
} catch (Exception e) {
// ignore, the no arg constructor might not exist.
}
}
return asType((Object) col, clazz);
} } | public class class_name {
@SuppressWarnings("unchecked")
public static <T> T asType(Collection col, Class<T> clazz) {
if (col.getClass() == clazz) {
return (T) col; // depends on control dependency: [if], data = [none]
}
if (clazz == List.class) {
return (T) asList((Iterable) col); // depends on control dependency: [if], data = [none]
}
if (clazz == Set.class) {
if (col instanceof Set) return (T) col;
return (T) new LinkedHashSet(col); // depends on control dependency: [if], data = [none]
}
if (clazz == SortedSet.class) {
if (col instanceof SortedSet) return (T) col;
return (T) new TreeSet(col); // depends on control dependency: [if], data = [none]
}
if (clazz == Queue.class) {
if (col instanceof Queue) return (T) col;
return (T) new LinkedList(col); // depends on control dependency: [if], data = [none]
}
if (clazz == Stack.class) {
if (col instanceof Stack) return (T) col;
final Stack stack = new Stack();
stack.addAll(col); // depends on control dependency: [if], data = [none]
return (T) stack; // depends on control dependency: [if], data = [none]
}
if (clazz!=String[].class && ReflectionCache.isArray(clazz)) {
try {
return (T) asArrayType(col, clazz); // depends on control dependency: [try], data = [none]
} catch (GroovyCastException e) {
/* ignore */
} // depends on control dependency: [catch], data = [none]
}
Object[] args = {col};
try {
return (T) InvokerHelper.invokeConstructorOf(clazz, args); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
// ignore, the constructor that takes a Collection as an argument may not exist
} // depends on control dependency: [catch], data = [none]
if (Collection.class.isAssignableFrom(clazz)) {
try {
Collection result = (Collection) InvokerHelper.invokeConstructorOf(clazz, null);
result.addAll(col); // depends on control dependency: [try], data = [none]
return (T)result; // depends on control dependency: [try], data = [none]
} catch (Exception e) {
// ignore, the no arg constructor might not exist.
} // depends on control dependency: [catch], data = [none]
}
return asType((Object) col, clazz);
} } |
public class class_name {
@GET
@Produces(MediaType.APPLICATION_XML)
public Response getProviders() {
logger.debug("StartOf getProviders - REQUEST for /providers");
ProviderHelper providerRestService = getProviderHelper();
String serializedProviders = null;
try {
serializedProviders = providerRestService.getProviders();
} catch (HelperException e) {
logger.info("getTemplates exception:"+e.getMessage());
return buildResponse(e);
}
logger.debug("EndOf getTemplates");
return buildResponse(200, serializedProviders);
} } | public class class_name {
@GET
@Produces(MediaType.APPLICATION_XML)
public Response getProviders() {
logger.debug("StartOf getProviders - REQUEST for /providers");
ProviderHelper providerRestService = getProviderHelper();
String serializedProviders = null;
try {
serializedProviders = providerRestService.getProviders(); // depends on control dependency: [try], data = [none]
} catch (HelperException e) {
logger.info("getTemplates exception:"+e.getMessage());
return buildResponse(e);
} // depends on control dependency: [catch], data = [none]
logger.debug("EndOf getTemplates");
return buildResponse(200, serializedProviders);
} } |
public class class_name {
public static String listToCSVString(final List<?> list) {
final StringBuilder buf = new StringBuilder();
if (list != null) {
boolean needComma = false;
for (final Object o : list) {
if (needComma) {
buf.append(",");
}
buf.append(StringUtil.toStringOrEmpty(o));
needComma = true;
}
}
return buf.toString();
} } | public class class_name {
public static String listToCSVString(final List<?> list) {
final StringBuilder buf = new StringBuilder();
if (list != null) {
boolean needComma = false;
for (final Object o : list) {
if (needComma) {
buf.append(",");
// depends on control dependency: [if], data = [none]
}
buf.append(StringUtil.toStringOrEmpty(o));
// depends on control dependency: [for], data = [o]
needComma = true;
// depends on control dependency: [for], data = [o]
}
}
return buf.toString();
} } |
public class class_name {
public int getPixel(int x, int y) {
int pixel = -1;
if (pixels == null) {
readPixels();
}
if (pixels != null) {
pixel = pixels[y][x];
} else {
throw new GeoPackageException("Could not retrieve pixel value");
}
return pixel;
} } | public class class_name {
public int getPixel(int x, int y) {
int pixel = -1;
if (pixels == null) {
readPixels(); // depends on control dependency: [if], data = [none]
}
if (pixels != null) {
pixel = pixels[y][x]; // depends on control dependency: [if], data = [none]
} else {
throw new GeoPackageException("Could not retrieve pixel value");
}
return pixel;
} } |
public class class_name {
private List<StationStats> getStationStats(List<Short> stations, VoltTable[] lastDepartResult) {
final List<StationStats> stationStats = new ArrayList<>(stations.size());
for (int i = 0; i < stations.size(); i++) {
final short station = stations.get(i);
TimestampType lastDepartTime;
if (lastDepartResult[i].advanceRow()) {
lastDepartTime = lastDepartResult[i].getTimestampAsTimestamp("last_depart");
} else {
lastDepartTime = new TimestampType(0);
}
voltQueueSQL(getTrainDeparts, station, lastDepartTime);
final VoltTable departResult = voltExecuteSQL()[0];
while (departResult.advanceRow()) {
final TimestampType departTime = departResult.getTimestampAsTimestamp("time");
voltQueueSQL(getWaitTimeForTrain, departTime, station, lastDepartTime, departTime);
lastDepartTime = departTime;
}
long totalWaitTime = 0;
long totalEntries = 0;
if (departResult.getRowCount() > 0) {
final VoltTable[] waitTimeResult = voltExecuteSQL();
for (VoltTable res : waitTimeResult) {
if (res.advanceRow()) {
final long entries = res.getLong("entries");
final long waitTime = res.getLong("wait_sum");
// Don't record empty train
if (entries > 0) {
totalEntries += entries;
totalWaitTime += waitTime;
}
}
}
}
stationStats.add(new StationStats(station, lastDepartTime, totalWaitTime, totalEntries));
}
return stationStats;
} } | public class class_name {
private List<StationStats> getStationStats(List<Short> stations, VoltTable[] lastDepartResult) {
final List<StationStats> stationStats = new ArrayList<>(stations.size());
for (int i = 0; i < stations.size(); i++) {
final short station = stations.get(i);
TimestampType lastDepartTime;
if (lastDepartResult[i].advanceRow()) {
lastDepartTime = lastDepartResult[i].getTimestampAsTimestamp("last_depart"); // depends on control dependency: [if], data = [none]
} else {
lastDepartTime = new TimestampType(0); // depends on control dependency: [if], data = [none]
}
voltQueueSQL(getTrainDeparts, station, lastDepartTime); // depends on control dependency: [for], data = [none]
final VoltTable departResult = voltExecuteSQL()[0];
while (departResult.advanceRow()) {
final TimestampType departTime = departResult.getTimestampAsTimestamp("time");
voltQueueSQL(getWaitTimeForTrain, departTime, station, lastDepartTime, departTime); // depends on control dependency: [while], data = [none]
lastDepartTime = departTime; // depends on control dependency: [while], data = [none]
}
long totalWaitTime = 0;
long totalEntries = 0;
if (departResult.getRowCount() > 0) {
final VoltTable[] waitTimeResult = voltExecuteSQL();
for (VoltTable res : waitTimeResult) {
if (res.advanceRow()) {
final long entries = res.getLong("entries");
final long waitTime = res.getLong("wait_sum");
// Don't record empty train
if (entries > 0) {
totalEntries += entries; // depends on control dependency: [if], data = [none]
totalWaitTime += waitTime; // depends on control dependency: [if], data = [none]
}
}
}
}
stationStats.add(new StationStats(station, lastDepartTime, totalWaitTime, totalEntries)); // depends on control dependency: [for], data = [none]
}
return stationStats;
} } |
public class class_name {
public List<?> formatJSON2List(String json) {
List<?> list = null;
try {
list = MAPPER.readValue(json, List.class);
} catch (Exception e) {
LOGGER.error("formatJSON2List error, json = " + json, e);
}
return list;
} } | public class class_name {
public List<?> formatJSON2List(String json) {
List<?> list = null;
try {
list = MAPPER.readValue(json, List.class); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
LOGGER.error("formatJSON2List error, json = " + json, e);
} // depends on control dependency: [catch], data = [none]
return list;
} } |
public class class_name {
private Set<Predicate> copyPredicatesSet() {
Set<Predicate> predicatesCopy = Sets.newLinkedHashSet();
for (Predicate predicate : this.predicates) {
Predicate copyPredicate = new Predicate();
copyPredicate.setField(predicate.getField());
copyPredicate.setOperator(predicate.getOperator());
copyPredicate.setValues(Arrays.copyOf(predicate.getValues(), predicate.getValues().length));
predicatesCopy.add(copyPredicate);
}
return predicatesCopy;
} } | public class class_name {
private Set<Predicate> copyPredicatesSet() {
Set<Predicate> predicatesCopy = Sets.newLinkedHashSet();
for (Predicate predicate : this.predicates) {
Predicate copyPredicate = new Predicate();
copyPredicate.setField(predicate.getField()); // depends on control dependency: [for], data = [predicate]
copyPredicate.setOperator(predicate.getOperator()); // depends on control dependency: [for], data = [predicate]
copyPredicate.setValues(Arrays.copyOf(predicate.getValues(), predicate.getValues().length)); // depends on control dependency: [for], data = [predicate]
predicatesCopy.add(copyPredicate); // depends on control dependency: [for], data = [predicate]
}
return predicatesCopy;
} } |
public class class_name {
public final void loadAll(Properties ps)
{
//表示如果Field是static的,则obj即便给它传值,JVM也会忽略的。还说明了,此入参在这种情况下可以为null
for(Field f:getClass().getDeclaredFields())
{
try {
setFiled(f,ps.getProperty(f.getName()));
} catch (Exception e) {
log.error(e.getMessage(),e);
}
}
} } | public class class_name {
public final void loadAll(Properties ps)
{
//表示如果Field是static的,则obj即便给它传值,JVM也会忽略的。还说明了,此入参在这种情况下可以为null
for(Field f:getClass().getDeclaredFields())
{
try {
setFiled(f,ps.getProperty(f.getName()));
// depends on control dependency: [try], data = [none]
} catch (Exception e) {
log.error(e.getMessage(),e);
}
// depends on control dependency: [catch], data = [none]
}
} } |
public class class_name {
public void marshall(PatchGroupPatchBaselineMapping patchGroupPatchBaselineMapping, ProtocolMarshaller protocolMarshaller) {
if (patchGroupPatchBaselineMapping == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(patchGroupPatchBaselineMapping.getPatchGroup(), PATCHGROUP_BINDING);
protocolMarshaller.marshall(patchGroupPatchBaselineMapping.getBaselineIdentity(), BASELINEIDENTITY_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(PatchGroupPatchBaselineMapping patchGroupPatchBaselineMapping, ProtocolMarshaller protocolMarshaller) {
if (patchGroupPatchBaselineMapping == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(patchGroupPatchBaselineMapping.getPatchGroup(), PATCHGROUP_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(patchGroupPatchBaselineMapping.getBaselineIdentity(), BASELINEIDENTITY_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
@SuppressWarnings("unchecked")
public void emitLogsAndMetrics(
@Nullable final Throwable e,
@Nullable final String remoteAddress,
final long bytesWritten
)
{
if (baseQuery == null) {
// Never initialized, don't log or emit anything.
return;
}
if (state == State.DONE) {
log.warn("Tried to emit logs and metrics twice for query[%s]!", baseQuery.getId());
}
state = State.DONE;
final boolean success = e == null;
try {
final long queryTimeNs = System.nanoTime() - startNs;
QueryMetrics queryMetrics = DruidMetrics.makeRequestMetrics(
queryMetricsFactory,
toolChest,
baseQuery,
StringUtils.nullToEmptyNonDruidDataString(remoteAddress)
);
queryMetrics.success(success);
queryMetrics.reportQueryTime(queryTimeNs);
if (bytesWritten >= 0) {
queryMetrics.reportQueryBytes(bytesWritten);
}
if (authenticationResult != null) {
queryMetrics.identity(authenticationResult.getIdentity());
}
queryMetrics.emit(emitter);
final Map<String, Object> statsMap = new LinkedHashMap<>();
statsMap.put("query/time", TimeUnit.NANOSECONDS.toMillis(queryTimeNs));
statsMap.put("query/bytes", bytesWritten);
statsMap.put("success", success);
if (authenticationResult != null) {
statsMap.put("identity", authenticationResult.getIdentity());
}
if (e != null) {
statsMap.put("exception", e.toString());
if (e instanceof QueryInterruptedException) {
// Mimic behavior from QueryResource, where this code was originally taken from.
log.warn(e, "Exception while processing queryId [%s]", baseQuery.getId());
statsMap.put("interrupted", true);
statsMap.put("reason", e.toString());
}
}
requestLogger.logNativeQuery(
RequestLogLine.forNative(
baseQuery,
DateTimes.utc(startMs),
StringUtils.nullToEmptyNonDruidDataString(remoteAddress),
new QueryStats(statsMap)
)
);
}
catch (Exception ex) {
log.error(ex, "Unable to log query [%s]!", baseQuery);
}
} } | public class class_name {
@SuppressWarnings("unchecked")
public void emitLogsAndMetrics(
@Nullable final Throwable e,
@Nullable final String remoteAddress,
final long bytesWritten
)
{
if (baseQuery == null) {
// Never initialized, don't log or emit anything.
return; // depends on control dependency: [if], data = [none]
}
if (state == State.DONE) {
log.warn("Tried to emit logs and metrics twice for query[%s]!", baseQuery.getId()); // depends on control dependency: [if], data = [none]
}
state = State.DONE;
final boolean success = e == null;
try {
final long queryTimeNs = System.nanoTime() - startNs;
QueryMetrics queryMetrics = DruidMetrics.makeRequestMetrics(
queryMetricsFactory,
toolChest,
baseQuery,
StringUtils.nullToEmptyNonDruidDataString(remoteAddress)
);
queryMetrics.success(success); // depends on control dependency: [try], data = [none]
queryMetrics.reportQueryTime(queryTimeNs); // depends on control dependency: [try], data = [none]
if (bytesWritten >= 0) {
queryMetrics.reportQueryBytes(bytesWritten); // depends on control dependency: [if], data = [(bytesWritten]
}
if (authenticationResult != null) {
queryMetrics.identity(authenticationResult.getIdentity()); // depends on control dependency: [if], data = [(authenticationResult]
}
queryMetrics.emit(emitter); // depends on control dependency: [try], data = [none]
final Map<String, Object> statsMap = new LinkedHashMap<>();
statsMap.put("query/time", TimeUnit.NANOSECONDS.toMillis(queryTimeNs)); // depends on control dependency: [try], data = [none]
statsMap.put("query/bytes", bytesWritten); // depends on control dependency: [try], data = [none]
statsMap.put("success", success); // depends on control dependency: [try], data = [none]
if (authenticationResult != null) {
statsMap.put("identity", authenticationResult.getIdentity()); // depends on control dependency: [if], data = [none]
}
if (e != null) {
statsMap.put("exception", e.toString()); // depends on control dependency: [if], data = [none]
if (e instanceof QueryInterruptedException) {
// Mimic behavior from QueryResource, where this code was originally taken from.
log.warn(e, "Exception while processing queryId [%s]", baseQuery.getId()); // depends on control dependency: [if], data = [none]
statsMap.put("interrupted", true); // depends on control dependency: [if], data = [none]
statsMap.put("reason", e.toString()); // depends on control dependency: [if], data = [none]
}
}
requestLogger.logNativeQuery(
RequestLogLine.forNative(
baseQuery,
DateTimes.utc(startMs),
StringUtils.nullToEmptyNonDruidDataString(remoteAddress),
new QueryStats(statsMap)
)
); // depends on control dependency: [try], data = [none]
}
catch (Exception ex) {
log.error(ex, "Unable to log query [%s]!", baseQuery);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public static String getTagAttr(StringBuilder page, final String tag,
final String attr) {
String found = null;
Pattern daPattern = TagMagix.getPattern(tag, attr);
Matcher matcher = daPattern.matcher(page);
int idx = 0;
if (matcher.find(idx)) {
found = matcher.group(1);
found = trimAttrValue(found);
}
return found;
} } | public class class_name {
public static String getTagAttr(StringBuilder page, final String tag,
final String attr) {
String found = null;
Pattern daPattern = TagMagix.getPattern(tag, attr);
Matcher matcher = daPattern.matcher(page);
int idx = 0;
if (matcher.find(idx)) {
found = matcher.group(1); // depends on control dependency: [if], data = [none]
found = trimAttrValue(found); // depends on control dependency: [if], data = [none]
}
return found;
} } |
public class class_name {
public static long toLongValue(Object o) throws PageException {
if (o instanceof Boolean) return ((((Boolean) o).booleanValue()) ? 1L : 0L);
else if (o instanceof Number) return (((Number) o).longValue());
else if (o instanceof CharSequence) {
String str = o.toString();
try {
return Long.parseLong(str);
}
catch (NumberFormatException nfe) {
return (long) toDoubleValue(str);
}
}
else if (o instanceof Character) return (((Character) o).charValue());
else if (o instanceof Castable) return (long) ((Castable) o).castToDoubleValue();
else if (o instanceof ObjectWrap) return toLongValue(((ObjectWrap) o).getEmbededObject());
throw new CasterException(o, "long");
} } | public class class_name {
public static long toLongValue(Object o) throws PageException {
if (o instanceof Boolean) return ((((Boolean) o).booleanValue()) ? 1L : 0L);
else if (o instanceof Number) return (((Number) o).longValue());
else if (o instanceof CharSequence) {
String str = o.toString();
try {
return Long.parseLong(str); // depends on control dependency: [try], data = [none]
}
catch (NumberFormatException nfe) {
return (long) toDoubleValue(str);
} // depends on control dependency: [catch], data = [none]
}
else if (o instanceof Character) return (((Character) o).charValue());
else if (o instanceof Castable) return (long) ((Castable) o).castToDoubleValue();
else if (o instanceof ObjectWrap) return toLongValue(((ObjectWrap) o).getEmbededObject());
throw new CasterException(o, "long");
} } |
public class class_name {
public static Point get(Geometry geometry) {
// Get polygon from geometry
Polygon polygon;
if (geometry instanceof LineString) {
// Validity of LineString is checked outside of this scope
polygon = geometry.getFactory().createPolygon(geometry.getCoordinates());
} else if (geometry instanceof MultiPolygon) {
// For MultiPolygon the widest Polygon is taken
Geometry widestGeometry = geometry.getGeometryN(0);
for (int i = 1; i < geometry.getNumGeometries(); i++) {
if (geometry.getGeometryN(i).getEnvelopeInternal().getWidth() >
widestGeometry.getEnvelopeInternal().getWidth()) {
widestGeometry = geometry.getGeometryN(i);
}
}
polygon = (Polygon) widestGeometry;
} else if (geometry instanceof Polygon) {
polygon = (Polygon) geometry;
} else {
LOGGER.warning("Failed to get label for geometry: " + geometry);
return geometry.getCentroid();
}
// As original geometry is used in other places clone it before re-projecting
polygon = (Polygon) polygon.clone();
// Re-project coordinates. This is needed to get proper visual results for polygons
// distorted my Mercator projection
polygon.apply(new CoordinateFilter() {
@Override
public void filter(Coordinate c) {
c.x = longitudeToX(c.x);
c.y = latitudeToY(c.y);
}
});
polygon.geometryChanged();
Envelope envelope = polygon.getEnvelopeInternal();
double width = envelope.getWidth();
double height = envelope.getHeight();
double cellSize = Math.min(width, height);
double h = cellSize / 2;
// A priority queue of cells in order of their "potential" (max distance to polygon)
PriorityQueue<Cell> cellQueue = new PriorityQueue<>(1, new MaxComparator());
// Cover polygon with initial cells
for (double x = envelope.getMinX(); x < envelope.getMaxX(); x += cellSize) {
for (double y = envelope.getMinY(); y < envelope.getMaxY(); y += cellSize) {
cellQueue.add(new Cell(x + h, y + h, h, polygon));
}
}
// Take centroid as the first best guess
Cell bestCell = getCentroidCell(polygon);
// Special case for rectangular polygons
Cell bboxCell = new Cell(envelope.centre().x, envelope.centre().y, 0, polygon);
if (bboxCell.d > bestCell.d) bestCell = bboxCell;
while (!cellQueue.isEmpty()) {
// Pick the most promising cell from the queue
Cell cell = cellQueue.remove();
// Update the best cell if we found a better one
if (cell.d > bestCell.d)
bestCell = cell;
// Do not drill down further if there's no chance of a better solution
if (cell.max - bestCell.d <= PRECISION) continue;
// Split the cell into four cells
h = cell.h / 2;
cellQueue.add(new Cell(cell.x - h, cell.y - h, h, polygon));
cellQueue.add(new Cell(cell.x + h, cell.y - h, h, polygon));
cellQueue.add(new Cell(cell.x - h, cell.y + h, h, polygon));
cellQueue.add(new Cell(cell.x + h, cell.y + h, h, polygon));
}
// Return the best found point projected back to geodesic coordinates
return geometry.getFactory().createPoint(new Coordinate(toLongitude(bestCell.x), toLatitude(bestCell.y)));
} } | public class class_name {
public static Point get(Geometry geometry) {
// Get polygon from geometry
Polygon polygon;
if (geometry instanceof LineString) {
// Validity of LineString is checked outside of this scope
polygon = geometry.getFactory().createPolygon(geometry.getCoordinates()); // depends on control dependency: [if], data = [none]
} else if (geometry instanceof MultiPolygon) {
// For MultiPolygon the widest Polygon is taken
Geometry widestGeometry = geometry.getGeometryN(0);
for (int i = 1; i < geometry.getNumGeometries(); i++) {
if (geometry.getGeometryN(i).getEnvelopeInternal().getWidth() >
widestGeometry.getEnvelopeInternal().getWidth()) {
widestGeometry = geometry.getGeometryN(i); // depends on control dependency: [if], data = [none]
}
}
polygon = (Polygon) widestGeometry; // depends on control dependency: [if], data = [none]
} else if (geometry instanceof Polygon) {
polygon = (Polygon) geometry; // depends on control dependency: [if], data = [none]
} else {
LOGGER.warning("Failed to get label for geometry: " + geometry); // depends on control dependency: [if], data = [none]
return geometry.getCentroid(); // depends on control dependency: [if], data = [none]
}
// As original geometry is used in other places clone it before re-projecting
polygon = (Polygon) polygon.clone();
// Re-project coordinates. This is needed to get proper visual results for polygons
// distorted my Mercator projection
polygon.apply(new CoordinateFilter() {
@Override
public void filter(Coordinate c) {
c.x = longitudeToX(c.x);
c.y = latitudeToY(c.y);
}
});
polygon.geometryChanged();
Envelope envelope = polygon.getEnvelopeInternal();
double width = envelope.getWidth();
double height = envelope.getHeight();
double cellSize = Math.min(width, height);
double h = cellSize / 2;
// A priority queue of cells in order of their "potential" (max distance to polygon)
PriorityQueue<Cell> cellQueue = new PriorityQueue<>(1, new MaxComparator());
// Cover polygon with initial cells
for (double x = envelope.getMinX(); x < envelope.getMaxX(); x += cellSize) {
for (double y = envelope.getMinY(); y < envelope.getMaxY(); y += cellSize) {
cellQueue.add(new Cell(x + h, y + h, h, polygon)); // depends on control dependency: [for], data = [y]
}
}
// Take centroid as the first best guess
Cell bestCell = getCentroidCell(polygon);
// Special case for rectangular polygons
Cell bboxCell = new Cell(envelope.centre().x, envelope.centre().y, 0, polygon);
if (bboxCell.d > bestCell.d) bestCell = bboxCell;
while (!cellQueue.isEmpty()) {
// Pick the most promising cell from the queue
Cell cell = cellQueue.remove();
// Update the best cell if we found a better one
if (cell.d > bestCell.d)
bestCell = cell;
// Do not drill down further if there's no chance of a better solution
if (cell.max - bestCell.d <= PRECISION) continue;
// Split the cell into four cells
h = cell.h / 2; // depends on control dependency: [while], data = [none]
cellQueue.add(new Cell(cell.x - h, cell.y - h, h, polygon)); // depends on control dependency: [while], data = [none]
cellQueue.add(new Cell(cell.x + h, cell.y - h, h, polygon)); // depends on control dependency: [while], data = [none]
cellQueue.add(new Cell(cell.x - h, cell.y + h, h, polygon)); // depends on control dependency: [while], data = [none]
cellQueue.add(new Cell(cell.x + h, cell.y + h, h, polygon)); // depends on control dependency: [while], data = [none]
}
// Return the best found point projected back to geodesic coordinates
return geometry.getFactory().createPoint(new Coordinate(toLongitude(bestCell.x), toLatitude(bestCell.y)));
} } |
public class class_name {
public static StringBuffer insert(StringBuffer target, int offset16, int char32) {
String str = valueOf(char32);
if (offset16 != target.length() && bounds(target, offset16) == TRAIL_SURROGATE_BOUNDARY) {
offset16++;
}
target.insert(offset16, str);
return target;
} } | public class class_name {
public static StringBuffer insert(StringBuffer target, int offset16, int char32) {
String str = valueOf(char32);
if (offset16 != target.length() && bounds(target, offset16) == TRAIL_SURROGATE_BOUNDARY) {
offset16++; // depends on control dependency: [if], data = [none]
}
target.insert(offset16, str);
return target;
} } |
public class class_name {
public void marshall(AgentListEntry agentListEntry, ProtocolMarshaller protocolMarshaller) {
if (agentListEntry == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(agentListEntry.getAgentArn(), AGENTARN_BINDING);
protocolMarshaller.marshall(agentListEntry.getName(), NAME_BINDING);
protocolMarshaller.marshall(agentListEntry.getStatus(), STATUS_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(AgentListEntry agentListEntry, ProtocolMarshaller protocolMarshaller) {
if (agentListEntry == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(agentListEntry.getAgentArn(), AGENTARN_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(agentListEntry.getName(), NAME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(agentListEntry.getStatus(), STATUS_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
@Override
protected FileInfo getFileInfoFromInputDataImpl(String[] inputData)
{
//get amount
int amount=inputData.length;
FileInfo fileInfo=null;
if((amount%2)==0)
{
String argument=null;
String value=null;
for(int index=0;index<inputData.length;index=index+2)
{
//get next argument and value
argument=inputData[index];
value=inputData[index+1];
if(argument.equals("-file")) //only looks for file argument, the update fax job will validate the full command line arguments for correctness
{
//get file
File file=new File(value);
//create file info
fileInfo=new FileInfo(file);
break;
}
}
}
else
{
throw new FaxException("Invalid input provided, expected even number of arguments.");
}
return fileInfo;
} } | public class class_name {
@Override
protected FileInfo getFileInfoFromInputDataImpl(String[] inputData)
{
//get amount
int amount=inputData.length;
FileInfo fileInfo=null;
if((amount%2)==0)
{
String argument=null;
String value=null;
for(int index=0;index<inputData.length;index=index+2)
{
//get next argument and value
argument=inputData[index]; // depends on control dependency: [for], data = [index]
value=inputData[index+1]; // depends on control dependency: [for], data = [index]
if(argument.equals("-file")) //only looks for file argument, the update fax job will validate the full command line arguments for correctness
{
//get file
File file=new File(value);
//create file info
fileInfo=new FileInfo(file); // depends on control dependency: [if], data = [none]
break;
}
}
}
else
{
throw new FaxException("Invalid input provided, expected even number of arguments.");
}
return fileInfo;
} } |
public class class_name {
protected String extractFirstName(String name) {
if (StringUtils.isBlank(name)) {
return null;
}
int lastIndexOf = name.lastIndexOf(' ');
if (lastIndexOf == -1)
return null;
else
return name.substring(0, lastIndexOf);
} } | public class class_name {
protected String extractFirstName(String name) {
if (StringUtils.isBlank(name)) {
return null; // depends on control dependency: [if], data = [none]
}
int lastIndexOf = name.lastIndexOf(' ');
if (lastIndexOf == -1)
return null;
else
return name.substring(0, lastIndexOf);
} } |
public class class_name {
public void requestValue(int nodeId, int endpoint) {
ZWaveNode node = this.getNode(nodeId);
ZWaveGetCommands zwaveCommandClass = null;
SerialMessage serialMessage = null;
for (ZWaveCommandClass.CommandClass commandClass : new ZWaveCommandClass.CommandClass[] { ZWaveCommandClass.CommandClass.SENSOR_BINARY, ZWaveCommandClass.CommandClass.SENSOR_ALARM, ZWaveCommandClass.CommandClass.SENSOR_MULTILEVEL, ZWaveCommandClass.CommandClass.SWITCH_MULTILEVEL, ZWaveCommandClass.CommandClass.SWITCH_BINARY, ZWaveCommandClass.CommandClass.BASIC }) {
zwaveCommandClass = (ZWaveGetCommands)node.resolveCommandClass(commandClass, endpoint);
if (zwaveCommandClass != null)
break;
}
if (zwaveCommandClass == null) {
logger.error("No Command Class found on node {}, instance/endpoint {} to request value.", nodeId, endpoint);
return;
}
serialMessage = node.encapsulate(zwaveCommandClass.getValueMessage(), (ZWaveCommandClass)zwaveCommandClass, endpoint);
if (serialMessage != null)
this.sendData(serialMessage);
} } | public class class_name {
public void requestValue(int nodeId, int endpoint) {
ZWaveNode node = this.getNode(nodeId);
ZWaveGetCommands zwaveCommandClass = null;
SerialMessage serialMessage = null;
for (ZWaveCommandClass.CommandClass commandClass : new ZWaveCommandClass.CommandClass[] { ZWaveCommandClass.CommandClass.SENSOR_BINARY, ZWaveCommandClass.CommandClass.SENSOR_ALARM, ZWaveCommandClass.CommandClass.SENSOR_MULTILEVEL, ZWaveCommandClass.CommandClass.SWITCH_MULTILEVEL, ZWaveCommandClass.CommandClass.SWITCH_BINARY, ZWaveCommandClass.CommandClass.BASIC }) {
zwaveCommandClass = (ZWaveGetCommands)node.resolveCommandClass(commandClass, endpoint); // depends on control dependency: [for], data = [commandClass]
if (zwaveCommandClass != null)
break;
}
if (zwaveCommandClass == null) {
logger.error("No Command Class found on node {}, instance/endpoint {} to request value.", nodeId, endpoint); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
serialMessage = node.encapsulate(zwaveCommandClass.getValueMessage(), (ZWaveCommandClass)zwaveCommandClass, endpoint);
if (serialMessage != null)
this.sendData(serialMessage);
} } |
public class class_name {
@Nullable
public static String setVariantForView(@NonNull View view, @NonNull AttributeSet attrs) {
String previousVariant;
final TypedArray styledAttributes = view.getContext().getTheme()
.obtainStyledAttributes(attrs, R.styleable.View, 0, 0);
try {
previousVariant = setVariantForView(view, styledAttributes
.getString(R.styleable.View_variant));
} finally {
styledAttributes.recycle();
}
return previousVariant;
} } | public class class_name {
@Nullable
public static String setVariantForView(@NonNull View view, @NonNull AttributeSet attrs) {
String previousVariant;
final TypedArray styledAttributes = view.getContext().getTheme()
.obtainStyledAttributes(attrs, R.styleable.View, 0, 0);
try {
previousVariant = setVariantForView(view, styledAttributes
.getString(R.styleable.View_variant)); // depends on control dependency: [try], data = [none]
} finally {
styledAttributes.recycle();
}
return previousVariant;
} } |
public class class_name {
private void addCommonHandlers(ChannelPipeline pipeline) {
pipeline.addLast(Constants.DECOMPRESSOR_HANDLER, new HttpContentDecompressor());
if (httpTraceLogEnabled) {
pipeline.addLast(Constants.HTTP_TRACE_LOG_HANDLER,
new HttpTraceLoggingHandler(Constants.TRACE_LOG_UPSTREAM));
}
} } | public class class_name {
private void addCommonHandlers(ChannelPipeline pipeline) {
pipeline.addLast(Constants.DECOMPRESSOR_HANDLER, new HttpContentDecompressor());
if (httpTraceLogEnabled) {
pipeline.addLast(Constants.HTTP_TRACE_LOG_HANDLER,
new HttpTraceLoggingHandler(Constants.TRACE_LOG_UPSTREAM)); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public String encode(String... values) {
ImmutableMap.Builder<String, String> builder = ImmutableMap.builder();
int i = 0;
for (String value : values) {
builder.put("$" + i++, value);
}
// We will get an error if there are named bindings which are not reached by values.
return instantiate(builder.build());
} } | public class class_name {
public String encode(String... values) {
ImmutableMap.Builder<String, String> builder = ImmutableMap.builder();
int i = 0;
for (String value : values) {
builder.put("$" + i++, value); // depends on control dependency: [for], data = [value]
}
// We will get an error if there are named bindings which are not reached by values.
return instantiate(builder.build());
} } |
public class class_name {
private static void mergeApiQueryParamDoc(ApiQueryParam apiQueryParam, ApiParamDoc apiParamDoc) {
if (apiQueryParam != null) {
if (apiParamDoc.getName().trim().isEmpty()) {
apiParamDoc.setName(apiQueryParam.name());
}
apiParamDoc.setDescription(apiQueryParam.description());
apiParamDoc.setAllowedvalues(apiQueryParam.allowedvalues());
apiParamDoc.setFormat(apiQueryParam.format());
}
} } | public class class_name {
private static void mergeApiQueryParamDoc(ApiQueryParam apiQueryParam, ApiParamDoc apiParamDoc) {
if (apiQueryParam != null) {
if (apiParamDoc.getName().trim().isEmpty()) {
apiParamDoc.setName(apiQueryParam.name()); // depends on control dependency: [if], data = [none]
}
apiParamDoc.setDescription(apiQueryParam.description()); // depends on control dependency: [if], data = [(apiQueryParam]
apiParamDoc.setAllowedvalues(apiQueryParam.allowedvalues()); // depends on control dependency: [if], data = [(apiQueryParam]
apiParamDoc.setFormat(apiQueryParam.format()); // depends on control dependency: [if], data = [(apiQueryParam]
}
} } |
public class class_name {
public static Document getDomFromString( String str )
{
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
factory.setNamespaceAware( true );
try {
DocumentBuilder builder = factory.newDocumentBuilder();
return builder.parse( new InputSource( new StringReader( str ) ) );
} catch ( Exception e ) {
throw new CStorageException( "Error parsing the Xml response: " + str, e );
}
} } | public class class_name {
public static Document getDomFromString( String str )
{
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
factory.setNamespaceAware( true );
try {
DocumentBuilder builder = factory.newDocumentBuilder();
return builder.parse( new InputSource( new StringReader( str ) ) ); // depends on control dependency: [try], data = [none]
} catch ( Exception e ) {
throw new CStorageException( "Error parsing the Xml response: " + str, e );
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
protected boolean isTableConstraint( DdlTokenStream tokens ) throws ParsingException {
boolean result = false;
if ((tokens.matches("PRIMARY", "KEY")) || (tokens.matches("FOREIGN", "KEY")) || (tokens.matches("UNIQUE")) || tokens.matches(
CHECK)) {
result = true;
} else if (tokens.matches(CONSTRAINT)) {
if (tokens.matches(CONSTRAINT, TokenStream.ANY_VALUE, "UNIQUE")
|| tokens.matches(CONSTRAINT, TokenStream.ANY_VALUE, "PRIMARY", "KEY")
|| tokens.matches(CONSTRAINT, TokenStream.ANY_VALUE, "FOREIGN", "KEY")
|| tokens.matches(CONSTRAINT, TokenStream.ANY_VALUE, CHECK)) {
result = true;
}
}
return result;
} } | public class class_name {
protected boolean isTableConstraint( DdlTokenStream tokens ) throws ParsingException {
boolean result = false;
if ((tokens.matches("PRIMARY", "KEY")) || (tokens.matches("FOREIGN", "KEY")) || (tokens.matches("UNIQUE")) || tokens.matches(
CHECK)) {
result = true;
} else if (tokens.matches(CONSTRAINT)) {
if (tokens.matches(CONSTRAINT, TokenStream.ANY_VALUE, "UNIQUE")
|| tokens.matches(CONSTRAINT, TokenStream.ANY_VALUE, "PRIMARY", "KEY")
|| tokens.matches(CONSTRAINT, TokenStream.ANY_VALUE, "FOREIGN", "KEY")
|| tokens.matches(CONSTRAINT, TokenStream.ANY_VALUE, CHECK)) {
result = true; // depends on control dependency: [if], data = [none]
}
}
return result;
} } |
public class class_name {
private Mapping extractProjectsAndMapping(
Aggregate aggregate,
RelNode input,
RelBuilder relBuilder) {
// Compute which input fields are used.
final ImmutableBitSet.Builder inputFieldsUsed = getInputFieldUsed(aggregate, input);
final List<RexNode> projects = new ArrayList<>();
final Mapping mapping =
Mappings.create(MappingType.INVERSE_SURJECTION,
aggregate.getInput().getRowType().getFieldCount(),
inputFieldsUsed.cardinality());
int j = 0;
for (int i : inputFieldsUsed.build()) {
projects.add(relBuilder.field(i));
mapping.set(i, j++);
}
if (input instanceof Project) {
// this will not create trivial projects
relBuilder.project(projects);
} else {
relBuilder.project(projects, Collections.emptyList(), true);
}
return mapping;
} } | public class class_name {
private Mapping extractProjectsAndMapping(
Aggregate aggregate,
RelNode input,
RelBuilder relBuilder) {
// Compute which input fields are used.
final ImmutableBitSet.Builder inputFieldsUsed = getInputFieldUsed(aggregate, input);
final List<RexNode> projects = new ArrayList<>();
final Mapping mapping =
Mappings.create(MappingType.INVERSE_SURJECTION,
aggregate.getInput().getRowType().getFieldCount(),
inputFieldsUsed.cardinality());
int j = 0;
for (int i : inputFieldsUsed.build()) {
projects.add(relBuilder.field(i)); // depends on control dependency: [for], data = [i]
mapping.set(i, j++); // depends on control dependency: [for], data = [i]
}
if (input instanceof Project) {
// this will not create trivial projects
relBuilder.project(projects); // depends on control dependency: [if], data = [none]
} else {
relBuilder.project(projects, Collections.emptyList(), true); // depends on control dependency: [if], data = [none]
}
return mapping;
} } |
public class class_name {
synchronized public void addDTM(DTM dtm, int id, int offset)
{
if(id>=IDENT_MAX_DTMS)
{
// TODO: %REVIEW% Not really the right error message.
throw new DTMException(XMLMessages.createXMLMessage(XMLErrorResources.ER_NO_DTMIDS_AVAIL, null)); //"No more DTM IDs are available!");
}
// We used to just allocate the array size to IDENT_MAX_DTMS.
// But we expect to increase that to 16 bits, and I'm not willing
// to allocate that much space unless needed. We could use one of our
// handy-dandy Fast*Vectors, but this will do for now.
// %REVIEW%
int oldlen=m_dtms.length;
if(oldlen<=id)
{
// Various growth strategies are possible. I think we don't want
// to over-allocate excessively, and I'm willing to reallocate
// more often to get that. See also Fast*Vector classes.
//
// %REVIEW% Should throw a more diagnostic error if we go over the max...
int newlen=Math.min((id+256),IDENT_MAX_DTMS);
DTM new_m_dtms[] = new DTM[newlen];
System.arraycopy(m_dtms,0,new_m_dtms,0,oldlen);
m_dtms=new_m_dtms;
int new_m_dtm_offsets[] = new int[newlen];
System.arraycopy(m_dtm_offsets,0,new_m_dtm_offsets,0,oldlen);
m_dtm_offsets=new_m_dtm_offsets;
}
m_dtms[id] = dtm;
m_dtm_offsets[id]=offset;
dtm.documentRegistration();
// The DTM should have been told who its manager was when we created it.
// Do we need to allow for adopting DTMs _not_ created by this manager?
} } | public class class_name {
synchronized public void addDTM(DTM dtm, int id, int offset)
{
if(id>=IDENT_MAX_DTMS)
{
// TODO: %REVIEW% Not really the right error message.
throw new DTMException(XMLMessages.createXMLMessage(XMLErrorResources.ER_NO_DTMIDS_AVAIL, null)); //"No more DTM IDs are available!");
}
// We used to just allocate the array size to IDENT_MAX_DTMS.
// But we expect to increase that to 16 bits, and I'm not willing
// to allocate that much space unless needed. We could use one of our
// handy-dandy Fast*Vectors, but this will do for now.
// %REVIEW%
int oldlen=m_dtms.length;
if(oldlen<=id)
{
// Various growth strategies are possible. I think we don't want
// to over-allocate excessively, and I'm willing to reallocate
// more often to get that. See also Fast*Vector classes.
//
// %REVIEW% Should throw a more diagnostic error if we go over the max...
int newlen=Math.min((id+256),IDENT_MAX_DTMS);
DTM new_m_dtms[] = new DTM[newlen];
System.arraycopy(m_dtms,0,new_m_dtms,0,oldlen); // depends on control dependency: [if], data = [none]
m_dtms=new_m_dtms; // depends on control dependency: [if], data = [none]
int new_m_dtm_offsets[] = new int[newlen];
System.arraycopy(m_dtm_offsets,0,new_m_dtm_offsets,0,oldlen); // depends on control dependency: [if], data = [none]
m_dtm_offsets=new_m_dtm_offsets; // depends on control dependency: [if], data = [none]
}
m_dtms[id] = dtm;
m_dtm_offsets[id]=offset;
dtm.documentRegistration();
// The DTM should have been told who its manager was when we created it.
// Do we need to allow for adopting DTMs _not_ created by this manager?
} } |
public class class_name {
private void replaceMentionSpansWithPlaceholdersAsNecessary(@NonNull CharSequence text) {
int index = getSelectionStart();
int wordStart = findStartOfWord(text, index);
Editable editable = getText();
MentionSpan[] mentionSpansInCurrentWord = editable.getSpans(wordStart, index, MentionSpan.class);
for (MentionSpan span : mentionSpansInCurrentWord) {
if (span.getDisplayMode() != Mentionable.MentionDisplayMode.NONE) {
int spanStart = editable.getSpanStart(span);
int spanEnd = editable.getSpanEnd(span);
editable.setSpan(new PlaceholderSpan(span, spanStart, spanEnd),
spanStart, spanEnd, Spanned.SPAN_EXCLUSIVE_INCLUSIVE);
editable.removeSpan(span);
}
}
} } | public class class_name {
private void replaceMentionSpansWithPlaceholdersAsNecessary(@NonNull CharSequence text) {
int index = getSelectionStart();
int wordStart = findStartOfWord(text, index);
Editable editable = getText();
MentionSpan[] mentionSpansInCurrentWord = editable.getSpans(wordStart, index, MentionSpan.class);
for (MentionSpan span : mentionSpansInCurrentWord) {
if (span.getDisplayMode() != Mentionable.MentionDisplayMode.NONE) {
int spanStart = editable.getSpanStart(span);
int spanEnd = editable.getSpanEnd(span);
editable.setSpan(new PlaceholderSpan(span, spanStart, spanEnd),
spanStart, spanEnd, Spanned.SPAN_EXCLUSIVE_INCLUSIVE); // depends on control dependency: [if], data = [none]
editable.removeSpan(span); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
@Override
public EEnum getIfcSpaceHeaterTypeEnum() {
if (ifcSpaceHeaterTypeEnumEEnum == null) {
ifcSpaceHeaterTypeEnumEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
.getEClassifiers().get(1066);
}
return ifcSpaceHeaterTypeEnumEEnum;
} } | public class class_name {
@Override
public EEnum getIfcSpaceHeaterTypeEnum() {
if (ifcSpaceHeaterTypeEnumEEnum == null) {
ifcSpaceHeaterTypeEnumEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
.getEClassifiers().get(1066);
// depends on control dependency: [if], data = [none]
}
return ifcSpaceHeaterTypeEnumEEnum;
} } |
public class class_name {
@Override
public EClass getIfcBoundaryNodeCondition() {
if (ifcBoundaryNodeConditionEClass == null) {
ifcBoundaryNodeConditionEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
.getEClassifiers().get(51);
}
return ifcBoundaryNodeConditionEClass;
} } | public class class_name {
@Override
public EClass getIfcBoundaryNodeCondition() {
if (ifcBoundaryNodeConditionEClass == null) {
ifcBoundaryNodeConditionEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
.getEClassifiers().get(51);
// depends on control dependency: [if], data = [none]
}
return ifcBoundaryNodeConditionEClass;
} } |
public class class_name {
public synchronized WorldState next() throws Exception {
if (this.error != null) {
throw this.error;
}
// In case next() is called after completed with no states remaining
if (this.complete && this.states.isEmpty()) {
throw new IllegalStateException("No world states remaining.");
}
try {
// Will block until something is available
WorldState ws = this.states.take();
if (ws != RESPONSE_COMPLETE) {
return ws;
}
} catch (InterruptedException e) {
// This should only happen once the request is complete
}
// If we were interrupted because of an exception, be sure to throw it
if (this.error != null) {
throw this.error;
}
// At this point, we should have an empty queue
throw new IllegalStateException("Called next() on an empty StepResponse.");
} } | public class class_name {
public synchronized WorldState next() throws Exception {
if (this.error != null) {
throw this.error;
}
// In case next() is called after completed with no states remaining
if (this.complete && this.states.isEmpty()) {
throw new IllegalStateException("No world states remaining.");
}
try {
// Will block until something is available
WorldState ws = this.states.take();
if (ws != RESPONSE_COMPLETE) {
return ws; // depends on control dependency: [if], data = [none]
}
} catch (InterruptedException e) {
// This should only happen once the request is complete
}
// If we were interrupted because of an exception, be sure to throw it
if (this.error != null) {
throw this.error;
}
// At this point, we should have an empty queue
throw new IllegalStateException("Called next() on an empty StepResponse.");
} } |
public class class_name {
static public FeatureCollectionConfig getConfigFromSnippet(String filename) {
org.jdom2.Document doc;
try {
SAXBuilder builder = new SAXBuilder();
doc = builder.build(filename);
} catch (Exception e) {
System.out.printf("Error parsing featureCollection %s err = %s", filename, e.getMessage());
return null;
}
return FeatureCollectionReader.readFeatureCollection(doc.getRootElement());
} } | public class class_name {
static public FeatureCollectionConfig getConfigFromSnippet(String filename) {
org.jdom2.Document doc;
try {
SAXBuilder builder = new SAXBuilder();
doc = builder.build(filename); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
System.out.printf("Error parsing featureCollection %s err = %s", filename, e.getMessage());
return null;
} // depends on control dependency: [catch], data = [none]
return FeatureCollectionReader.readFeatureCollection(doc.getRootElement());
} } |
public class class_name {
public java.util.List<String> getProductDescriptions() {
if (productDescriptions == null) {
productDescriptions = new com.amazonaws.internal.SdkInternalList<String>();
}
return productDescriptions;
} } | public class class_name {
public java.util.List<String> getProductDescriptions() {
if (productDescriptions == null) {
productDescriptions = new com.amazonaws.internal.SdkInternalList<String>(); // depends on control dependency: [if], data = [none]
}
return productDescriptions;
} } |
public class class_name {
private void cancelPendingSlotRequest(PendingSlotRequest pendingSlotRequest) {
CompletableFuture<Acknowledge> request = pendingSlotRequest.getRequestFuture();
returnPendingTaskManagerSlotIfAssigned(pendingSlotRequest);
if (null != request) {
request.cancel(false);
}
} } | public class class_name {
private void cancelPendingSlotRequest(PendingSlotRequest pendingSlotRequest) {
CompletableFuture<Acknowledge> request = pendingSlotRequest.getRequestFuture();
returnPendingTaskManagerSlotIfAssigned(pendingSlotRequest);
if (null != request) {
request.cancel(false); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public void setParameterOverrides(java.util.Collection<Parameter> parameterOverrides) {
if (parameterOverrides == null) {
this.parameterOverrides = null;
return;
}
this.parameterOverrides = new com.amazonaws.internal.SdkInternalList<Parameter>(parameterOverrides);
} } | public class class_name {
public void setParameterOverrides(java.util.Collection<Parameter> parameterOverrides) {
if (parameterOverrides == null) {
this.parameterOverrides = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.parameterOverrides = new com.amazonaws.internal.SdkInternalList<Parameter>(parameterOverrides);
} } |
public class class_name {
public static Collection getLabelsFromMap(Collection sortedKeys, Map keysAndLabels)
{
Collection<Object> labels = new ArrayList<Object>();
Iterator keyIt = sortedKeys.iterator();
while (keyIt.hasNext())
{
labels.add(keysAndLabels.get(keyIt.next()));
}
return labels;
} } | public class class_name {
public static Collection getLabelsFromMap(Collection sortedKeys, Map keysAndLabels)
{
Collection<Object> labels = new ArrayList<Object>();
Iterator keyIt = sortedKeys.iterator();
while (keyIt.hasNext())
{
labels.add(keysAndLabels.get(keyIt.next())); // depends on control dependency: [while], data = [none]
}
return labels;
} } |
public class class_name {
public static void initialize(SingletonProvider instance) {
synchronized (SingletonProvider.class) {
if (INSTANCE == null) {
INSTANCE = instance;
} else {
throw new RuntimeException("SingletonProvider is already initialized with " + INSTANCE);
}
}
} } | public class class_name {
public static void initialize(SingletonProvider instance) {
synchronized (SingletonProvider.class) {
if (INSTANCE == null) {
INSTANCE = instance; // depends on control dependency: [if], data = [none]
} else {
throw new RuntimeException("SingletonProvider is already initialized with " + INSTANCE);
}
}
} } |
public class class_name {
public String getConfigurationOptionValue (String optionName, String defaultValue) {
String optionValue;
Node configurationOption = this.getConfigurationOption (optionName);
if (configurationOption != null) {
optionValue = configurationOption.getTextContent ();
} else {
optionValue = defaultValue;
}
return optionValue;
} } | public class class_name {
public String getConfigurationOptionValue (String optionName, String defaultValue) {
String optionValue;
Node configurationOption = this.getConfigurationOption (optionName);
if (configurationOption != null) {
optionValue = configurationOption.getTextContent (); // depends on control dependency: [if], data = [none]
} else {
optionValue = defaultValue; // depends on control dependency: [if], data = [none]
}
return optionValue;
} } |
public class class_name {
private Date toDate(final String pStringDate) {
// weird manipulation to parse the date... remove ':' from the timezone
// before: 2011-07-12T22:42:40.000+02:00
// after: 2011-07-12T22:42:40.000+0200
final StringBuilder _date = new StringBuilder();
_date.append(pStringDate.substring(0, pStringDate.length() - 3));
_date.append(pStringDate.substring(pStringDate.length() - 2));
try {
return new SimpleDateFormat(RFC_339_DATE_FORMAT).parse(_date.toString());
} catch (final ParseException e) {
throw new IllegalArgumentException(
"The given spreadsheet ListEntry usercrashdate field value is malformed", e);
}
} } | public class class_name {
private Date toDate(final String pStringDate) {
// weird manipulation to parse the date... remove ':' from the timezone
// before: 2011-07-12T22:42:40.000+02:00
// after: 2011-07-12T22:42:40.000+0200
final StringBuilder _date = new StringBuilder();
_date.append(pStringDate.substring(0, pStringDate.length() - 3));
_date.append(pStringDate.substring(pStringDate.length() - 2));
try {
return new SimpleDateFormat(RFC_339_DATE_FORMAT).parse(_date.toString()); // depends on control dependency: [try], data = [none]
} catch (final ParseException e) {
throw new IllegalArgumentException(
"The given spreadsheet ListEntry usercrashdate field value is malformed", e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public static Type[] getActualTypeArguments(Class<?> clazz) {
Type type = Types.getCanonicalType(clazz);
if (type instanceof ParameterizedType) {
return ((ParameterizedType) type).getActualTypeArguments();
} else {
return EMPTY_TYPES;
}
} } | public class class_name {
public static Type[] getActualTypeArguments(Class<?> clazz) {
Type type = Types.getCanonicalType(clazz);
if (type instanceof ParameterizedType) {
return ((ParameterizedType) type).getActualTypeArguments(); // depends on control dependency: [if], data = [none]
} else {
return EMPTY_TYPES; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public Observable<Boolean> upsertMessageStatuses(String conversationId, String profileId, List<MessageStatusUpdate> msgStatusList) {
return asObservable(new Executor<Boolean>() {
@Override
void execute(ChatStore store, Emitter<Boolean> emitter) {
store.beginTransaction();
boolean isSuccess = false;
for (MessageStatusUpdate statusUpdate : msgStatusList) {
for (String messageId : statusUpdate.getMessageIds()) {
LocalMessageStatus status = null;
if (MessageStatus.delivered.name().equals(statusUpdate.getStatus())) {
status = LocalMessageStatus.delivered;
} else if (MessageStatus.read.name().equals(statusUpdate.getStatus())) {
status = LocalMessageStatus.read;
}
if (status != null) {
isSuccess = store.update(ChatMessageStatus.builder().populate(conversationId, messageId, profileId, status, DateHelper.getUTCMilliseconds(statusUpdate.getTimestamp()), null).build());
}
}
}
store.endTransaction();
emitter.onNext(isSuccess);
emitter.onCompleted();
}
});
} } | public class class_name {
public Observable<Boolean> upsertMessageStatuses(String conversationId, String profileId, List<MessageStatusUpdate> msgStatusList) {
return asObservable(new Executor<Boolean>() {
@Override
void execute(ChatStore store, Emitter<Boolean> emitter) {
store.beginTransaction();
boolean isSuccess = false;
for (MessageStatusUpdate statusUpdate : msgStatusList) {
for (String messageId : statusUpdate.getMessageIds()) {
LocalMessageStatus status = null;
if (MessageStatus.delivered.name().equals(statusUpdate.getStatus())) {
status = LocalMessageStatus.delivered; // depends on control dependency: [if], data = [none]
} else if (MessageStatus.read.name().equals(statusUpdate.getStatus())) {
status = LocalMessageStatus.read; // depends on control dependency: [if], data = [none]
}
if (status != null) {
isSuccess = store.update(ChatMessageStatus.builder().populate(conversationId, messageId, profileId, status, DateHelper.getUTCMilliseconds(statusUpdate.getTimestamp()), null).build()); // depends on control dependency: [if], data = [(status]
}
}
}
store.endTransaction();
emitter.onNext(isSuccess);
emitter.onCompleted();
}
});
} } |
public class class_name {
private boolean checkID(long id) {
// Ensure that the given ID is a monotonically increasing ID.
// If the ID is less than the last received ID then reset the
// last received ID since the connection must have been reset.
if (lastReceived == 0 || id == lastReceived + 1 || id < lastReceived) {
lastReceived = id;
// If the ID reaches the end of the current batch then tell the data
// source that it's okay to remove all previous messages.
if (lastReceived % BATCH_SIZE == 0) {
ack();
}
return true;
} else {
fail();
}
return false;
} } | public class class_name {
private boolean checkID(long id) {
// Ensure that the given ID is a monotonically increasing ID.
// If the ID is less than the last received ID then reset the
// last received ID since the connection must have been reset.
if (lastReceived == 0 || id == lastReceived + 1 || id < lastReceived) {
lastReceived = id; // depends on control dependency: [if], data = [none]
// If the ID reaches the end of the current batch then tell the data
// source that it's okay to remove all previous messages.
if (lastReceived % BATCH_SIZE == 0) {
ack(); // depends on control dependency: [if], data = [none]
}
return true; // depends on control dependency: [if], data = [none]
} else {
fail(); // depends on control dependency: [if], data = [none]
}
return false;
} } |
public class class_name {
AbstractBrowsableClasspathResource navigate(ResourcePathNode<Void> path, boolean create) {
ClasspathFolder folder = this;
List<ResourcePathNode<Void>> pathList = path.asList();
for (ResourcePathNode<Void> node : pathList) {
String nodeName = node.getName();
if (node.isRoot()) {
if (node.isAbsolute()) {
if (node == ResourcePathNode.ROOT_ABSOLUTE) {
folder = getRoot();
} else {
throw new IllegalArgumentException(nodeName);
}
}
} else if (node.isParentDirectory()) {
if (!folder.isRoot()) {
folder = folder.getParent();
}
} else {
AbstractBrowsableClasspathResource childResource = folder.getChildResource(nodeName);
if (childResource == null) {
if (!create) {
return null;
}
if ((node == path) && (nodeName.indexOf('.') >= 0)) {
childResource = new ClasspathFile(folder, nodeName);
} else {
childResource = new ClasspathFolder(folder, nodeName);
}
}
if (childResource.isFolder()) {
folder = (ClasspathFolder) childResource;
} else if (node == path) {
return childResource;
} else {
// actually illegal classpath
return null;
}
}
}
return folder;
} } | public class class_name {
AbstractBrowsableClasspathResource navigate(ResourcePathNode<Void> path, boolean create) {
ClasspathFolder folder = this;
List<ResourcePathNode<Void>> pathList = path.asList();
for (ResourcePathNode<Void> node : pathList) {
String nodeName = node.getName();
if (node.isRoot()) {
if (node.isAbsolute()) {
if (node == ResourcePathNode.ROOT_ABSOLUTE) {
folder = getRoot(); // depends on control dependency: [if], data = [none]
} else {
throw new IllegalArgumentException(nodeName);
}
}
} else if (node.isParentDirectory()) {
if (!folder.isRoot()) {
folder = folder.getParent(); // depends on control dependency: [if], data = [none]
}
} else {
AbstractBrowsableClasspathResource childResource = folder.getChildResource(nodeName);
if (childResource == null) {
if (!create) {
return null; // depends on control dependency: [if], data = [none]
}
if ((node == path) && (nodeName.indexOf('.') >= 0)) {
childResource = new ClasspathFile(folder, nodeName); // depends on control dependency: [if], data = [none]
} else {
childResource = new ClasspathFolder(folder, nodeName); // depends on control dependency: [if], data = [none]
}
}
if (childResource.isFolder()) {
folder = (ClasspathFolder) childResource; // depends on control dependency: [if], data = [none]
} else if (node == path) {
return childResource; // depends on control dependency: [if], data = [none]
} else {
// actually illegal classpath
return null; // depends on control dependency: [if], data = [none]
}
}
}
return folder;
} } |
public class class_name {
@Override
protected int onExecuteUpdate() {
if (kunderaQuery.isDeleteUpdate()) {
List result = getResultList();
return result != null ? result.size() : 0;
}
return 0;
} } | public class class_name {
@Override
protected int onExecuteUpdate() {
if (kunderaQuery.isDeleteUpdate()) {
List result = getResultList();
return result != null ? result.size() : 0;
// depends on control dependency: [if], data = [none]
}
return 0;
} } |
public class class_name {
public SearchProductsResult withProductViewSummaries(ProductViewSummary... productViewSummaries) {
if (this.productViewSummaries == null) {
setProductViewSummaries(new java.util.ArrayList<ProductViewSummary>(productViewSummaries.length));
}
for (ProductViewSummary ele : productViewSummaries) {
this.productViewSummaries.add(ele);
}
return this;
} } | public class class_name {
public SearchProductsResult withProductViewSummaries(ProductViewSummary... productViewSummaries) {
if (this.productViewSummaries == null) {
setProductViewSummaries(new java.util.ArrayList<ProductViewSummary>(productViewSummaries.length)); // depends on control dependency: [if], data = [none]
}
for (ProductViewSummary ele : productViewSummaries) {
this.productViewSummaries.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.