code stringlengths 130 281k | code_dependency stringlengths 182 306k |
|---|---|
public class class_name {
@Override
public EClass getIfcReinforcementDefinitionProperties() {
    // Lazily resolve and cache the EClass for IfcReinforcementDefinitionProperties:
    // classifier at index 510 of the globally registered Ifc4 package.
    EClass eClass = ifcReinforcementDefinitionPropertiesEClass;
    if (eClass == null) {
        eClass = (EClass) EPackage.Registry.INSTANCE
                .getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(510);
        ifcReinforcementDefinitionPropertiesEClass = eClass; // cache for subsequent calls
    }
    return eClass;
} } | public class class_name {
// Lazily resolves and caches the EClass for IfcReinforcementDefinitionProperties
// from the globally registered Ifc4 package (classifier index 510).
@Override
public EClass getIfcReinforcementDefinitionProperties() {
if (ifcReinforcementDefinitionPropertiesEClass == null) {
ifcReinforcementDefinitionPropertiesEClass = (EClass) EPackage.Registry.INSTANCE
.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(510);
// cache the resolved classifier so later calls skip the registry lookup
}
return ifcReinforcementDefinitionPropertiesEClass;
} } |
public class class_name {
public List<Violation> getGuaranteeTermViolations(Agreement agreement, GuaranteeTerm guaranteeTerm) {
    // Query the monitoring REST endpoint for the violations of this agreement/term pair.
    String url = getEndpoint() + "/violations?agreementId=" + agreement.getAgreementId()
            + "&guaranteeTerm=" + guaranteeTerm.getName();
    String json = getJerseyClient().target(url).request()
            .header("Accept", MediaType.APPLICATION_JSON)
            .header("Content-Type", MediaType.APPLICATION_JSON)
            .buildGet().invoke().readEntity(String.class);
    // Deserialize the JSON array into Violation beans.
    try {
        return mapper.readValue(json, new TypeReference<List<Violation>>() {});
    } catch (IOException e) {
        /*
         * TODO: Change Runtime for a DashboardException
         */
        throw new RuntimeException(e);
    }
} } | public class class_name {
// Fetches the violations recorded for the given agreement and guarantee term
// from the monitoring REST endpoint and deserializes the JSON array response.
public List<Violation> getGuaranteeTermViolations(Agreement agreement, GuaranteeTerm guaranteeTerm) {
String json = getJerseyClient().target(getEndpoint() + "/violations?agreementId=" + agreement.getAgreementId() + "&guaranteeTerm=" + guaranteeTerm.getName()).request()
.header("Accept", MediaType.APPLICATION_JSON)
.header("Content-Type", MediaType.APPLICATION_JSON)
.buildGet().invoke().readEntity(String.class);
try {
return mapper.readValue(json, new TypeReference<List<Violation>>(){}); // parse the JSON array into Violation beans
} catch (IOException e) {
/*
 * TODO: Change Runtime for a DashboardException
 */
throw new RuntimeException(e); // wrap parse failures; see TODO above
}
} } |
public class class_name {
protected SortedMap<String, String> getProperties(ITextNode[] nodes, String language, Status[] status) {
    // Collects key/value pairs for the given language, optionally restricted to
    // values whose status is in `status`, each pair passed through the configured
    // export filters before being stored.
    TreeMap<String, String> result = new TreeMap<>();
    for (ITextNode textNode : nodes) {
        IValueNode valueNode = textNode.getValueNode(language);
        if (valueNode == null) {
            continue; // no value for this language
        }
        if (status != null && !TremaCoreUtil.containsStatus(valueNode.getStatus(), status)) {
            continue; // filtered out by status
        }
        IKeyValuePair pair = new KeyValuePair(textNode.getKey(), valueNode.getValue());
        if (iExportFilters != null) {
            for (IExportFilter exportFilter : iExportFilters) {
                exportFilter.filter(pair);
            }
        }
        result.put(pair.getKey(), pair.getValue());
    }
    return result;
} } | public class class_name {
// Builds a sorted key/value map for the given language from the supplied text
// nodes, keeping only values whose status matches (when a status filter is
// given) and running each pair through the configured export filters.
protected SortedMap<String, String> getProperties(ITextNode[]nodes, String language, Status[] status) {
TreeMap<String, String> map = new TreeMap<>();
for (ITextNode node : nodes) {
IValueNode valueNode = node.getValueNode(language);
if (valueNode != null) {
if (status == null || TremaCoreUtil.containsStatus(valueNode.getStatus(), status)) {
IKeyValuePair keyValuePair = new KeyValuePair(node.getKey(), valueNode.getValue());
if (iExportFilters != null) {
for (IExportFilter filter : iExportFilters) {
filter.filter(keyValuePair); // filters may rewrite key and/or value in place
}
}
map.put(keyValuePair.getKey(), keyValuePair.getValue()); // later duplicate keys overwrite earlier entries
}
}
}
return map;
} } |
public class class_name {
@Override
public HttpFilterBuilder containsValue(final String name, final String value) {
    // Adds a filter that matches requests whose "name" header carries the exact
    // header value "value".
    return addFilter(new RequestFilter<HttpRequest>() {
        @Override
        public boolean matches(HttpRequest request) {
            List<String> values = request.getHeaders(name);
            for (String val : values) {
                // BUG FIX: previously compared each header value against the whole
                // list ("val.equals(values)"), which can never be true and silently
                // ignored the "value" parameter. Compare against the requested value.
                if (val.equals(value)) {
                    return true;
                }
            }
            return false;
        }
        @Override
        public String toString() {
            return String.format("containsValue('%s', '%s')", name, value);
        }
    });
} } | public class class_name {
@Override
public HttpFilterBuilder containsValue(final String name, final String value) {
    // Adds a filter that matches requests whose "name" header carries the exact
    // header value "value".
    return addFilter(new RequestFilter<HttpRequest>() {
        @Override
        public boolean matches(HttpRequest request) {
            List<String> values = request.getHeaders(name);
            for (String val : values) {
                // BUG FIX: the original compared each header value to the whole
                // list ("val.equals(values)"), always false, ignoring "value".
                if (val.equals(value)) {
                    return true;
                }
            }
            return false;
        }
        @Override
        public String toString() {
            return String.format("containsValue('%s', '%s')", name, value);
        }
    });
} } |
public class class_name {
public java.util.List<String> getInstanceTypes() {
    // Lazily create the backing list so callers never observe null.
    if (instanceTypes != null) {
        return instanceTypes;
    }
    instanceTypes = new com.amazonaws.internal.SdkInternalList<String>();
    return instanceTypes;
} } | public class class_name {
// Returns the instance-type list, lazily creating the backing SdkInternalList
// so callers never observe null.
public java.util.List<String> getInstanceTypes() {
if (instanceTypes == null) {
instanceTypes = new com.amazonaws.internal.SdkInternalList<String>(); // first access: allocate an empty list
}
return instanceTypes;
} } |
public class class_name {
@Override
public Partition getInitialPartition() {
    // When element symbols are ignored, every atom belongs to a single unit cell.
    if (ignoreElements) {
        return Partition.unit(atomContainer.getAtomCount());
    }
    // Group atom indices by element symbol.
    Map<String, SortedSet<Integer>> cellMap = new HashMap<String, SortedSet<Integer>>();
    int numberOfAtoms = atomContainer.getAtomCount();
    for (int atomIndex = 0; atomIndex < numberOfAtoms; atomIndex++) {
        String symbol = atomContainer.getAtom(atomIndex).getSymbol();
        SortedSet<Integer> cell = cellMap.get(symbol);
        if (cell == null) {
            cell = new TreeSet<Integer>();
            cellMap.put(symbol, cell);
        }
        cell.add(atomIndex);
    }
    // Emit cells in alphabetical symbol order so the partition is deterministic.
    List<String> atomSymbols = new ArrayList<String>(cellMap.keySet());
    Collections.sort(atomSymbols);
    Partition elementPartition = new Partition();
    for (String symbol : atomSymbols) {
        elementPartition.addCell(cellMap.get(symbol));
    }
    return elementPartition;
} } | public class class_name {
// Builds the initial partition of atom indices: a single unit cell when element
// symbols are ignored, otherwise one cell per element symbol, with cells ordered
// alphabetically by symbol for determinism.
@Override
public Partition getInitialPartition() {
if (ignoreElements) {
int n = atomContainer.getAtomCount();
return Partition.unit(n); // one cell containing every atom index
}
Map<String, SortedSet<Integer>> cellMap = new HashMap<String, SortedSet<Integer>>();
int numberOfAtoms = atomContainer.getAtomCount();
for (int atomIndex = 0; atomIndex < numberOfAtoms; atomIndex++) {
String symbol = atomContainer.getAtom(atomIndex).getSymbol();
SortedSet<Integer> cell;
if (cellMap.containsKey(symbol)) {
cell = cellMap.get(symbol);
} else {
cell = new TreeSet<Integer>(); // first atom seen for this element
cellMap.put(symbol, cell);
}
cell.add(atomIndex);
}
List<String> atomSymbols = new ArrayList<String>(cellMap.keySet());
Collections.sort(atomSymbols); // deterministic cell ordering
Partition elementPartition = new Partition();
for (String key : atomSymbols) {
SortedSet<Integer> cell = cellMap.get(key);
elementPartition.addCell(cell);
}
return elementPartition;
} } |
public class class_name {
/**
 * Marshalls the fields of the given {@link GetFaceSearchRequest} (job id, max
 * results, next token, sort order) into the protocol representation via the
 * supplied {@link ProtocolMarshaller}.
 *
 * @throws SdkClientException if the request is null or any field fails to marshall
 */
public void marshall(GetFaceSearchRequest getFaceSearchRequest, ProtocolMarshaller protocolMarshaller) {
if (getFaceSearchRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(getFaceSearchRequest.getJobId(), JOBID_BINDING);
protocolMarshaller.marshall(getFaceSearchRequest.getMaxResults(), MAXRESULTS_BINDING);
protocolMarshaller.marshall(getFaceSearchRequest.getNextToken(), NEXTTOKEN_BINDING);
protocolMarshaller.marshall(getFaceSearchRequest.getSortBy(), SORTBY_BINDING);
} catch (Exception e) {
// any marshalling failure is rewrapped with the cause preserved
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
// Marshalls each field of the GetFaceSearchRequest (job id, max results, next
// token, sort order) into the protocol representation; rejects a null request.
public void marshall(GetFaceSearchRequest getFaceSearchRequest, ProtocolMarshaller protocolMarshaller) {
if (getFaceSearchRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(getFaceSearchRequest.getJobId(), JOBID_BINDING);
protocolMarshaller.marshall(getFaceSearchRequest.getMaxResults(), MAXRESULTS_BINDING);
protocolMarshaller.marshall(getFaceSearchRequest.getNextToken(), NEXTTOKEN_BINDING);
protocolMarshaller.marshall(getFaceSearchRequest.getSortBy(), SORTBY_BINDING);
} catch (Exception e) {
// any marshalling failure is rewrapped as an SdkClientException with the cause preserved
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } |
public class class_name {
private void setResourceTypes(Map<String, String[]> types) {
    // A null or empty type map is logged and marks the configuration invalid,
    // but the value is still stored.
    boolean missing = (types == null) || types.isEmpty();
    if (missing) {
        setValidConfiguration(false);
        LOG.error(Messages.get().getBundle().key(Messages.LOG_NO_RESOURCE_TYPES_0));
    }
    m_resTypes = types;
} } | public class class_name {
// Stores the exportable resource types; a null or empty map is logged and marks
// the configuration invalid, but is still assigned.
private void setResourceTypes(Map<String, String[]> types) {
if ((types == null) || (types.size() == 0)) {
setValidConfiguration(false); // flag the overall configuration as unusable
LOG.error(Messages.get().getBundle().key(Messages.LOG_NO_RESOURCE_TYPES_0));
}
m_resTypes = types;
} } |
public class class_name {
/**
 * Parses the property value for {@code key} as a whitespace-separated list of
 * qualified names and returns them as a Vector of QName objects. Whitespace
 * inside curly braces (the "{uri}local" form) does not split tokens.
 * Returns null when the property is absent.
 */
public static Vector getQNameProperties(String key, Properties props)
{
String s = props.getProperty(key);
if (null != s)
{
Vector v = new Vector();
int l = s.length();
boolean inCurly = false;
FastStringBuffer buf = new FastStringBuffer();
// parse through string, breaking on whitespaces. I do this instead
// of a tokenizer so I can track whitespace inside of curly brackets,
// which theoretically shouldn't happen if they contain legal URLs.
for (int i = 0; i < l; i++)
{
char c = s.charAt(i);
if (Character.isWhitespace(c))
{
if (!inCurly)
{
// token boundary outside braces: flush the accumulated name
if (buf.length() > 0)
{
QName qname = QName.getQNameFromString(buf.toString());
v.addElement(qname);
buf.reset();
}
continue;
}
// whitespace inside braces falls through and is kept in the token
}
else if ('{' == c)
inCurly = true;
else if ('}' == c)
inCurly = false;
buf.append(c);
}
// flush the trailing token, if any
if (buf.length() > 0)
{
QName qname = QName.getQNameFromString(buf.toString());
v.addElement(qname);
buf.reset();
}
return v;
}
else
return null;
} } | public class class_name {
// Parses the property value for `key` as a whitespace-separated list of
// qualified names; whitespace inside curly braces ("{uri}local") does not split
// tokens. Returns a Vector of QName objects, or null when the property is absent.
public static Vector getQNameProperties(String key, Properties props)
{
String s = props.getProperty(key);
if (null != s)
{
Vector v = new Vector();
int l = s.length();
boolean inCurly = false;
FastStringBuffer buf = new FastStringBuffer();
// parse through string, breaking on whitespaces. I do this instead
// of a tokenizer so I can track whitespace inside of curly brackets,
// which theoretically shouldn't happen if they contain legal URLs.
for (int i = 0; i < l; i++)
{
char c = s.charAt(i);
if (Character.isWhitespace(c))
{
if (!inCurly)
{
if (buf.length() > 0)
{
QName qname = QName.getQNameFromString(buf.toString());
v.addElement(qname); // token complete outside braces
buf.reset(); // start accumulating the next token
}
continue;
}
}
else if ('{' == c)
inCurly = true;
else if ('}' == c)
inCurly = false;
buf.append(c); // also keeps whitespace seen inside braces
}
if (buf.length() > 0)
{
QName qname = QName.getQNameFromString(buf.toString());
v.addElement(qname); // flush the trailing token
buf.reset();
}
return v; // may be empty if the property was all whitespace
}
else
return null;
} } |
public class class_name {
@Deprecated
final static public void spacePad(StringBuffer sbuf, int length) {
    // Append `length` space characters to sbuf.
    // Bulk phase: 32 spaces at a time (SPACES[5] is assumed to hold a
    // 32-space run — defined elsewhere in this class).
    for (; length >= 32; length -= 32) {
        sbuf.append(SPACES[5]);
    }
    // Remainder phase: one append per set bit of the remaining count,
    // SPACES[i] assumed to hold 2^i spaces.
    for (int bit = 4; bit >= 0; bit--) {
        if ((length & (1 << bit)) != 0) {
            sbuf.append(SPACES[bit]);
        }
    }
} } | public class class_name {
// Appends `length` space characters to sbuf using the precomputed SPACES chunks
// (SPACES[i] presumably holds 2^i spaces, SPACES[5] a 32-space run — TODO confirm).
@Deprecated
final static public void spacePad(StringBuffer sbuf, int length) {
while (length >= 32) {
sbuf.append(SPACES[5]); // consume 32 spaces at a time
length -= 32;
}
for (int i = 4; i >= 0; i--) {
if ((length & (1 << i)) != 0) {
sbuf.append(SPACES[i]); // append a chunk for each set bit of the remainder
}
}
} } |
public class class_name {
/**
 * Inserts the given clustering-feature node into the BICO tree: walks down from
 * the root, at each level either attaching the node (when it is too far from all
 * children), merging it into the nearest child (when the merge cost stays under
 * the global threshold T), or descending one level and retrying.
 */
protected void bicoCFUpdate(ClusteringTreeNode x) {
// Starts with the global root node as the current root node
ClusteringTreeNode r = this.root;
int i = 1; // current tree level; drives the distance threshold calcRSquared(i)
while (true) {
ClusteringTreeNode y = r.nearestChild(x.getCenter());
// Checks if the node can not be merged to the current level
if (r.hasNoChildren()
|| y == null
|| Metric.distanceSquared(x.getCenter(), y.getCenter()) > calcRSquared(i)) {
// Adds the node to the current root node
x.setThreshold(calcR(i));
r.addChild(x);
this.rootCount++;
break;
} else {
// Checks if the node can be merged to the nearest node without
// exceeding the global threshold
if (y.getClusteringFeature().calcKMeansCosts(y.getCenter(),
x.getClusteringFeature()) <= this.T) {
// Merges the ClusteringFeature of the node to the
// ClusteringFeature of the nearest node
y.getClusteringFeature().merge(x.getClusteringFeature());
break;
} else {
// Navigates one level down in the tree
r = y;
i++;
}
}
}
} } | public class class_name {
// Inserts the clustering-feature node into the BICO tree: descend from the root,
// at each level attach the node, merge it into the nearest child (when under the
// global cost threshold T), or step one level down and retry.
protected void bicoCFUpdate(ClusteringTreeNode x) {
// Starts with the global root node as the current root node
ClusteringTreeNode r = this.root;
int i = 1; // current level; drives the distance threshold calcRSquared(i)
while (true) {
ClusteringTreeNode y = r.nearestChild(x.getCenter());
// Checks if the node can not be merged to the current level
if (r.hasNoChildren()
|| y == null
|| Metric.distanceSquared(x.getCenter(), y.getCenter()) > calcRSquared(i)) {
// Adds the node to the current root node
x.setThreshold(calcR(i));
r.addChild(x);
this.rootCount++;
break;
} else {
// Checks if the node can be merged to the nearest node without
// exceeding the global threshold
if (y.getClusteringFeature().calcKMeansCosts(y.getCenter(),
x.getClusteringFeature()) <= this.T) {
// Merges the ClusteringFeature of the node to the
// ClusteringFeature of the nearest node
y.getClusteringFeature().merge(x.getClusteringFeature());
break;
} else {
// Navigates one level down in the tree
r = y;
i++;
}
}
}
} } |
public class class_name {
public void genStat(JCTree tree, Env<GenContext> env) {
    // Generate code only while the code buffer is still reachable; unreachable
    // switch-scoped variable declarations still need a local slot allocated.
    if (!code.isAlive()) {
        if (env.info.isSwitch && tree.hasTag(VARDEF)) {
            // variables whose declarations are in a switch
            // can be used even if the decl is unreachable.
            code.newLocal(((JCVariableDecl) tree).sym);
        }
        return;
    }
    code.statBegin(tree.pos);
    genDef(tree, env);
} } | public class class_name {
// Generates code for the statement when the code buffer is still reachable;
// otherwise only allocates locals for switch-scoped variable declarations.
public void genStat(JCTree tree, Env<GenContext> env) {
if (code.isAlive()) {
code.statBegin(tree.pos); // record the source position for debug info
genDef(tree, env);
} else if (env.info.isSwitch && tree.hasTag(VARDEF)) {
// variables whose declarations are in a switch
// can be used even if the decl is unreachable.
code.newLocal(((JCVariableDecl) tree).sym);
}
} } |
public class class_name {
/**
 * Returns the external Transaction for this object, creating a new one (and a
 * WeakReference to track it) when none exists yet or the previous one has been
 * garbage collected. Synchronized so only one Transaction is ever created.
 */
protected synchronized final Transaction getExternalTransaction()
{
final String methodName = "getExternalTransaction";
if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
trace.entry(this,
cclass,
methodName);
Transaction transaction = null; // For return.
if (transactionReference != null)
transaction = (Transaction) transactionReference.get(); // null if already collected
if (transaction == null) {
transaction = new Transaction(this);
// Make a WeakReference that becomes Enqueued as a result of the external Transaction becoming unreferenced.
transactionReference = new TransactionReference(this,
transaction);
} // if (transaction == null).
if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
trace.exit(this,
cclass,
methodName,
new Object[] { transaction });
return transaction;
} } | public class class_name {
// Returns the external Transaction, creating a fresh one (tracked via a
// WeakReference) when none exists or the previous one has been collected.
protected synchronized final Transaction getExternalTransaction()
{
final String methodName = "getExternalTransaction";
if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
trace.entry(this,
cclass,
methodName);
Transaction transaction = null; // For return.
if (transactionReference != null)
transaction = (Transaction) transactionReference.get(); // null if already collected
if (transaction == null) {
transaction = new Transaction(this);
// Make a WeakReference that becomes Enqueued as a result of the external Transaction becoming unreferenced.
transactionReference = new TransactionReference(this,
transaction);
} // if (transaction == null).
if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
trace.exit(this,
cclass,
methodName,
new Object[] { transaction });
return transaction;
} } |
public class class_name {
private static boolean contains(int[] values, int searchValue) {
    // Linear membership scan; rejects a null array up front with NPE.
    Objects.requireNonNull(values);
    for (int idx = 0; idx < values.length; idx++) {
        if (values[idx] == searchValue) {
            return true;
        }
    }
    return false;
} } | public class class_name {
// Returns true when searchValue occurs anywhere in values (linear scan);
// throws NullPointerException for a null array.
private static boolean contains(int[] values, int searchValue) {
Objects.requireNonNull(values);
for (int value : values) {
if (searchValue == value) {
return true; // match found, stop scanning
}
}
return false;
} } |
public class class_name {
/**
 * Fetches the raw bytes at the given URL, sending inCookies with the request
 * and, when outCookies is non-null, harvesting response cookies into it.
 * Returns null on any failure; errors are logged rather than thrown. The
 * connection is always disconnected in the finally block.
 */
public static byte[] getContentFromUrl(URL url, Map inCookies, Map outCookies, boolean allowAllCerts)
{
URLConnection c = null;
try
{
c = getConnection(url, inCookies, true, false, false, allowAllCerts);
ByteArrayOutputStream out = new ByteArrayOutputStream(16384);
InputStream stream = IOUtilities.getInputStream(c);
IOUtilities.transfer(stream, out);
stream.close();
if (outCookies != null)
{ // [optional] Fetch cookies from server and update outCookie Map (pick up JSESSIONID, other headers)
getCookies(c, outCookies);
}
return out.toByteArray();
}
catch (SSLHandshakeException e)
{ // Don't read error response. it will just cause another exception.
LOG.warn("SSL Exception occurred fetching content from url: " + url, e);
return null;
}
catch (Exception e)
{
readErrorResponse(c); // drain the error stream before giving up
LOG.warn("Exception occurred fetching content from url: " + url, e);
return null;
}
finally
{
if (c instanceof HttpURLConnection)
{
disconnect((HttpURLConnection)c);
}
}
} } | public class class_name {
// Fetches the raw bytes at the given URL, sending inCookies and, when
// outCookies is non-null, harvesting response cookies into it. Returns null on
// any failure (logged, not thrown); the connection is always disconnected.
public static byte[] getContentFromUrl(URL url, Map inCookies, Map outCookies, boolean allowAllCerts)
{
URLConnection c = null;
try
{
c = getConnection(url, inCookies, true, false, false, allowAllCerts);
ByteArrayOutputStream out = new ByteArrayOutputStream(16384);
InputStream stream = IOUtilities.getInputStream(c);
IOUtilities.transfer(stream, out);
stream.close();
if (outCookies != null)
{ // [optional] Fetch cookies from server and update outCookie Map (pick up JSESSIONID, other headers)
getCookies(c, outCookies);
}
return out.toByteArray();
}
catch (SSLHandshakeException e)
{ // Don't read error response. it will just cause another exception.
LOG.warn("SSL Exception occurred fetching content from url: " + url, e);
return null;
}
catch (Exception e)
{
readErrorResponse(c); // drain the error stream before giving up
LOG.warn("Exception occurred fetching content from url: " + url, e);
return null;
}
finally
{
if (c instanceof HttpURLConnection)
{
disconnect((HttpURLConnection)c);
}
}
} } |
public class class_name {
/**
 * Returns the spliterator for this pipeline stage, consuming the stream.
 * For the source stage the underlying spliterator/supplier is handed out
 * directly (and cleared so the stage cannot be reused); for downstream stages
 * a wrapping spliterator is built lazily.
 * Throws IllegalStateException if the stream is already linked or consumed.
 */
@Override
@SuppressWarnings("unchecked")
public Spliterator<E_OUT> spliterator() {
if (linkedOrConsumed)
throw new IllegalStateException(MSG_STREAM_LINKED);
linkedOrConsumed = true;
if (this == sourceStage) {
if (sourceStage.sourceSpliterator != null) {
@SuppressWarnings("unchecked")
Spliterator<E_OUT> s = (Spliterator<E_OUT>) sourceStage.sourceSpliterator;
sourceStage.sourceSpliterator = null; // hand off ownership to the caller
return s;
}
else if (sourceStage.sourceSupplier != null) {
@SuppressWarnings("unchecked")
Supplier<Spliterator<E_OUT>> s = (Supplier<Spliterator<E_OUT>>) sourceStage.sourceSupplier;
sourceStage.sourceSupplier = null; // hand off ownership to the caller
return lazySpliterator(s);
}
else {
throw new IllegalStateException(MSG_CONSUMED);
}
}
else {
return wrap(this, () -> sourceSpliterator(0), isParallel());
}
} } | public class class_name {
// Returns the spliterator for this stage, consuming the stream: the source
// stage hands out (and clears) its spliterator/supplier directly; downstream
// stages return a lazily-built wrapping spliterator.
@Override
@SuppressWarnings("unchecked")
public Spliterator<E_OUT> spliterator() {
if (linkedOrConsumed)
throw new IllegalStateException(MSG_STREAM_LINKED);
linkedOrConsumed = true;
if (this == sourceStage) {
if (sourceStage.sourceSpliterator != null) {
@SuppressWarnings("unchecked")
Spliterator<E_OUT> s = (Spliterator<E_OUT>) sourceStage.sourceSpliterator;
sourceStage.sourceSpliterator = null; // hand off ownership to the caller
return s;
}
else if (sourceStage.sourceSupplier != null) {
@SuppressWarnings("unchecked")
Supplier<Spliterator<E_OUT>> s = (Supplier<Spliterator<E_OUT>>) sourceStage.sourceSupplier;
sourceStage.sourceSupplier = null; // hand off ownership to the caller
return lazySpliterator(s);
}
else {
throw new IllegalStateException(MSG_CONSUMED);
}
}
else {
return wrap(this, () -> sourceSpliterator(0), isParallel());
}
} } |
public class class_name {
@Override public final void onViewRecycled(VH holder) {
    // Dispatch the recycle callback to the handler matching the view's position.
    int position = holder.getAdapterPosition();
    if (isHeaderPosition(position)) {
        onHeaderViewRecycled(holder);
        return;
    }
    if (isFooterPosition(position)) {
        onFooterViewRecycled(holder);
        return;
    }
    onItemViewRecycled(holder);
} } | public class class_name {
// Routes the recycled view to the header, footer or item handler depending on
// its adapter position.
@Override public final void onViewRecycled(VH holder) {
int position = holder.getAdapterPosition();
if (isHeaderPosition(position)) {
onHeaderViewRecycled(holder);
} else if (isFooterPosition(position)) {
onFooterViewRecycled(holder);
} else {
onItemViewRecycled(holder); // a regular content item
}
} } |
public class class_name {
/**
 * Returns the node name for the given node handle: namespace nodes are reduced
 * to their prefix ("" for the default xmlns declaration); attribute, element,
 * entity-reference and PI nodes keep their full node name; all other node
 * types yield "".
 */
public String getNodeNameX(int nodeHandle)
{
String name;
short type = getNodeType(nodeHandle);
switch (type)
{
case DTM.NAMESPACE_NODE :
{
Node node = getNode(nodeHandle);
// assume not null.
name = node.getNodeName();
if(name.startsWith("xmlns:"))
{
// "xmlns:foo" -> "foo"
name = QName.getLocalPart(name);
}
else if(name.equals("xmlns"))
{
// the default namespace declaration has no prefix
name = "";
}
}
break;
case DTM.ATTRIBUTE_NODE :
case DTM.ELEMENT_NODE :
case DTM.ENTITY_REFERENCE_NODE :
case DTM.PROCESSING_INSTRUCTION_NODE :
{
Node node = getNode(nodeHandle);
// assume not null.
name = node.getNodeName();
}
break;
default :
name = "";
}
return name;
} } | public class class_name {
// Returns the node name for the handle: namespace nodes reduce to their prefix
// ("" for the default xmlns); attribute/element/entity-reference/PI nodes keep
// their full name; all other node types yield "".
public String getNodeNameX(int nodeHandle)
{
String name;
short type = getNodeType(nodeHandle);
switch (type)
{
case DTM.NAMESPACE_NODE :
{
Node node = getNode(nodeHandle);
// assume not null.
name = node.getNodeName();
if(name.startsWith("xmlns:"))
{
name = QName.getLocalPart(name); // "xmlns:foo" -> "foo"
}
else if(name.equals("xmlns"))
{
name = ""; // default namespace declaration has no prefix
}
}
break;
case DTM.ATTRIBUTE_NODE :
case DTM.ELEMENT_NODE :
case DTM.ENTITY_REFERENCE_NODE :
case DTM.PROCESSING_INSTRUCTION_NODE :
{
Node node = getNode(nodeHandle);
// assume not null.
name = node.getNodeName();
}
break;
default :
name = "";
}
return name;
} } |
public class class_name {
private void cacheInsert(MasterKey key, StatefulBeanO sfbean) {
    // Insert the bean and record its cache coordinates back on the bean itself.
    CacheElement element = cache.insert(key, sfbean);
    sfbean.ivCacheElement = element;
    sfbean.ivCacheKey = key;
    // Beans that are not passivation capable must never be chosen for eviction.
    if (!sfbean.getHome().getBeanMetaData().isPassivationCapable()) {
        cache.markElementEvictionIneligible(element);
    }
} } | public class class_name {
// Inserts the bean into the cache and stores its cache coordinates on the bean;
// beans that cannot be passivated are marked ineligible for eviction.
private void cacheInsert(MasterKey key, StatefulBeanO sfbean) {
CacheElement cacheElement = cache.insert(key, sfbean);
sfbean.ivCacheElement = cacheElement;
sfbean.ivCacheKey = key;
if (!sfbean.getHome().getBeanMetaData().isPassivationCapable()) {
cache.markElementEvictionIneligible(cacheElement);
}
} } |
public class class_name {
public static boolean set(RythmEngine engine) {
    // Bind the engine only when the holder slot is currently empty; return
    // whether this call installed it.
    // NOTE(review): check-then-act here is not atomic — confirm initialization
    // happens from a single thread.
    if (_engine.get() != null) {
        return false; // an engine is already bound; leave it in place
    }
    _engine.set(engine);
    return true;
} } | public class class_name {
// Binds the given engine to the holder slot only when none is set yet; returns
// true when this call installed the engine, false when one was already present.
// NOTE(review): check-then-act is not atomic — confirm single-threaded setup.
public static boolean set(RythmEngine engine) {
if (_engine.get() == null) {
_engine.set(engine); // first caller wins
return true;
} else {
return false; // an engine was already bound; leave it untouched
}
} } |
public class class_name {
/**
 * Builds (and caches) the precision/recall curve. In exact mode the curve is
 * computed vectorized from the stored probability/label pairs sorted by
 * descending probability; otherwise it is derived from the per-threshold
 * TP/FP counters. Results are cached in prCurve after the first call.
 */
public PrecisionRecallCurve getPrecisionRecallCurve() {
if (prCurve != null) {
return prCurve; // already computed; reuse the cached curve
}
double[] thresholdOut;
double[] precisionOut;
double[] recallOut;
int[] tpCountOut;
int[] fpCountOut;
int[] fnCountOut;
if (isExact) {
INDArray pl = getProbAndLabelUsed();
INDArray sorted = Nd4j.sortRows(pl, 0, false);
INDArray isPositive = sorted.getColumn(1,true);
INDArray cumSumPos = isPositive.cumsum(-1);
val length = sorted.size(0);
/*
Sort descending. As we iterate: decrease probability threshold T... all values <= T are predicted
as class 0, all others are predicted as class 1
Precision: sum(TP) / sum(predicted pos at threshold)
Recall: sum(TP) / total actual positives
predicted positive at threshold: # values <= threshold, i.e., just i
*/
INDArray t = Nd4j.create(DataType.DOUBLE, length + 2, 1);
t.put(new INDArrayIndex[]{interval(1, length + 1), all()}, sorted.getColumn(0,true));
INDArray linspace = Nd4j.linspace(1, length, length, DataType.DOUBLE);
INDArray precision = cumSumPos.castTo(DataType.DOUBLE).div(linspace.reshape(cumSumPos.shape()));
INDArray prec = Nd4j.create(DataType.DOUBLE, length + 2, 1);
prec.put(new INDArrayIndex[]{interval(1, length + 1), all()}, precision);
//Recall/TPR
INDArray rec = Nd4j.create(DataType.DOUBLE, length + 2, 1);
rec.put(new INDArrayIndex[]{interval(1, length + 1), all()},
cumSumPos.div(countActualPositive));
//Edge cases
t.putScalar(0, 0, 1.0);
prec.putScalar(0, 0, 1.0);
rec.putScalar(0, 0, 0.0);
prec.putScalar(length + 1, 0, cumSumPos.getDouble(cumSumPos.length() - 1) / length);
rec.putScalar(length + 1, 0, 1.0);
thresholdOut = t.data().asDouble();
precisionOut = prec.data().asDouble();
recallOut = rec.data().asDouble();
//Counts. Note the edge cases
tpCountOut = new int[thresholdOut.length];
fpCountOut = new int[thresholdOut.length];
fnCountOut = new int[thresholdOut.length];
for (int i = 1; i < tpCountOut.length - 1; i++) {
tpCountOut[i] = cumSumPos.getInt(i - 1);
fpCountOut[i] = i - tpCountOut[i]; //predicted positive - true positive
fnCountOut[i] = (int) countActualPositive - tpCountOut[i];
}
//Edge cases: last idx -> threshold of 0.0, all predicted positive
tpCountOut[tpCountOut.length - 1] = (int) countActualPositive;
fpCountOut[tpCountOut.length - 1] = (int) (exampleCount - countActualPositive);
fnCountOut[tpCountOut.length - 1] = 0;
//Edge case: first idx -> threshold of 1.0, all predictions negative
tpCountOut[0] = 0;
fpCountOut[0] = 0; //(int)(exampleCount - countActualPositive); //All negatives are predicted positive
fnCountOut[0] = (int) countActualPositive;
//Finally: 2 things to do
//(a) Reverse order: lowest to highest threshold
//(b) remove unnecessary/rendundant points (doesn't affect graph or AUPRC)
ArrayUtils.reverse(thresholdOut);
ArrayUtils.reverse(precisionOut);
ArrayUtils.reverse(recallOut);
ArrayUtils.reverse(tpCountOut);
ArrayUtils.reverse(fpCountOut);
ArrayUtils.reverse(fnCountOut);
if (rocRemoveRedundantPts) {
Pair<double[][], int[][]> pair = removeRedundant(thresholdOut, precisionOut, recallOut, tpCountOut,
fpCountOut, fnCountOut);
double[][] temp = pair.getFirst();
int[][] temp2 = pair.getSecond();
thresholdOut = temp[0];
precisionOut = temp[1];
recallOut = temp[2];
tpCountOut = temp2[0];
fpCountOut = temp2[1];
fnCountOut = temp2[2];
}
} else {
// Thresholded (approximate) mode: derive the curve from the pre-binned counters.
thresholdOut = new double[counts.size()];
precisionOut = new double[counts.size()];
recallOut = new double[counts.size()];
tpCountOut = new int[counts.size()];
fpCountOut = new int[counts.size()];
fnCountOut = new int[counts.size()];
int i = 0;
for (Map.Entry<Double, CountsForThreshold> entry : counts.entrySet()) {
double t = entry.getKey();
// NOTE(review): local 't' above is unused; the threshold is read from c.getThreshold() below.
CountsForThreshold c = entry.getValue();
long tpCount = c.getCountTruePositive();
long fpCount = c.getCountFalsePositive();
//For edge cases: http://stats.stackexchange.com/questions/1773/what-are-correct-values-for-precision-and-recall-in-edge-cases
//precision == 1 when FP = 0 -> no incorrect positive predictions
//recall == 1 when no dataset positives are present (got all 0 of 0 positives)
double precision;
if (tpCount == 0 && fpCount == 0) {
//At this threshold: no predicted positive cases
precision = 1.0;
} else {
precision = tpCount / (double) (tpCount + fpCount);
}
double recall;
if (countActualPositive == 0) {
recall = 1.0;
} else {
recall = tpCount / ((double) countActualPositive);
}
thresholdOut[i] = c.getThreshold();
precisionOut[i] = precision;
recallOut[i] = recall;
tpCountOut[i] = (int) tpCount;
fpCountOut[i] = (int) fpCount;
fnCountOut[i] = (int) (countActualPositive - tpCount);
i++;
}
}
prCurve = new PrecisionRecallCurve(thresholdOut, precisionOut, recallOut, tpCountOut, fpCountOut, fnCountOut,
exampleCount); // cache for subsequent calls
return prCurve;
} } | public class class_name {
public PrecisionRecallCurve getPrecisionRecallCurve() {
if (prCurve != null) {
return prCurve; // depends on control dependency: [if], data = [none]
}
double[] thresholdOut;
double[] precisionOut;
double[] recallOut;
int[] tpCountOut;
int[] fpCountOut;
int[] fnCountOut;
if (isExact) {
INDArray pl = getProbAndLabelUsed();
INDArray sorted = Nd4j.sortRows(pl, 0, false);
INDArray isPositive = sorted.getColumn(1,true);
INDArray cumSumPos = isPositive.cumsum(-1);
val length = sorted.size(0);
/*
Sort descending. As we iterate: decrease probability threshold T... all values <= T are predicted
as class 0, all others are predicted as class 1
Precision: sum(TP) / sum(predicted pos at threshold)
Recall: sum(TP) / total actual positives
predicted positive at threshold: # values <= threshold, i.e., just i
*/
INDArray t = Nd4j.create(DataType.DOUBLE, length + 2, 1);
t.put(new INDArrayIndex[]{interval(1, length + 1), all()}, sorted.getColumn(0,true)); // depends on control dependency: [if], data = [none]
INDArray linspace = Nd4j.linspace(1, length, length, DataType.DOUBLE);
INDArray precision = cumSumPos.castTo(DataType.DOUBLE).div(linspace.reshape(cumSumPos.shape()));
INDArray prec = Nd4j.create(DataType.DOUBLE, length + 2, 1);
prec.put(new INDArrayIndex[]{interval(1, length + 1), all()}, precision); // depends on control dependency: [if], data = [none]
//Recall/TPR
INDArray rec = Nd4j.create(DataType.DOUBLE, length + 2, 1);
rec.put(new INDArrayIndex[]{interval(1, length + 1), all()},
cumSumPos.div(countActualPositive)); // depends on control dependency: [if], data = [none]
//Edge cases
t.putScalar(0, 0, 1.0); // depends on control dependency: [if], data = [none]
prec.putScalar(0, 0, 1.0); // depends on control dependency: [if], data = [none]
rec.putScalar(0, 0, 0.0); // depends on control dependency: [if], data = [none]
prec.putScalar(length + 1, 0, cumSumPos.getDouble(cumSumPos.length() - 1) / length); // depends on control dependency: [if], data = [none]
rec.putScalar(length + 1, 0, 1.0); // depends on control dependency: [if], data = [none]
thresholdOut = t.data().asDouble(); // depends on control dependency: [if], data = [none]
precisionOut = prec.data().asDouble(); // depends on control dependency: [if], data = [none]
recallOut = rec.data().asDouble(); // depends on control dependency: [if], data = [none]
//Counts. Note the edge cases
tpCountOut = new int[thresholdOut.length]; // depends on control dependency: [if], data = [none]
fpCountOut = new int[thresholdOut.length]; // depends on control dependency: [if], data = [none]
fnCountOut = new int[thresholdOut.length]; // depends on control dependency: [if], data = [none]
for (int i = 1; i < tpCountOut.length - 1; i++) {
tpCountOut[i] = cumSumPos.getInt(i - 1); // depends on control dependency: [for], data = [i]
fpCountOut[i] = i - tpCountOut[i]; //predicted positive - true positive // depends on control dependency: [for], data = [i]
fnCountOut[i] = (int) countActualPositive - tpCountOut[i]; // depends on control dependency: [for], data = [i]
}
//Edge cases: last idx -> threshold of 0.0, all predicted positive
tpCountOut[tpCountOut.length - 1] = (int) countActualPositive; // depends on control dependency: [if], data = [none]
fpCountOut[tpCountOut.length - 1] = (int) (exampleCount - countActualPositive); // depends on control dependency: [if], data = [none]
fnCountOut[tpCountOut.length - 1] = 0; // depends on control dependency: [if], data = [none]
//Edge case: first idx -> threshold of 1.0, all predictions negative
tpCountOut[0] = 0; // depends on control dependency: [if], data = [none]
fpCountOut[0] = 0; //(int)(exampleCount - countActualPositive); //All negatives are predicted positive // depends on control dependency: [if], data = [none]
fnCountOut[0] = (int) countActualPositive; // depends on control dependency: [if], data = [none]
//Finally: 2 things to do
//(a) Reverse order: lowest to highest threshold
//(b) remove unnecessary/rendundant points (doesn't affect graph or AUPRC)
ArrayUtils.reverse(thresholdOut); // depends on control dependency: [if], data = [none]
ArrayUtils.reverse(precisionOut); // depends on control dependency: [if], data = [none]
ArrayUtils.reverse(recallOut); // depends on control dependency: [if], data = [none]
ArrayUtils.reverse(tpCountOut); // depends on control dependency: [if], data = [none]
ArrayUtils.reverse(fpCountOut); // depends on control dependency: [if], data = [none]
ArrayUtils.reverse(fnCountOut); // depends on control dependency: [if], data = [none]
if (rocRemoveRedundantPts) {
Pair<double[][], int[][]> pair = removeRedundant(thresholdOut, precisionOut, recallOut, tpCountOut,
fpCountOut, fnCountOut);
double[][] temp = pair.getFirst();
int[][] temp2 = pair.getSecond();
thresholdOut = temp[0]; // depends on control dependency: [if], data = [none]
precisionOut = temp[1]; // depends on control dependency: [if], data = [none]
recallOut = temp[2]; // depends on control dependency: [if], data = [none]
tpCountOut = temp2[0]; // depends on control dependency: [if], data = [none]
fpCountOut = temp2[1]; // depends on control dependency: [if], data = [none]
fnCountOut = temp2[2]; // depends on control dependency: [if], data = [none]
}
} else {
thresholdOut = new double[counts.size()]; // depends on control dependency: [if], data = [none]
precisionOut = new double[counts.size()]; // depends on control dependency: [if], data = [none]
recallOut = new double[counts.size()]; // depends on control dependency: [if], data = [none]
tpCountOut = new int[counts.size()]; // depends on control dependency: [if], data = [none]
fpCountOut = new int[counts.size()]; // depends on control dependency: [if], data = [none]
fnCountOut = new int[counts.size()]; // depends on control dependency: [if], data = [none]
int i = 0;
for (Map.Entry<Double, CountsForThreshold> entry : counts.entrySet()) {
double t = entry.getKey();
CountsForThreshold c = entry.getValue();
long tpCount = c.getCountTruePositive();
long fpCount = c.getCountFalsePositive();
//For edge cases: http://stats.stackexchange.com/questions/1773/what-are-correct-values-for-precision-and-recall-in-edge-cases
//precision == 1 when FP = 0 -> no incorrect positive predictions
//recall == 1 when no dataset positives are present (got all 0 of 0 positives)
double precision;
if (tpCount == 0 && fpCount == 0) {
//At this threshold: no predicted positive cases
precision = 1.0; // depends on control dependency: [if], data = [none]
} else {
precision = tpCount / (double) (tpCount + fpCount); // depends on control dependency: [if], data = [(tpCount]
}
double recall;
if (countActualPositive == 0) {
recall = 1.0; // depends on control dependency: [if], data = [none]
} else {
recall = tpCount / ((double) countActualPositive); // depends on control dependency: [if], data = [none]
}
thresholdOut[i] = c.getThreshold(); // depends on control dependency: [for], data = [none]
precisionOut[i] = precision; // depends on control dependency: [for], data = [none]
recallOut[i] = recall; // depends on control dependency: [for], data = [none]
tpCountOut[i] = (int) tpCount; // depends on control dependency: [for], data = [none]
fpCountOut[i] = (int) fpCount; // depends on control dependency: [for], data = [none]
fnCountOut[i] = (int) (countActualPositive - tpCount); // depends on control dependency: [for], data = [none]
i++; // depends on control dependency: [for], data = [none]
}
}
prCurve = new PrecisionRecallCurve(thresholdOut, precisionOut, recallOut, tpCountOut, fpCountOut, fnCountOut,
exampleCount);
return prCurve;
} } |
public class class_name {
private void processVirtualHostName(HttpMethod httpMethodProxyRequest, HttpServletRequest httpServletRequest) {
String virtualHostName;
if (httpMethodProxyRequest.getRequestHeader(STRING_HOST_HEADER_NAME) != null) {
virtualHostName = HttpUtilities.removePortFromHostHeaderString(httpMethodProxyRequest.getRequestHeader(STRING_HOST_HEADER_NAME).getValue());
} else {
virtualHostName = HttpUtilities.getHostNameFromURL(httpServletRequest.getRequestURL().toString());
}
httpMethodProxyRequest.getParams().setVirtualHost(virtualHostName);
} } | public class class_name {
private void processVirtualHostName(HttpMethod httpMethodProxyRequest, HttpServletRequest httpServletRequest) {
String virtualHostName;
if (httpMethodProxyRequest.getRequestHeader(STRING_HOST_HEADER_NAME) != null) {
virtualHostName = HttpUtilities.removePortFromHostHeaderString(httpMethodProxyRequest.getRequestHeader(STRING_HOST_HEADER_NAME).getValue()); // depends on control dependency: [if], data = [(httpMethodProxyRequest.getRequestHeader(STRING_HOST_HEADER_NAME)]
} else {
virtualHostName = HttpUtilities.getHostNameFromURL(httpServletRequest.getRequestURL().toString()); // depends on control dependency: [if], data = [none]
}
httpMethodProxyRequest.getParams().setVirtualHost(virtualHostName);
} } |
public class class_name {
private void sendOrPublishMessage(Message message) {
if (endpointConfiguration.isPubSubDomain()) {
if (log.isDebugEnabled()) {
log.debug("Publish Vert.x event bus message to address: '" + endpointConfiguration.getAddress() + "'");
}
vertx.eventBus().publish(endpointConfiguration.getAddress(), message.getPayload());
} else {
if (log.isDebugEnabled()) {
log.debug("Sending Vert.x event bus message to address: '" + endpointConfiguration.getAddress() + "'");
}
vertx.eventBus().send(endpointConfiguration.getAddress(), message.getPayload());
}
} } | public class class_name {
private void sendOrPublishMessage(Message message) {
if (endpointConfiguration.isPubSubDomain()) {
if (log.isDebugEnabled()) {
log.debug("Publish Vert.x event bus message to address: '" + endpointConfiguration.getAddress() + "'"); // depends on control dependency: [if], data = [none]
}
vertx.eventBus().publish(endpointConfiguration.getAddress(), message.getPayload()); // depends on control dependency: [if], data = [none]
} else {
if (log.isDebugEnabled()) {
log.debug("Sending Vert.x event bus message to address: '" + endpointConfiguration.getAddress() + "'"); // depends on control dependency: [if], data = [none]
}
vertx.eventBus().send(endpointConfiguration.getAddress(), message.getPayload()); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@Override
public void onCompleted(WorkflowRun workflowRun, @Nonnull TaskListener listener) {
super.onCompleted(workflowRun, listener);
// Note: run.duration is zero in onCompleted(), do the substraction in this listener
Result result = workflowRun.getResult();
if (result == null) {
result = Result.SUCCESS; // FIXME more elegant handling
}
globalPipelineMavenConfig.getDao().updateBuildOnCompletion(
workflowRun.getParent().getFullName(),
workflowRun.getNumber(),
result.ordinal,
workflowRun.getStartTimeInMillis(),
Math.max(System.currentTimeMillis() - workflowRun.getStartTimeInMillis(), 0)); // @see HUDSON-5844
} } | public class class_name {
@Override
public void onCompleted(WorkflowRun workflowRun, @Nonnull TaskListener listener) {
super.onCompleted(workflowRun, listener);
// Note: run.duration is zero in onCompleted(), do the substraction in this listener
Result result = workflowRun.getResult();
if (result == null) {
result = Result.SUCCESS; // FIXME more elegant handling // depends on control dependency: [if], data = [none]
}
globalPipelineMavenConfig.getDao().updateBuildOnCompletion(
workflowRun.getParent().getFullName(),
workflowRun.getNumber(),
result.ordinal,
workflowRun.getStartTimeInMillis(),
Math.max(System.currentTimeMillis() - workflowRun.getStartTimeInMillis(), 0)); // @see HUDSON-5844
} } |
public class class_name {
public static String fixupScheme(String url) {
final String[] SCHEMES = {
"http:/", "https:/", "ftp:/", "rtsp:/", "mms:/"
};
int ul = url.length();
for (String scheme : SCHEMES) {
int sl = scheme.length();
if (url.startsWith(scheme) && (ul == sl || url.charAt(sl) != '/')) {
return scheme + "/" + url.substring(sl);
}
}
return url;
} } | public class class_name {
public static String fixupScheme(String url) {
final String[] SCHEMES = {
"http:/", "https:/", "ftp:/", "rtsp:/", "mms:/"
};
int ul = url.length();
for (String scheme : SCHEMES) {
int sl = scheme.length();
if (url.startsWith(scheme) && (ul == sl || url.charAt(sl) != '/')) {
return scheme + "/" + url.substring(sl); // depends on control dependency: [if], data = [none]
}
}
return url;
} } |
public class class_name {
private void addSoapHeaders(SoapMessage response, Message replyMessage) throws TransformerException {
for (Entry<String, Object> headerEntry : replyMessage.getHeaders().entrySet()) {
if (MessageHeaderUtils.isSpringInternalHeader(headerEntry.getKey()) ||
headerEntry.getKey().startsWith(DEFAULT_JMS_HEADER_PREFIX)) {
continue;
}
if (headerEntry.getKey().equalsIgnoreCase(SoapMessageHeaders.SOAP_ACTION)) {
response.setSoapAction(headerEntry.getValue().toString());
} else if (!headerEntry.getKey().startsWith(MessageHeaders.PREFIX)) {
SoapHeaderElement headerElement;
if (QNameUtils.validateQName(headerEntry.getKey())) {
QName qname = QNameUtils.parseQNameString(headerEntry.getKey());
if (StringUtils.hasText(qname.getNamespaceURI())) {
headerElement = response.getSoapHeader().addHeaderElement(qname);
} else {
headerElement = response.getSoapHeader().addHeaderElement(getDefaultQName(headerEntry.getKey()));
}
} else {
throw new SoapHeaderException("Failed to add SOAP header '" + headerEntry.getKey() + "', " +
"because of invalid QName");
}
headerElement.setText(headerEntry.getValue().toString());
}
}
for (String headerData : replyMessage.getHeaderData()) {
TransformerFactory transformerFactory = TransformerFactory.newInstance();
Transformer transformer = transformerFactory.newTransformer();
transformer.transform(new StringSource(headerData),
response.getSoapHeader().getResult());
}
} } | public class class_name {
private void addSoapHeaders(SoapMessage response, Message replyMessage) throws TransformerException {
for (Entry<String, Object> headerEntry : replyMessage.getHeaders().entrySet()) {
if (MessageHeaderUtils.isSpringInternalHeader(headerEntry.getKey()) ||
headerEntry.getKey().startsWith(DEFAULT_JMS_HEADER_PREFIX)) {
continue;
}
if (headerEntry.getKey().equalsIgnoreCase(SoapMessageHeaders.SOAP_ACTION)) {
response.setSoapAction(headerEntry.getValue().toString());
} else if (!headerEntry.getKey().startsWith(MessageHeaders.PREFIX)) {
SoapHeaderElement headerElement;
if (QNameUtils.validateQName(headerEntry.getKey())) {
QName qname = QNameUtils.parseQNameString(headerEntry.getKey());
if (StringUtils.hasText(qname.getNamespaceURI())) {
headerElement = response.getSoapHeader().addHeaderElement(qname); // depends on control dependency: [if], data = [none]
} else {
headerElement = response.getSoapHeader().addHeaderElement(getDefaultQName(headerEntry.getKey())); // depends on control dependency: [if], data = [none]
}
} else {
throw new SoapHeaderException("Failed to add SOAP header '" + headerEntry.getKey() + "', " +
"because of invalid QName");
}
headerElement.setText(headerEntry.getValue().toString());
}
}
for (String headerData : replyMessage.getHeaderData()) {
TransformerFactory transformerFactory = TransformerFactory.newInstance();
Transformer transformer = transformerFactory.newTransformer();
transformer.transform(new StringSource(headerData),
response.getSoapHeader().getResult());
}
} } |
public class class_name {
private boolean scopeUpdatePermitted(final AnalysisJobBuilder sourceAnalysisJobBuilder,
final ComponentBuilder componentBuilder) {
if (sourceAnalysisJobBuilder != componentBuilder.getAnalysisJobBuilder()) {
if (componentBuilder.getInput().length > 0 || componentBuilder.getComponentRequirement() != null) {
final String scopeText;
scopeText = LabelUtils.getScopeLabel(sourceAnalysisJobBuilder);
final int response = JOptionPane.showConfirmDialog(_graphContext.getVisualizationViewer(),
"This will move " + LabelUtils.getLabel(componentBuilder) + " into the " + scopeText
+ ", thereby losing its configured columns and/or requirements", "Change scope?",
JOptionPane.OK_CANCEL_OPTION, JOptionPane.WARNING_MESSAGE);
if (response == JOptionPane.CANCEL_OPTION) {
_graphContext.getJobGraph().refresh();
return false;
}
}
}
return true;
} } | public class class_name {
private boolean scopeUpdatePermitted(final AnalysisJobBuilder sourceAnalysisJobBuilder,
final ComponentBuilder componentBuilder) {
if (sourceAnalysisJobBuilder != componentBuilder.getAnalysisJobBuilder()) {
if (componentBuilder.getInput().length > 0 || componentBuilder.getComponentRequirement() != null) {
final String scopeText;
scopeText = LabelUtils.getScopeLabel(sourceAnalysisJobBuilder); // depends on control dependency: [if], data = [none]
final int response = JOptionPane.showConfirmDialog(_graphContext.getVisualizationViewer(),
"This will move " + LabelUtils.getLabel(componentBuilder) + " into the " + scopeText
+ ", thereby losing its configured columns and/or requirements", "Change scope?",
JOptionPane.OK_CANCEL_OPTION, JOptionPane.WARNING_MESSAGE);
if (response == JOptionPane.CANCEL_OPTION) {
_graphContext.getJobGraph().refresh(); // depends on control dependency: [if], data = [none]
return false; // depends on control dependency: [if], data = [none]
}
}
}
return true;
} } |
public class class_name {
protected void runStatements(Reader reader, PrintStream out)
throws IOException {
log.debug("runStatements()");
StringBuilder txt = new StringBuilder();
String line = "";
BufferedReader in = new BufferedReader(reader);
while ((line = in.readLine()) != null) {
line = getProject().replaceProperties(line);
if (line.indexOf("--") >= 0) {
txt.append("\n");
}
}
// Catch any statements not followed by ;
if (!txt.toString().equals("")) {
execGroovy(txt.toString(), out);
}
} } | public class class_name {
protected void runStatements(Reader reader, PrintStream out)
throws IOException {
log.debug("runStatements()");
StringBuilder txt = new StringBuilder();
String line = "";
BufferedReader in = new BufferedReader(reader);
while ((line = in.readLine()) != null) {
line = getProject().replaceProperties(line);
if (line.indexOf("--") >= 0) {
txt.append("\n"); // depends on control dependency: [if], data = [none]
}
}
// Catch any statements not followed by ;
if (!txt.toString().equals("")) {
execGroovy(txt.toString(), out);
}
} } |
public class class_name {
@NullSafe
@SuppressWarnings("unchecked")
public static <T> T[] asArray(Iterable<T> iterable, Class<T> componentType) {
List<T> arrayList = new ArrayList<>();
for (T element : CollectionUtils.nullSafeIterable(iterable)) {
arrayList.add(element);
}
return arrayList.toArray((T[]) Array.newInstance(defaultIfNull(componentType, Object.class), arrayList.size()));
} } | public class class_name {
@NullSafe
@SuppressWarnings("unchecked")
public static <T> T[] asArray(Iterable<T> iterable, Class<T> componentType) {
List<T> arrayList = new ArrayList<>();
for (T element : CollectionUtils.nullSafeIterable(iterable)) {
arrayList.add(element); // depends on control dependency: [for], data = [element]
}
return arrayList.toArray((T[]) Array.newInstance(defaultIfNull(componentType, Object.class), arrayList.size()));
} } |
public class class_name {
static void shutdownZkClient(AvatarZooKeeperClient zk) {
if (zk != null) {
try {
zk.shutdown();
} catch (InterruptedException e) {
LOG.error("Error shutting down ZooKeeper client", e);
}
}
} } | public class class_name {
static void shutdownZkClient(AvatarZooKeeperClient zk) {
if (zk != null) {
try {
zk.shutdown(); // depends on control dependency: [try], data = [none]
} catch (InterruptedException e) {
LOG.error("Error shutting down ZooKeeper client", e);
} // depends on control dependency: [catch], data = [none]
}
} } |
public class class_name {
public JsonWriter valueObject(@Nullable Object value) {
try {
if (value == null) {
stream.nullValue();
return this;
}
valueNonNullObject(value);
return this;
} catch (IllegalArgumentException e) {
throw e;
} catch (Exception e) {
throw rethrow(e);
}
} } | public class class_name {
public JsonWriter valueObject(@Nullable Object value) {
try {
if (value == null) {
stream.nullValue(); // depends on control dependency: [if], data = [none]
return this; // depends on control dependency: [if], data = [none]
}
valueNonNullObject(value); // depends on control dependency: [try], data = [none]
return this; // depends on control dependency: [try], data = [none]
} catch (IllegalArgumentException e) {
throw e;
} catch (Exception e) { // depends on control dependency: [catch], data = [none]
throw rethrow(e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
protected static void main(String[] args, CliTool... commands) {
if (commands.length == 1) {
main(args, commands[0]);
} else {
JCommander jc = new JCommander();
for (CliTool command : commands) {
jc.addCommand(command);
}
jc.addConverterFactory(new CustomParameterConverters());
jc.setProgramName("");
ExitStatus exitStatus = ExitStatus.SUCCESS;
try {
jc.parse(args);
final String commandName = jc.getParsedCommand();
if (commandName == null) {
helpDisplayCommandOptions(System.err, jc);
} else {
List<Object> objects = jc.getCommands().get(commandName).getObjects();
if (objects.size() != 1) {
throw new RuntimeException();
}
CliTool command = CliTool.class.cast(objects.get(0));
exitStatus = command.call();
if (command.callSystemExit) {
System.exit(exitStatus.code);
}
}
} catch (ExitStatusException e) {
System.err.println(e.getMessage());
if (e.getCause() != null) {
e.getCause().printStackTrace(System.err);
}
exitStatus = e.exitStatus;
} catch (MissingCommandException e) {
System.err.println("Invalid argument: " + e);
System.err.println();
helpDisplayCommandOptions(System.err, jc);
exitStatus = ExitStatus.ERROR_INVALID_ARGUMENTS;
} catch (ParameterException e) {
System.err.println("Invalid argument: " + e.getMessage());
System.err.println();
if (jc.getParsedCommand() == null) {
helpDisplayCommandOptions(System.err, jc);
} else {
helpDisplayCommandOptions(System.err, jc.getParsedCommand(), jc);
}
exitStatus = ExitStatus.ERROR_INVALID_ARGUMENTS;
} catch (Throwable t) {
System.err.println("An unhandled exception occurred. Stack trace below.");
t.printStackTrace(System.err);
exitStatus = ExitStatus.ERROR_OTHER;
}
}
} } | public class class_name {
protected static void main(String[] args, CliTool... commands) {
if (commands.length == 1) {
main(args, commands[0]);
// depends on control dependency: [if], data = [none]
} else {
JCommander jc = new JCommander();
for (CliTool command : commands) {
jc.addCommand(command);
// depends on control dependency: [for], data = [command]
}
jc.addConverterFactory(new CustomParameterConverters());
// depends on control dependency: [if], data = [none]
jc.setProgramName("");
// depends on control dependency: [if], data = [none]
ExitStatus exitStatus = ExitStatus.SUCCESS;
try {
jc.parse(args);
// depends on control dependency: [try], data = [none]
final String commandName = jc.getParsedCommand();
if (commandName == null) {
helpDisplayCommandOptions(System.err, jc);
// depends on control dependency: [if], data = [none]
} else {
List<Object> objects = jc.getCommands().get(commandName).getObjects();
if (objects.size() != 1) {
throw new RuntimeException();
}
CliTool command = CliTool.class.cast(objects.get(0));
exitStatus = command.call();
// depends on control dependency: [if], data = [none]
if (command.callSystemExit) {
System.exit(exitStatus.code);
// depends on control dependency: [if], data = [none]
}
}
} catch (ExitStatusException e) {
System.err.println(e.getMessage());
if (e.getCause() != null) {
e.getCause().printStackTrace(System.err);
// depends on control dependency: [if], data = [none]
}
exitStatus = e.exitStatus;
} catch (MissingCommandException e) {
// depends on control dependency: [catch], data = [none]
System.err.println("Invalid argument: " + e);
System.err.println();
helpDisplayCommandOptions(System.err, jc);
exitStatus = ExitStatus.ERROR_INVALID_ARGUMENTS;
} catch (ParameterException e) {
// depends on control dependency: [catch], data = [none]
System.err.println("Invalid argument: " + e.getMessage());
System.err.println();
if (jc.getParsedCommand() == null) {
helpDisplayCommandOptions(System.err, jc);
// depends on control dependency: [if], data = [none]
} else {
helpDisplayCommandOptions(System.err, jc.getParsedCommand(), jc);
// depends on control dependency: [if], data = [none]
}
exitStatus = ExitStatus.ERROR_INVALID_ARGUMENTS;
} catch (Throwable t) {
// depends on control dependency: [catch], data = [none]
System.err.println("An unhandled exception occurred. Stack trace below.");
t.printStackTrace(System.err);
exitStatus = ExitStatus.ERROR_OTHER;
}
// depends on control dependency: [catch], data = [none]
}
} } |
public class class_name {
@Override
public GeoShapeCondition build() {
try {
return new GeoShapeCondition(boost, field, shape, operation);
} catch (NoClassDefFoundError e) {
throw new JTSNotFoundException();
}
} } | public class class_name {
@Override
public GeoShapeCondition build() {
try {
return new GeoShapeCondition(boost, field, shape, operation); // depends on control dependency: [try], data = [none]
} catch (NoClassDefFoundError e) {
throw new JTSNotFoundException();
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
static long calcInterval(int segments, int start, int limit) {
long range = limit - start;
if (range < 0) {
return 0;
}
int partSegment = (range % segments) == 0 ? 0
: 1;
return (range / segments) + partSegment;
} } | public class class_name {
static long calcInterval(int segments, int start, int limit) {
long range = limit - start;
if (range < 0) {
return 0; // depends on control dependency: [if], data = [none]
}
int partSegment = (range % segments) == 0 ? 0
: 1;
return (range / segments) + partSegment;
} } |
public class class_name {
@Deprecated
public Set<Class<?>> findComponentRoleClasses(Class<?> componentClass)
{
// Note: We use a Set to ensure that we don't register duplicate roles.
Set<Class<?>> classes = new LinkedHashSet<>();
Component component = componentClass.getAnnotation(Component.class);
if (component != null && component.roles().length > 0) {
classes.addAll(Arrays.asList(component.roles()));
} else {
// Look in both superclass and interfaces for @Role or javax.inject.Provider
for (Class<?> interfaceClass : componentClass.getInterfaces()) {
// Handle superclass of interfaces
classes.addAll(findComponentRoleClasses(interfaceClass));
// Handle interfaces directly declared in the passed component class
for (Annotation annotation : interfaceClass.getDeclaredAnnotations()) {
if (annotation.annotationType() == ComponentRole.class) {
classes.add(interfaceClass);
}
}
// Handle javax.inject.Provider
if (Provider.class.isAssignableFrom(interfaceClass)) {
classes.add(interfaceClass);
}
}
// Note that we need to look into the superclass since the super class can itself implements an interface
// that has the @Role annotation.
Class<?> superClass = componentClass.getSuperclass();
if (superClass != null && superClass != Object.class) {
classes.addAll(findComponentRoleClasses(superClass));
}
}
return classes;
} } | public class class_name {
@Deprecated
public Set<Class<?>> findComponentRoleClasses(Class<?> componentClass)
{
// Note: We use a Set to ensure that we don't register duplicate roles.
Set<Class<?>> classes = new LinkedHashSet<>();
Component component = componentClass.getAnnotation(Component.class);
if (component != null && component.roles().length > 0) {
classes.addAll(Arrays.asList(component.roles()));
} else {
// Look in both superclass and interfaces for @Role or javax.inject.Provider
for (Class<?> interfaceClass : componentClass.getInterfaces()) {
// Handle superclass of interfaces
classes.addAll(findComponentRoleClasses(interfaceClass)); // depends on control dependency: [for], data = [interfaceClass]
// Handle interfaces directly declared in the passed component class
for (Annotation annotation : interfaceClass.getDeclaredAnnotations()) {
if (annotation.annotationType() == ComponentRole.class) {
classes.add(interfaceClass); // depends on control dependency: [if], data = [none]
}
}
// Handle javax.inject.Provider
if (Provider.class.isAssignableFrom(interfaceClass)) {
classes.add(interfaceClass); // depends on control dependency: [if], data = [none]
}
}
// Note that we need to look into the superclass since the super class can itself implements an interface
// that has the @Role annotation.
Class<?> superClass = componentClass.getSuperclass();
if (superClass != null && superClass != Object.class) {
classes.addAll(findComponentRoleClasses(superClass)); // depends on control dependency: [if], data = [(superClass]
}
}
return classes;
} } |
public class class_name {
public final void setMaxWidth(final int maxWidth) {
if (maxWidth != -1) {
Condition.INSTANCE.ensureAtLeast(maxWidth, 1, "The maximum width must be at least 1");
}
this.maxWidth = maxWidth;
requestLayout();
} } | public class class_name {
public final void setMaxWidth(final int maxWidth) {
if (maxWidth != -1) {
Condition.INSTANCE.ensureAtLeast(maxWidth, 1, "The maximum width must be at least 1"); // depends on control dependency: [if], data = [(maxWidth]
}
this.maxWidth = maxWidth;
requestLayout();
} } |
public class class_name {
public String getName() {
if (name == null) {
name = JKMessage.get(getJavaType().getSimpleName());
}
return name;
} } | public class class_name {
public String getName() {
if (name == null) {
name = JKMessage.get(getJavaType().getSimpleName());
// depends on control dependency: [if], data = [none]
}
return name;
} } |
public class class_name {
protected String safeGetMessage(MessageEnvelope envelope) {
if (envelope != null && envelope.getMessage() != null
&& envelope.getMessage().getText() != null
&& envelope.getMessage().getQuickReply() == null) {
return envelope.getMessage().getText();
}
return "";
} } | public class class_name {
protected String safeGetMessage(MessageEnvelope envelope) {
if (envelope != null && envelope.getMessage() != null
&& envelope.getMessage().getText() != null
&& envelope.getMessage().getQuickReply() == null) {
return envelope.getMessage().getText(); // depends on control dependency: [if], data = [none]
}
return "";
} } |
public class class_name {
public static Callable getValueFunctionAndThis(Object value, Context cx)
{
if (!(value instanceof Callable)) {
throw notFunctionError(value);
}
Callable f = (Callable)value;
Scriptable thisObj = null;
if (f instanceof Scriptable) {
thisObj = ((Scriptable)f).getParentScope();
}
if (thisObj == null) {
if (cx.topCallScope == null) throw new IllegalStateException();
thisObj = cx.topCallScope;
}
if (thisObj.getParentScope() != null) {
if (thisObj instanceof NativeWith) {
// functions defined inside with should have with target
// as their thisObj
} else if (thisObj instanceof NativeCall) {
// nested functions should have top scope as their thisObj
thisObj = ScriptableObject.getTopLevelScope(thisObj);
}
}
storeScriptable(cx, thisObj);
return f;
} } | public class class_name {
public static Callable getValueFunctionAndThis(Object value, Context cx)
{
if (!(value instanceof Callable)) {
throw notFunctionError(value);
}
Callable f = (Callable)value;
Scriptable thisObj = null;
if (f instanceof Scriptable) {
thisObj = ((Scriptable)f).getParentScope(); // depends on control dependency: [if], data = [none]
}
if (thisObj == null) {
if (cx.topCallScope == null) throw new IllegalStateException();
thisObj = cx.topCallScope; // depends on control dependency: [if], data = [none]
}
if (thisObj.getParentScope() != null) {
if (thisObj instanceof NativeWith) {
// functions defined inside with should have with target
// as their thisObj
} else if (thisObj instanceof NativeCall) {
// nested functions should have top scope as their thisObj
thisObj = ScriptableObject.getTopLevelScope(thisObj); // depends on control dependency: [if], data = [none]
}
}
storeScriptable(cx, thisObj);
return f;
} } |
public class class_name {
@Override
public void check(final List<TransportCandidate> localCandidates) {
// TODO candidate is being checked trigger
// candidatesChecking.add(cand);
final ICECandidate checkingCandidate = this;
Thread checkThread = new Thread(new Runnable() {
@Override
public void run() {
final TestResult result = new TestResult();
// Media Proxy don't have Echo features.
// If its a relayed candidate we assumed that is NOT Valid while other candidates still being checked.
// The negotiator MUST add then in the correct situations
if (getType().equals(Type.relay)) {
triggerCandidateChecked(false);
return;
}
ResultListener resultListener = new ResultListener() {
@Override
public void testFinished(TestResult testResult, TransportCandidate candidate) {
if (testResult.isReachable() && checkingCandidate.equals(candidate)) {
result.setResult(true);
LOGGER.fine("Candidate reachable: " + candidate.getIp() + ":" + candidate.getPort() + " from " + getIp() + ":" + getPort());
}
}
};
for (TransportCandidate candidate : localCandidates) {
CandidateEcho echo = candidate.getCandidateEcho();
if (echo != null) {
if (candidate instanceof ICECandidate) {
ICECandidate iceCandidate = (ICECandidate) candidate;
if (iceCandidate.getType().equals(getType())) {
try {
echo.addResultListener(resultListener);
InetAddress address = InetAddress.getByName(getIp());
echo.testASync(checkingCandidate, getPassword());
}
catch (UnknownHostException e) {
LOGGER.log(Level.WARNING, "exception", e);
}
}
}
}
}
for (int i = 0; i < 10 && !result.isReachable(); i++)
try {
LOGGER.severe("ICE Candidate retry #" + i);
Thread.sleep(400);
}
catch (InterruptedException e) {
LOGGER.log(Level.WARNING, "exception", e);
}
for (TransportCandidate candidate : localCandidates) {
CandidateEcho echo = candidate.getCandidateEcho();
if (echo != null) {
echo.removeResultListener(resultListener);
}
}
triggerCandidateChecked(result.isReachable());
// TODO candidate is being checked trigger
// candidatesChecking.remove(cand);
}
}, "Transport candidate check");
checkThread.setName("Transport candidate test");
checkThread.start();
} } | public class class_name {
// Control-dependency-annotated copy of ICECandidate.check(): the code is identical to the
// clean version; each trailing "// depends on control dependency" note records which control
// construct (if/for/while/try/catch) guards that statement. Keep them intact — they are data.
// The method itself spawns a background thread that echo-tests this candidate against the
// matching local candidates, retries up to 10 times, then reports reachability.
@Override
public void check(final List<TransportCandidate> localCandidates) {
// TODO candidate is being checked trigger
// candidatesChecking.add(cand);
final ICECandidate checkingCandidate = this;
Thread checkThread = new Thread(new Runnable() {
@Override
public void run() {
final TestResult result = new TestResult();
// Media Proxy don't have Echo features.
// If its a relayed candidate we assumed that is NOT Valid while other candidates still being checked.
// The negotiator MUST add then in the correct situations
if (getType().equals(Type.relay)) {
triggerCandidateChecked(false); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
ResultListener resultListener = new ResultListener() {
@Override
public void testFinished(TestResult testResult, TransportCandidate candidate) {
if (testResult.isReachable() && checkingCandidate.equals(candidate)) {
result.setResult(true);
LOGGER.fine("Candidate reachable: " + candidate.getIp() + ":" + candidate.getPort() + " from " + getIp() + ":" + getPort());
}
}
};
for (TransportCandidate candidate : localCandidates) {
CandidateEcho echo = candidate.getCandidateEcho();
if (echo != null) {
if (candidate instanceof ICECandidate) {
ICECandidate iceCandidate = (ICECandidate) candidate;
if (iceCandidate.getType().equals(getType())) {
try {
echo.addResultListener(resultListener);
InetAddress address = InetAddress.getByName(getIp());
echo.testASync(checkingCandidate, getPassword());
}
catch (UnknownHostException e) {
LOGGER.log(Level.WARNING, "exception", e);
}
}
}
}
}
for (int i = 0; i < 10 && !result.isReachable(); i++)
try {
LOGGER.severe("ICE Candidate retry #" + i);
Thread.sleep(400);
}
catch (InterruptedException e) {
LOGGER.log(Level.WARNING, "exception", e);
}
for (TransportCandidate candidate : localCandidates) {
CandidateEcho echo = candidate.getCandidateEcho();
if (echo != null) {
echo.removeResultListener(resultListener);
}
}
triggerCandidateChecked(result.isReachable());
// TODO candidate is being checked trigger
// candidatesChecking.remove(cand);
}
}, "Transport candidate check");
checkThread.setName("Transport candidate test");
checkThread.start();
} }
public class class_name {
private void setUpChild(View child) {
// Respect layout params that are already in the view. Otherwise
// make some up...
ViewGroup.LayoutParams lp = child.getLayoutParams();
if (lp == null) {
lp = generateDefaultLayoutParams();
}
addViewInLayout(child, 0, lp);
child.setSelected(hasFocus());
if (mDisableChildrenWhenDisabled) {
child.setEnabled(isEnabled());
}
// Get measure specs
int childHeightSpec = ViewGroup.getChildMeasureSpec(mHeightMeasureSpec,
mSpinnerPadding.top + mSpinnerPadding.bottom, lp.height);
int childWidthSpec = ViewGroup.getChildMeasureSpec(mWidthMeasureSpec,
mSpinnerPadding.left + mSpinnerPadding.right, lp.width);
// Measure child
child.measure(childWidthSpec, childHeightSpec);
int childLeft;
int childRight;
// Position vertically based on gravity setting
int childTop = mSpinnerPadding.top
+ ((getMeasuredHeight() - mSpinnerPadding.bottom -
mSpinnerPadding.top - child.getMeasuredHeight()) / 2);
int childBottom = childTop + child.getMeasuredHeight();
int width = child.getMeasuredWidth();
childLeft = 0;
childRight = childLeft + width;
child.layout(childLeft, childTop, childRight, childBottom);
} } | public class class_name {
// Control-dependency-annotated copy of setUpChild(): code is byte-identical to the clean
// version above it in this file; the trailing annotations mark statements guarded by the
// two if-blocks. Keep the annotations intact — they are the point of this copy.
private void setUpChild(View child) {
// Respect layout params that are already in the view. Otherwise
// make some up...
ViewGroup.LayoutParams lp = child.getLayoutParams();
if (lp == null) {
lp = generateDefaultLayoutParams(); // depends on control dependency: [if], data = [none]
}
addViewInLayout(child, 0, lp);
child.setSelected(hasFocus());
if (mDisableChildrenWhenDisabled) {
child.setEnabled(isEnabled()); // depends on control dependency: [if], data = [none]
}
// Get measure specs
int childHeightSpec = ViewGroup.getChildMeasureSpec(mHeightMeasureSpec,
mSpinnerPadding.top + mSpinnerPadding.bottom, lp.height);
int childWidthSpec = ViewGroup.getChildMeasureSpec(mWidthMeasureSpec,
mSpinnerPadding.left + mSpinnerPadding.right, lp.width);
// Measure child
child.measure(childWidthSpec, childHeightSpec);
int childLeft;
int childRight;
// Position vertically based on gravity setting
int childTop = mSpinnerPadding.top
+ ((getMeasuredHeight() - mSpinnerPadding.bottom -
mSpinnerPadding.top - child.getMeasuredHeight()) / 2);
int childBottom = childTop + child.getMeasuredHeight();
int width = child.getMeasuredWidth();
childLeft = 0;
childRight = childLeft + width;
child.layout(childLeft, childTop, childRight, childBottom);
} }
public class class_name {
public ComposeEnvironmentsRequest withVersionLabels(String... versionLabels) {
if (this.versionLabels == null) {
setVersionLabels(new com.amazonaws.internal.SdkInternalList<String>(versionLabels.length));
}
for (String ele : versionLabels) {
this.versionLabels.add(ele);
}
return this;
} } | public class class_name {
// Control-dependency-annotated copy of withVersionLabels(): byte-identical code; the
// trailing annotations record the guarding construct and data dependency per statement.
public ComposeEnvironmentsRequest withVersionLabels(String... versionLabels) {
if (this.versionLabels == null) {
setVersionLabels(new com.amazonaws.internal.SdkInternalList<String>(versionLabels.length)); // depends on control dependency: [if], data = [none]
}
for (String ele : versionLabels) {
this.versionLabels.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} }
public class class_name {
    /**
     * Interleaves the bits of two ints (Morton / Z-order encoding): bit i of
     * {@code even} is placed at result bit 2*i and bit i of {@code odd} at
     * result bit 2*i+1. Only the lowest bitLength(max(odd, even)) bits are
     * processed, matching the original shift-and-count loop. Inputs wider than
     * 16 significant bits overflow the 32-bit result — pre-existing limitation,
     * unchanged here.
     *
     * @param odd  value whose bits land on odd result positions
     * @param even value whose bits land on even result positions
     * @return the bit-interleaved value
     */
    private static int interleaveBits(int odd, int even) {
        int max = Math.max(odd, even);
        // Bit length of max via numberOfLeadingZeros instead of the manual
        // while (max > 0) { n++; max >>= 1; } loop; a non-positive max yields
        // n == 0, exactly as the original loop did.
        int n = (max > 0) ? Integer.SIZE - Integer.numberOfLeadingZeros(max) : 0;
        int val = 0;
        for (int i = 0; i < n; i++) {
            int bitMask = 1 << i;
            // The two contributions occupy disjoint bit positions, so OR them in
            // instead of the original additive form.
            if ((even & bitMask) > 0) {
                val |= 1 << (2 * i);
            }
            if ((odd & bitMask) > 0) {
                val |= 1 << (2 * i + 1);
            }
        }
        return val;
    }
}
// Control-dependency-annotated copy of interleaveBits(): byte-identical code; the trailing
// annotations mark statements controlled by the while and for loops. Keep them intact.
private static int interleaveBits(int odd, int even) {
int val = 0;
// Replaced this line with the improved code provided by Tuska
// int n = Math.max(Integer.highestOneBit(odd), Integer.highestOneBit(even));
int max = Math.max(odd, even);
int n = 0;
while (max > 0) {
n++; // depends on control dependency: [while], data = [none]
max >>= 1; // depends on control dependency: [while], data = [none]
}
for (int i = 0; i < n; i++) {
int bitMask = 1 << i;
int a = (even & bitMask) > 0 ? (1 << (2 * i)) : 0;
int b = (odd & bitMask) > 0 ? (1 << (2 * i + 1)) : 0;
val += a + b; // depends on control dependency: [for], data = [none]
}
return val;
} }
public class class_name {
// Bulk-loads the given spatial objects into this R-tree-like index. Left as-is for review:
// the page-write ordering (root page written before children) and the height bookkeeping
// are order-sensitive, so only documentation is added here.
// Small inputs (<= leafCapacity) become a single leaf root; otherwise leaf nodes are built
// first and directory levels are stacked until everything fits under one root.
@Override
protected void bulkLoad(List<E> spatialObjects) {
if(!initialized) {
// lazily initialize from the first object (presumably derives dimensionality — TODO confirm)
initialize(spatialObjects.get(0));
}
StringBuilder msg = getLogger().isDebuggingFine() ? new StringBuilder() : null;
// Tiny tree that fits into a single page
if(spatialObjects.size() <= leafCapacity) {
N root = createNewLeafNode();
root.setPageID(getRootID());
writeNode(root);
createRoot(root, spatialObjects);
setHeight(1);
if(msg != null) {
msg.append("\n numNodes = 1");
}
}
// root is directory node
else {
N root = createNewDirectoryNode();
root.setPageID(getRootID());
writeNode(root);
// create leaf nodes
List<E> nodes = createBulkLeafNodes(spatialObjects);
int numNodes = nodes.size();
if(msg != null) {
msg.append("\n numLeafNodes = ").append(numNodes);
}
setHeight(1);
// create directory nodes
while(nodes.size() > (dirCapacity - 1)) {
nodes = createBulkDirectoryNodes(nodes);
numNodes += nodes.size();
setHeight(getHeight() + 1);
}
// create root
createRoot(root, nodes);
numNodes++;
setHeight(getHeight() + 1);
if(msg != null) {
msg.append("\n numNodes = ").append(numNodes);
}
}
if(msg != null) {
msg.append("\n height = ").append(getHeight());
msg.append("\n root ").append(getRoot());
getLogger().debugFine(msg.toString());
}
} }
// Control-dependency-annotated copy of bulkLoad(): byte-identical code; trailing annotations
// record the guarding construct per statement. Keep them intact — they are data.
@Override
protected void bulkLoad(List<E> spatialObjects) {
if(!initialized) {
initialize(spatialObjects.get(0)); // depends on control dependency: [if], data = [none]
}
StringBuilder msg = getLogger().isDebuggingFine() ? new StringBuilder() : null;
// Tiny tree that fits into a single page
if(spatialObjects.size() <= leafCapacity) {
N root = createNewLeafNode();
root.setPageID(getRootID()); // depends on control dependency: [if], data = [none]
writeNode(root); // depends on control dependency: [if], data = [none]
createRoot(root, spatialObjects); // depends on control dependency: [if], data = [none]
setHeight(1); // depends on control dependency: [if], data = [none]
if(msg != null) {
msg.append("\n numNodes = 1"); // depends on control dependency: [if], data = [none]
}
}
// root is directory node
else {
N root = createNewDirectoryNode();
root.setPageID(getRootID()); // depends on control dependency: [if], data = [none]
writeNode(root); // depends on control dependency: [if], data = [none]
// create leaf nodes
List<E> nodes = createBulkLeafNodes(spatialObjects);
int numNodes = nodes.size();
if(msg != null) {
msg.append("\n numLeafNodes = ").append(numNodes); // depends on control dependency: [if], data = [none]
}
setHeight(1); // depends on control dependency: [if], data = [none]
// create directory nodes
while(nodes.size() > (dirCapacity - 1)) {
nodes = createBulkDirectoryNodes(nodes); // depends on control dependency: [while], data = [none]
numNodes += nodes.size(); // depends on control dependency: [while], data = [none]
setHeight(getHeight() + 1); // depends on control dependency: [while], data = [none]
}
// create root
createRoot(root, nodes); // depends on control dependency: [if], data = [none]
numNodes++; // depends on control dependency: [if], data = [none]
setHeight(getHeight() + 1); // depends on control dependency: [if], data = [none]
if(msg != null) {
msg.append("\n numNodes = ").append(numNodes); // depends on control dependency: [if], data = [none]
}
}
if(msg != null) {
msg.append("\n height = ").append(getHeight()); // depends on control dependency: [if], data = [none]
msg.append("\n root ").append(getRoot()); // depends on control dependency: [if], data = [none]
getLogger().debugFine(msg.toString()); // depends on control dependency: [if], data = [(msg]
}
} }
public class class_name {
public FileAppendWriter write(char[] chars) {
try {
writer.write(chars);
} catch (IOException e) {
e.printStackTrace();
}
return this;
} } | public class class_name {
// Control-dependency-annotated copy of write(char[]): byte-identical code; the trailing
// annotations mark the try/catch-controlled statements. Keep them intact.
public FileAppendWriter write(char[] chars) {
try {
writer.write(chars); // depends on control dependency: [try], data = [none]
} catch (IOException e) {
e.printStackTrace();
} // depends on control dependency: [catch], data = [none]
return this;
} }
public class class_name {
// Aggregates one dimension value slot into the per-slot aggregator state held in resultsBuf.
// Left byte-identical for review: the Duff's-device style switch (deliberate fall-through)
// plus the 8-way unrolled loop are performance-sensitive and order-sensitive.
// positions[dimIndex] encodes slot state: SKIP_POSITION_VALUE = ignore, INIT_POSITION_VALUE =
// first sighting (claim the next record offset and init all aggregators), otherwise it is the
// byte offset of the slot's record in resultsBuf.
// aggExtra is presumably aggSize % AGG_UNROLL_COUNT (the unrolled remainder) — TODO confirm;
// returns the possibly-incremented next free record position.
private static int aggregateDimValue(
final int[] positions,
final BufferAggregator[] theAggregators,
final ByteBuffer resultsBuf,
final int numBytesPerRecord,
final int[] aggregatorOffsets,
final int aggSize,
final int aggExtra,
final int dimIndex,
int currentPosition
)
{
if (SKIP_POSITION_VALUE == positions[dimIndex]) {
return currentPosition;
}
if (INIT_POSITION_VALUE == positions[dimIndex]) {
positions[dimIndex] = currentPosition * numBytesPerRecord;
currentPosition++;
final int pos = positions[dimIndex];
for (int j = 0; j < aggSize; ++j) {
theAggregators[j].init(resultsBuf, pos + aggregatorOffsets[j]);
}
}
final int position = positions[dimIndex];
switch (aggExtra) {
case 7:
theAggregators[6].aggregate(resultsBuf, position + aggregatorOffsets[6]);
// fall through
case 6:
theAggregators[5].aggregate(resultsBuf, position + aggregatorOffsets[5]);
// fall through
case 5:
theAggregators[4].aggregate(resultsBuf, position + aggregatorOffsets[4]);
// fall through
case 4:
theAggregators[3].aggregate(resultsBuf, position + aggregatorOffsets[3]);
// fall through
case 3:
theAggregators[2].aggregate(resultsBuf, position + aggregatorOffsets[2]);
// fall through
case 2:
theAggregators[1].aggregate(resultsBuf, position + aggregatorOffsets[1]);
// fall through
case 1:
theAggregators[0].aggregate(resultsBuf, position + aggregatorOffsets[0]);
}
for (int j = aggExtra; j < aggSize; j += AGG_UNROLL_COUNT) {
theAggregators[j].aggregate(resultsBuf, position + aggregatorOffsets[j]);
theAggregators[j + 1].aggregate(resultsBuf, position + aggregatorOffsets[j + 1]);
theAggregators[j + 2].aggregate(resultsBuf, position + aggregatorOffsets[j + 2]);
theAggregators[j + 3].aggregate(resultsBuf, position + aggregatorOffsets[j + 3]);
theAggregators[j + 4].aggregate(resultsBuf, position + aggregatorOffsets[j + 4]);
theAggregators[j + 5].aggregate(resultsBuf, position + aggregatorOffsets[j + 5]);
theAggregators[j + 6].aggregate(resultsBuf, position + aggregatorOffsets[j + 6]);
theAggregators[j + 7].aggregate(resultsBuf, position + aggregatorOffsets[j + 7]);
}
return currentPosition;
} }
// Control-dependency-annotated copy of aggregateDimValue(): byte-identical code; trailing
// annotations record the guarding construct and data dependency per statement. Keep intact.
private static int aggregateDimValue(
final int[] positions,
final BufferAggregator[] theAggregators,
final ByteBuffer resultsBuf,
final int numBytesPerRecord,
final int[] aggregatorOffsets,
final int aggSize,
final int aggExtra,
final int dimIndex,
int currentPosition
)
{
if (SKIP_POSITION_VALUE == positions[dimIndex]) {
return currentPosition; // depends on control dependency: [if], data = [none]
}
if (INIT_POSITION_VALUE == positions[dimIndex]) {
positions[dimIndex] = currentPosition * numBytesPerRecord; // depends on control dependency: [if], data = [none]
currentPosition++; // depends on control dependency: [if], data = [none]
final int pos = positions[dimIndex];
for (int j = 0; j < aggSize; ++j) {
theAggregators[j].init(resultsBuf, pos + aggregatorOffsets[j]); // depends on control dependency: [for], data = [j]
}
}
final int position = positions[dimIndex];
switch (aggExtra) {
case 7:
theAggregators[6].aggregate(resultsBuf, position + aggregatorOffsets[6]);
// fall through
case 6:
theAggregators[5].aggregate(resultsBuf, position + aggregatorOffsets[5]);
// fall through
case 5:
theAggregators[4].aggregate(resultsBuf, position + aggregatorOffsets[4]);
// fall through
case 4:
theAggregators[3].aggregate(resultsBuf, position + aggregatorOffsets[3]);
// fall through
case 3:
theAggregators[2].aggregate(resultsBuf, position + aggregatorOffsets[2]);
// fall through
case 2:
theAggregators[1].aggregate(resultsBuf, position + aggregatorOffsets[1]);
// fall through
case 1:
theAggregators[0].aggregate(resultsBuf, position + aggregatorOffsets[0]);
}
for (int j = aggExtra; j < aggSize; j += AGG_UNROLL_COUNT) {
theAggregators[j].aggregate(resultsBuf, position + aggregatorOffsets[j]); // depends on control dependency: [for], data = [j]
theAggregators[j + 1].aggregate(resultsBuf, position + aggregatorOffsets[j + 1]); // depends on control dependency: [for], data = [j]
theAggregators[j + 2].aggregate(resultsBuf, position + aggregatorOffsets[j + 2]); // depends on control dependency: [for], data = [j]
theAggregators[j + 3].aggregate(resultsBuf, position + aggregatorOffsets[j + 3]); // depends on control dependency: [for], data = [j]
theAggregators[j + 4].aggregate(resultsBuf, position + aggregatorOffsets[j + 4]); // depends on control dependency: [for], data = [j]
theAggregators[j + 5].aggregate(resultsBuf, position + aggregatorOffsets[j + 5]); // depends on control dependency: [for], data = [j]
theAggregators[j + 6].aggregate(resultsBuf, position + aggregatorOffsets[j + 6]); // depends on control dependency: [for], data = [j]
theAggregators[j + 7].aggregate(resultsBuf, position + aggregatorOffsets[j + 7]); // depends on control dependency: [for], data = [j]
}
return currentPosition;
} }
public class class_name {
@Override
public boolean hasNext() {
if (!members.hasNext()) {
try {
getNextEntries();
} catch (final Exception ignored) {
LOG.error("An error occured while getting next entries", ignored);
}
}
return members.hasNext();
} } | public class class_name {
// Control-dependency-annotated copy of hasNext(): byte-identical code; trailing annotations
// mark the try/catch-controlled statements. Keep them intact.
@Override
public boolean hasNext() {
if (!members.hasNext()) {
try {
getNextEntries(); // depends on control dependency: [try], data = [none]
} catch (final Exception ignored) {
LOG.error("An error occured while getting next entries", ignored);
} // depends on control dependency: [catch], data = [none]
}
return members.hasNext();
} }
public class class_name {
public List<String> listEscapeXml(final List<?> target) {
if (target == null) {
return null;
}
final List<String> result = new ArrayList<String>(target.size() + 2);
for (final Object element : target) {
result.add(escapeXml(element));
}
return result;
} } | public class class_name {
// Control-dependency-annotated copy of listEscapeXml(): byte-identical code; trailing
// annotations record the guarding construct and data dependency per statement.
public List<String> listEscapeXml(final List<?> target) {
if (target == null) {
return null; // depends on control dependency: [if], data = [none]
}
final List<String> result = new ArrayList<String>(target.size() + 2);
for (final Object element : target) {
result.add(escapeXml(element)); // depends on control dependency: [for], data = [element]
}
return result;
} }
public class class_name {
@Override
protected void doRestore() throws Throwable
{
List<DataRestore> dataRestorers = new ArrayList<DataRestore>();
ManageableRepository repository = null;
try
{
repository = repositoryService.getRepository(repositoryEntry.getName());
}
catch (RepositoryConfigurationException e)
{
throw new RepositoryException(e);
}
List<Backupable> backupableComponents =
repository.getWorkspaceContainer(workspaceName).getComponentInstancesOfType(Backupable.class);
List<WorkspaceContainerFacade> workspacesWaits4Resume = new ArrayList<WorkspaceContainerFacade>();
Throwable throwable = null;
try
{
// set state SUSPENDED to other workspaces if singledb
if (!DBInitializerHelper.getDatabaseType(workspaceEntry).isMultiDatabase())
{
for (WorkspaceEntry we : repositoryEntry.getWorkspaceEntries())
{
if (!we.getName().equals(workspaceEntry.getName()))
{
WorkspaceContainerFacade wsContainer = repository.getWorkspaceContainer(we.getName());
wsContainer.setState(ManageableRepository.SUSPENDED);
workspacesWaits4Resume.add(wsContainer);
}
}
}
DataRestoreContext context = new DataRestoreContext(
new String[] {DataRestoreContext.STORAGE_DIR},
new Object[] {new File(restorePath)});
// restore all components
for (Backupable component : backupableComponents)
{
dataRestorers.add(component.getDataRestorer(context));
}
for (DataRestore restorer : dataRestorers)
{
restorer.clean();
}
for (DataRestore restorer : dataRestorers)
{
restorer.restore();
}
for (DataRestore restorer : dataRestorers)
{
restorer.commit();
}
}
catch (Throwable e) //NOSONAR
{
throwable = e;
for (DataRestore restorer : dataRestorers)
{
try
{
restorer.rollback();
}
catch (BackupException e1)
{
LOG.error("Can't rollback restorer", e1);
}
}
throw e;
}
finally
{
for (DataRestore restorer : dataRestorers)
{
try
{
restorer.close();
}
catch (BackupException e)
{
LOG.error("Can't close restorer", e);
}
}
try
{
for (WorkspaceContainerFacade wsContainer : workspacesWaits4Resume)
{
wsContainer.setState(ManageableRepository.ONLINE);
}
}
catch (RepositoryException e)
{
if (throwable == null)
{
LOG.error("Con not set ONLYNE state for repository " + repository.getConfiguration().getName(), e);
}
else
{
throw new RepositoryException("Con not set ONLINE state for repository "
+ repository.getConfiguration().getName(), e);
}
}
}
} } | public class class_name {
// Control-dependency-annotated copy of doRestore(): code (including the original message
// typos) is byte-identical; trailing annotations record the guarding construct per
// statement. Keep both the strings and the annotations intact — they are data.
@Override
protected void doRestore() throws Throwable
{
List<DataRestore> dataRestorers = new ArrayList<DataRestore>();
ManageableRepository repository = null;
try
{
repository = repositoryService.getRepository(repositoryEntry.getName());
}
catch (RepositoryConfigurationException e)
{
throw new RepositoryException(e);
}
List<Backupable> backupableComponents =
repository.getWorkspaceContainer(workspaceName).getComponentInstancesOfType(Backupable.class);
List<WorkspaceContainerFacade> workspacesWaits4Resume = new ArrayList<WorkspaceContainerFacade>();
Throwable throwable = null;
try
{
// set state SUSPENDED to other workspaces if singledb
if (!DBInitializerHelper.getDatabaseType(workspaceEntry).isMultiDatabase())
{
for (WorkspaceEntry we : repositoryEntry.getWorkspaceEntries())
{
if (!we.getName().equals(workspaceEntry.getName()))
{
WorkspaceContainerFacade wsContainer = repository.getWorkspaceContainer(we.getName());
wsContainer.setState(ManageableRepository.SUSPENDED); // depends on control dependency: [if], data = [none]
workspacesWaits4Resume.add(wsContainer); // depends on control dependency: [if], data = [none]
}
}
}
DataRestoreContext context = new DataRestoreContext(
new String[] {DataRestoreContext.STORAGE_DIR},
new Object[] {new File(restorePath)});
// restore all components
for (Backupable component : backupableComponents)
{
dataRestorers.add(component.getDataRestorer(context)); // depends on control dependency: [for], data = [component]
}
for (DataRestore restorer : dataRestorers)
{
restorer.clean(); // depends on control dependency: [for], data = [restorer]
}
for (DataRestore restorer : dataRestorers)
{
restorer.restore(); // depends on control dependency: [for], data = [restorer]
}
for (DataRestore restorer : dataRestorers)
{
restorer.commit(); // depends on control dependency: [for], data = [restorer]
}
}
catch (Throwable e) //NOSONAR
{
throwable = e;
for (DataRestore restorer : dataRestorers)
{
try
{
restorer.rollback(); // depends on control dependency: [try], data = [none]
}
catch (BackupException e1)
{
LOG.error("Can't rollback restorer", e1);
}
}
throw e;
}
finally
{
for (DataRestore restorer : dataRestorers)
{
try
{
restorer.close();
}
catch (BackupException e)
{
LOG.error("Can't close restorer", e);
} // depends on control dependency: [catch], data = [none]
}
try
{
for (WorkspaceContainerFacade wsContainer : workspacesWaits4Resume)
{
wsContainer.setState(ManageableRepository.ONLINE); // depends on control dependency: [for], data = [wsContainer]
}
}
catch (RepositoryException e)
{
if (throwable == null)
{
LOG.error("Con not set ONLYNE state for repository " + repository.getConfiguration().getName(), e); // depends on control dependency: [if], data = [none]
}
else
{
throw new RepositoryException("Con not set ONLINE state for repository "
+ repository.getConfiguration().getName(), e);
}
} // depends on control dependency: [catch], data = [none]
}
} }
public class class_name {
public boolean isSimpleType() {
if (//
fieldType.equals(Double.class.getCanonicalName()) || //
fieldType.equals(Float.class.getCanonicalName()) || //
fieldType.equals(Integer.class.getCanonicalName()) || //
fieldType.equals(double.class.getCanonicalName()) || //
fieldType.equals(float.class.getCanonicalName()) || //
fieldType.equals(int.class.getCanonicalName()) || //
fieldType.equals(Boolean.class.getCanonicalName()) || //
fieldType.equals(boolean.class.getCanonicalName()) || //
fieldType.equals(String.class.getCanonicalName()) //
) {
return true;
}
return false;
} } | public class class_name {
// Control-dependency-annotated copy of isSimpleType(): byte-identical code; the trailing
// annotation marks the if-controlled return. Keep it intact.
public boolean isSimpleType() {
if (//
fieldType.equals(Double.class.getCanonicalName()) || //
fieldType.equals(Float.class.getCanonicalName()) || //
fieldType.equals(Integer.class.getCanonicalName()) || //
fieldType.equals(double.class.getCanonicalName()) || //
fieldType.equals(float.class.getCanonicalName()) || //
fieldType.equals(int.class.getCanonicalName()) || //
fieldType.equals(Boolean.class.getCanonicalName()) || //
fieldType.equals(boolean.class.getCanonicalName()) || //
fieldType.equals(String.class.getCanonicalName()) //
) {
return true; // depends on control dependency: [if], data = []
}
return false;
} }
public class class_name {
@Pure
protected String getLanguageScriptMemberGetter() {
final Grammar grammar = getGrammar();
final AbstractRule scriptRule = GrammarUtil.findRuleForName(grammar, getCodeBuilderConfig().getScriptRuleName());
for (final Assignment assignment : GrammarUtil.containedAssignments(scriptRule)) {
if ((assignment.getTerminal() instanceof RuleCall)
&& Objects.equals(((RuleCall) assignment.getTerminal()).getRule().getName(),
getCodeBuilderConfig().getTopElementRuleName())) {
return "get" + Strings.toFirstUpper(assignment.getFeature()); //$NON-NLS-1$
}
}
throw new IllegalStateException("member not found"); //$NON-NLS-1$
} } | public class class_name {
// Control-dependency-annotated copy of getLanguageScriptMemberGetter(): byte-identical
// code; the trailing annotation marks the if-controlled return. Keep it intact.
@Pure
protected String getLanguageScriptMemberGetter() {
final Grammar grammar = getGrammar();
final AbstractRule scriptRule = GrammarUtil.findRuleForName(grammar, getCodeBuilderConfig().getScriptRuleName());
for (final Assignment assignment : GrammarUtil.containedAssignments(scriptRule)) {
if ((assignment.getTerminal() instanceof RuleCall)
&& Objects.equals(((RuleCall) assignment.getTerminal()).getRule().getName(),
getCodeBuilderConfig().getTopElementRuleName())) {
return "get" + Strings.toFirstUpper(assignment.getFeature()); //$NON-NLS-1$ // depends on control dependency: [if], data = [none]
}
}
throw new IllegalStateException("member not found"); //$NON-NLS-1$
} }
public class class_name {
public String getFullName()
{
final StringBuilder buffer = new StringBuilder();
if (hasFirstName())
{
buffer.append(trimToEmpty(firstName));
}
if (hasFullName())
{
buffer.append(" ");
}
if (hasLastName())
{
buffer.append(trimToEmpty(lastName));
}
return buffer.toString();
} } | public class class_name {
// Control-dependency-annotated copy of getFullName(): byte-identical code; trailing
// annotations mark the if-controlled appends. Keep them intact.
public String getFullName()
{
final StringBuilder buffer = new StringBuilder();
if (hasFirstName())
{
buffer.append(trimToEmpty(firstName)); // depends on control dependency: [if], data = [none]
}
if (hasFullName())
{
buffer.append(" "); // depends on control dependency: [if], data = [none]
}
if (hasLastName())
{
buffer.append(trimToEmpty(lastName)); // depends on control dependency: [if], data = [none]
}
return buffer.toString();
} }
public class class_name {
@SuppressWarnings({ "rawtypes", "unchecked" })
public ChronoZonedDateTime<?> zonedDateTime(TemporalAccessor temporal) {
try {
ZoneId zone = ZoneId.from(temporal);
try {
Instant instant = Instant.from(temporal);
return zonedDateTime(instant, zone);
} catch (DateTimeException ex1) {
ChronoLocalDateTime cldt = localDateTime(temporal);
ChronoLocalDateTimeImpl cldtImpl = ensureChronoLocalDateTime(cldt);
return ChronoZonedDateTimeImpl.ofBest(cldtImpl, zone, null);
}
} catch (DateTimeException ex) {
throw new DateTimeException("Unable to obtain ChronoZonedDateTime from TemporalAccessor: " + temporal.getClass(), ex);
}
} } | public class class_name {
// Control-dependency-annotated copy of zonedDateTime(): byte-identical code; trailing
// annotations mark the try/catch-controlled statements. Keep them intact.
@SuppressWarnings({ "rawtypes", "unchecked" })
public ChronoZonedDateTime<?> zonedDateTime(TemporalAccessor temporal) {
try {
ZoneId zone = ZoneId.from(temporal);
try {
Instant instant = Instant.from(temporal);
return zonedDateTime(instant, zone); // depends on control dependency: [try], data = [none]
} catch (DateTimeException ex1) {
ChronoLocalDateTime cldt = localDateTime(temporal);
ChronoLocalDateTimeImpl cldtImpl = ensureChronoLocalDateTime(cldt);
return ChronoZonedDateTimeImpl.ofBest(cldtImpl, zone, null);
} // depends on control dependency: [catch], data = [none]
} catch (DateTimeException ex) {
throw new DateTimeException("Unable to obtain ChronoZonedDateTime from TemporalAccessor: " + temporal.getClass(), ex);
} // depends on control dependency: [catch], data = [none]
} }
public class class_name {
protected void buildCache(int maxCacheSize) {
if (blackPrefixList != null && !blackPrefixList.isEmpty()) {
int min = Math.min(256, maxCacheSize);
int max = Math.min(10240, maxCacheSize);
ConcurrentLinkedHashMap.Builder<String, Boolean> builder = new ConcurrentLinkedHashMap.Builder<String, Boolean>()
.initialCapacity(min).maximumWeightedCapacity(max);
this.resultOfInBlackList = builder.build();
} else {
this.resultOfInBlackList = null;
}
} } | public class class_name {
// Control-dependency-annotated copy of buildCache(): byte-identical code; trailing
// annotations mark the if/else-controlled assignments. Keep them intact.
protected void buildCache(int maxCacheSize) {
if (blackPrefixList != null && !blackPrefixList.isEmpty()) {
int min = Math.min(256, maxCacheSize);
int max = Math.min(10240, maxCacheSize);
ConcurrentLinkedHashMap.Builder<String, Boolean> builder = new ConcurrentLinkedHashMap.Builder<String, Boolean>()
.initialCapacity(min).maximumWeightedCapacity(max);
this.resultOfInBlackList = builder.build(); // depends on control dependency: [if], data = [none]
} else {
this.resultOfInBlackList = null; // depends on control dependency: [if], data = [none]
}
} }
public class class_name {
public void initialize(ClassLoader classLoader)
{
ComponentAnnotationLoader loader = new ComponentAnnotationLoader();
loader.initialize(this, classLoader);
// Extension point to allow component to manipulate ComponentManager initialized state.
try {
List<ComponentManagerInitializer> initializers = this.getInstanceList(ComponentManagerInitializer.class);
for (ComponentManagerInitializer initializer : initializers) {
initializer.initialize(this);
}
} catch (ComponentLookupException e) {
// Should never happen
this.logger.error("Failed to lookup ComponentManagerInitializer components", e);
}
} } | public class class_name {
// Control-dependency-annotated copy of initialize(): byte-identical code; trailing
// annotations mark the for- and catch-controlled statements. Keep them intact.
public void initialize(ClassLoader classLoader)
{
ComponentAnnotationLoader loader = new ComponentAnnotationLoader();
loader.initialize(this, classLoader);
// Extension point to allow component to manipulate ComponentManager initialized state.
try {
List<ComponentManagerInitializer> initializers = this.getInstanceList(ComponentManagerInitializer.class);
for (ComponentManagerInitializer initializer : initializers) {
initializer.initialize(this); // depends on control dependency: [for], data = [initializer]
}
} catch (ComponentLookupException e) {
// Should never happen
this.logger.error("Failed to lookup ComponentManagerInitializer components", e);
} // depends on control dependency: [catch], data = [none]
} }
public class class_name {
public void inlinePrintNoQuotes(Object o) {
if(inline.length() > 0) {
inline.append(SEPARATOR);
}
// remove newlines
String str = o.toString().replace(NEWLINE, " ");
// escaping
str = str.replace("\\", "\\\\").replace("\"", "\\\"");
inline.append(str);
} } | public class class_name {
// Control-dependency-annotated copy of inlinePrintNoQuotes(): byte-identical code; the
// trailing annotation marks the if-controlled separator append. Keep it intact.
public void inlinePrintNoQuotes(Object o) {
if(inline.length() > 0) {
inline.append(SEPARATOR); // depends on control dependency: [if], data = [none]
}
// remove newlines
String str = o.toString().replace(NEWLINE, " ");
// escaping
str = str.replace("\\", "\\\\").replace("\"", "\\\"");
inline.append(str);
} }
public class class_name {
private boolean createBondApi18(@NonNull final BluetoothDevice device) {
/*
* There is a createBond() method in BluetoothDevice class but for now it's hidden. We will call it using reflections. It has been revealed in KitKat (Api19)
*/
try {
final Method createBond = device.getClass().getMethod("createBond");
mService.sendLogBroadcast(DfuBaseService.LOG_LEVEL_DEBUG, "gatt.getDevice().createBond() (hidden)");
return (Boolean) createBond.invoke(device);
} catch (final Exception e) {
Log.w(TAG, "An exception occurred while creating bond", e);
}
return false;
} } | public class class_name {
// Control-dependency-annotated copy of createBondApi18(): byte-identical code; trailing
// annotations mark the try/catch-controlled statements. Keep them intact.
private boolean createBondApi18(@NonNull final BluetoothDevice device) {
/*
* There is a createBond() method in BluetoothDevice class but for now it's hidden. We will call it using reflections. It has been revealed in KitKat (Api19)
*/
try {
final Method createBond = device.getClass().getMethod("createBond");
mService.sendLogBroadcast(DfuBaseService.LOG_LEVEL_DEBUG, "gatt.getDevice().createBond() (hidden)");
return (Boolean) createBond.invoke(device); // depends on control dependency: [try], data = [none]
} catch (final Exception e) {
Log.w(TAG, "An exception occurred while creating bond", e);
} // depends on control dependency: [catch], data = [none]
return false;
} }
public class class_name {
    /**
     * Trims the given string and collapses blank results to null.
     *
     * @param string input; may be null
     * @return the trimmed string, or null when the input is null or trims to empty
     */
    public static String trimToNull(String string) {
        if (string == null) {
            return null;
        }
        final String trimmed = string.trim();
        return trimmed.isEmpty() ? null : trimmed;
    }
}
// Control-dependency-annotated copy of trimToNull(): byte-identical code; trailing
// annotations mark the if/else-controlled returns. Keep them intact.
public static String trimToNull(String string) {
if (string == null) {
return null; // depends on control dependency: [if], data = [none]
}
String returnString = string.trim();
if (returnString.isEmpty()) {
return null; // depends on control dependency: [if], data = [none]
} else {
return returnString; // depends on control dependency: [if], data = [none]
}
} }
public class class_name {
public static void withLink(Representation representation, String rel, String href, Optional<Predicate<ReadableRepresentation>> predicate, Optional<String> name, Optional<String> title, Optional<String> hreflang, Optional<String> profile) {
if (predicate.or(Predicates.<ReadableRepresentation>alwaysTrue()).apply(representation)) {
representation.withLink(rel, href, name.orNull(), title.orNull(), hreflang.orNull(), profile.orNull());
}
} } | public class class_name {
public static void withLink(Representation representation, String rel, String href, Optional<Predicate<ReadableRepresentation>> predicate, Optional<String> name, Optional<String> title, Optional<String> hreflang, Optional<String> profile) {
if (predicate.or(Predicates.<ReadableRepresentation>alwaysTrue()).apply(representation)) {
representation.withLink(rel, href, name.orNull(), title.orNull(), hreflang.orNull(), profile.orNull()); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public List<JAXBElement<Object>> get_GenericApplicationPropertyOfBreaklineRelief() {
if (_GenericApplicationPropertyOfBreaklineRelief == null) {
_GenericApplicationPropertyOfBreaklineRelief = new ArrayList<JAXBElement<Object>>();
}
return this._GenericApplicationPropertyOfBreaklineRelief;
} } | public class class_name {
public List<JAXBElement<Object>> get_GenericApplicationPropertyOfBreaklineRelief() {
if (_GenericApplicationPropertyOfBreaklineRelief == null) {
_GenericApplicationPropertyOfBreaklineRelief = new ArrayList<JAXBElement<Object>>(); // depends on control dependency: [if], data = [none]
}
return this._GenericApplicationPropertyOfBreaklineRelief;
} } |
public class class_name {
@Override
public Set<KamNode> getAdjacentNodes(KamNode kamNode,
EdgeDirectionType edgeDirection, EdgeFilter edgeFilter,
NodeFilter nodeFilter) {
Set<KamNode> adjacentNodes = new LinkedHashSet<KamNode>();
KamNode node = null;
if (EdgeDirectionType.FORWARD == edgeDirection
|| EdgeDirectionType.BOTH == edgeDirection) {
final Set<KamEdge> sources = nodeSourceMap.get(kamNode);
if (hasItems(sources)) {
for (KamEdge kamEdge : sources) {
// Check for an edge filter
if (null != edgeFilter) {
if (!edgeFilter.accept(kamEdge)) {
continue;
}
}
node = kamEdge.getTargetNode();
// Check for a node filter
if (null != nodeFilter) {
if (!nodeFilter.accept(node)) {
continue;
}
}
adjacentNodes.add(node);
}
}
}
if (EdgeDirectionType.REVERSE == edgeDirection
|| EdgeDirectionType.BOTH == edgeDirection) {
final Set<KamEdge> targets = nodeTargetMap.get(kamNode);
if (hasItems(targets)) {
for (KamEdge kamEdge : targets) {
// Check for an edge filter
if (null != edgeFilter) {
if (!edgeFilter.accept(kamEdge)) {
continue;
}
}
node = kamEdge.getSourceNode();
// Check for a node filter
if (null != nodeFilter) {
if (!nodeFilter.accept(node)) {
continue;
}
}
adjacentNodes.add(node);
}
}
}
return adjacentNodes;
} } | public class class_name {
@Override
public Set<KamNode> getAdjacentNodes(KamNode kamNode,
EdgeDirectionType edgeDirection, EdgeFilter edgeFilter,
NodeFilter nodeFilter) {
Set<KamNode> adjacentNodes = new LinkedHashSet<KamNode>();
KamNode node = null;
if (EdgeDirectionType.FORWARD == edgeDirection
|| EdgeDirectionType.BOTH == edgeDirection) {
final Set<KamEdge> sources = nodeSourceMap.get(kamNode);
if (hasItems(sources)) {
for (KamEdge kamEdge : sources) {
// Check for an edge filter
if (null != edgeFilter) {
if (!edgeFilter.accept(kamEdge)) {
continue;
}
}
node = kamEdge.getTargetNode(); // depends on control dependency: [for], data = [kamEdge]
// Check for a node filter
if (null != nodeFilter) {
if (!nodeFilter.accept(node)) {
continue;
}
}
adjacentNodes.add(node); // depends on control dependency: [for], data = [none]
}
}
}
if (EdgeDirectionType.REVERSE == edgeDirection
|| EdgeDirectionType.BOTH == edgeDirection) {
final Set<KamEdge> targets = nodeTargetMap.get(kamNode);
if (hasItems(targets)) {
for (KamEdge kamEdge : targets) {
// Check for an edge filter
if (null != edgeFilter) {
if (!edgeFilter.accept(kamEdge)) {
continue;
}
}
node = kamEdge.getSourceNode(); // depends on control dependency: [for], data = [kamEdge]
// Check for a node filter
if (null != nodeFilter) {
if (!nodeFilter.accept(node)) {
continue;
}
}
adjacentNodes.add(node); // depends on control dependency: [for], data = [none]
}
}
}
return adjacentNodes;
} } |
public class class_name {
public void setSubFolders(java.util.Collection<Folder> subFolders) {
if (subFolders == null) {
this.subFolders = null;
return;
}
this.subFolders = new java.util.ArrayList<Folder>(subFolders);
} } | public class class_name {
public void setSubFolders(java.util.Collection<Folder> subFolders) {
if (subFolders == null) {
this.subFolders = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.subFolders = new java.util.ArrayList<Folder>(subFolders);
} } |
public class class_name {
public static synchronized String getLocalHostMetricName(int timeoutMs) {
if (sLocalHostMetricName != null) {
return sLocalHostMetricName;
}
sLocalHostMetricName = getLocalHostName(timeoutMs).replace('.', '_');
return sLocalHostMetricName;
} } | public class class_name {
public static synchronized String getLocalHostMetricName(int timeoutMs) {
if (sLocalHostMetricName != null) {
return sLocalHostMetricName; // depends on control dependency: [if], data = [none]
}
sLocalHostMetricName = getLocalHostName(timeoutMs).replace('.', '_');
return sLocalHostMetricName;
} } |
public class class_name {
public final int rightAdjustCharHeadWithPrev(byte[]bytes, int p, int s, int end, IntHolder prev) {
int p_ = leftAdjustCharHead(bytes, p, s, end);
if (p_ < s) {
if (prev != null) prev.value = p_;
p_ += length(bytes, p_, end);
} else {
if (prev != null) prev.value = -1; /* Sorry */
}
return p_;
} } | public class class_name {
public final int rightAdjustCharHeadWithPrev(byte[]bytes, int p, int s, int end, IntHolder prev) {
int p_ = leftAdjustCharHead(bytes, p, s, end);
if (p_ < s) {
if (prev != null) prev.value = p_;
p_ += length(bytes, p_, end); // depends on control dependency: [if], data = [none]
} else {
if (prev != null) prev.value = -1; /* Sorry */
}
return p_;
} } |
public class class_name {
private javax.sip.address.URI resolveSipOutbound(final javax.sip.address.URI uriToResolve) {
if (!uriToResolve.isSipURI()) {
return uriToResolve;
}
final javax.sip.address.SipURI sipURI = (javax.sip.address.SipURI) uriToResolve;
if (sipURI.getParameter(MessageDispatcher.SIP_OUTBOUND_PARAM_OB) == null) {
// no ob parameter, return
return uriToResolve;
}
final MobicentsSipSession session = getSipSession();
final javax.sip.address.SipURI flow = session.getFlow();
if (flow != null) {
if (logger.isDebugEnabled()) {
logger.debug("Found a flow \"" + flow + "\" for the original uri \"" + uriToResolve + "\"");
}
return flow;
}
return uriToResolve;
} } | public class class_name {
private javax.sip.address.URI resolveSipOutbound(final javax.sip.address.URI uriToResolve) {
if (!uriToResolve.isSipURI()) {
return uriToResolve; // depends on control dependency: [if], data = [none]
}
final javax.sip.address.SipURI sipURI = (javax.sip.address.SipURI) uriToResolve;
if (sipURI.getParameter(MessageDispatcher.SIP_OUTBOUND_PARAM_OB) == null) {
// no ob parameter, return
return uriToResolve; // depends on control dependency: [if], data = [none]
}
final MobicentsSipSession session = getSipSession();
final javax.sip.address.SipURI flow = session.getFlow();
if (flow != null) {
if (logger.isDebugEnabled()) {
logger.debug("Found a flow \"" + flow + "\" for the original uri \"" + uriToResolve + "\""); // depends on control dependency: [if], data = [none]
}
return flow; // depends on control dependency: [if], data = [none]
}
return uriToResolve;
} } |
public class class_name {
public static String urlEncode(String text)
{
String urlEncodedString=text;
if(text!=null)
{
try
{
urlEncodedString=URLEncoder.encode(text,SpiUtil.UTF_8_ENCODING_NAME);
}
catch(UnsupportedEncodingException exception)
{
throw new FaxException("Error while URL encoding text.",exception);
}
}
return urlEncodedString;
} } | public class class_name {
public static String urlEncode(String text)
{
String urlEncodedString=text;
if(text!=null)
{
try
{
urlEncodedString=URLEncoder.encode(text,SpiUtil.UTF_8_ENCODING_NAME); // depends on control dependency: [try], data = [none]
}
catch(UnsupportedEncodingException exception)
{
throw new FaxException("Error while URL encoding text.",exception);
} // depends on control dependency: [catch], data = [none]
}
return urlEncodedString;
} } |
public class class_name {
@Nullable
private Language getLanguageForLocalizedName(String languageName) {
for (Language element : Languages.get()) {
if (languageName.equals(element.getTranslatedName(messages))) {
return element;
}
}
return null;
} } | public class class_name {
@Nullable
private Language getLanguageForLocalizedName(String languageName) {
for (Language element : Languages.get()) {
if (languageName.equals(element.getTranslatedName(messages))) {
return element; // depends on control dependency: [if], data = [none]
}
}
return null;
} } |
public class class_name {
public static double computeL2(DoubleTuple t)
{
double sum = 0;
for (int i=0; i<t.getSize(); i++)
{
double ti = t.get(i);
sum += ti * ti;
}
return Math.sqrt(sum);
} } | public class class_name {
public static double computeL2(DoubleTuple t)
{
double sum = 0;
for (int i=0; i<t.getSize(); i++)
{
double ti = t.get(i);
sum += ti * ti;
// depends on control dependency: [for], data = [none]
}
return Math.sqrt(sum);
} } |
public class class_name {
public Version getVersion(String rawVersion)
{
Version version = this.versions.get(rawVersion);
if (version == null) {
version = new DefaultVersion(rawVersion);
this.versions.put(rawVersion, version);
}
return version;
} } | public class class_name {
public Version getVersion(String rawVersion)
{
Version version = this.versions.get(rawVersion);
if (version == null) {
version = new DefaultVersion(rawVersion); // depends on control dependency: [if], data = [none]
this.versions.put(rawVersion, version); // depends on control dependency: [if], data = [none]
}
return version;
} } |
public class class_name {
public void setProperty(final String property, final Object newValue) {
if (property.startsWith("@")) {
if (newValue instanceof String || newValue instanceof GString) {
final Iterator iter = iterator();
while (iter.hasNext()) {
final NodeChild child = (NodeChild) iter.next();
child.attributes().put(property.substring(1), newValue);
}
}
} else {
final GPathResult result = new NodeChildren(this, property, this.namespaceTagHints);
if (newValue instanceof Map) {
for (Object o : ((Map) newValue).entrySet()) {
final Map.Entry entry = (Map.Entry) o;
result.setProperty("@" + entry.getKey(), entry.getValue());
}
} else {
if (newValue instanceof Closure) {
result.replaceNode((Closure) newValue);
} else {
result.replaceBody(newValue);
}
}
}
} } | public class class_name {
public void setProperty(final String property, final Object newValue) {
if (property.startsWith("@")) {
if (newValue instanceof String || newValue instanceof GString) {
final Iterator iter = iterator();
while (iter.hasNext()) {
final NodeChild child = (NodeChild) iter.next();
child.attributes().put(property.substring(1), newValue); // depends on control dependency: [while], data = [none]
}
}
} else {
final GPathResult result = new NodeChildren(this, property, this.namespaceTagHints);
if (newValue instanceof Map) {
for (Object o : ((Map) newValue).entrySet()) {
final Map.Entry entry = (Map.Entry) o;
result.setProperty("@" + entry.getKey(), entry.getValue()); // depends on control dependency: [for], data = [o]
}
} else {
if (newValue instanceof Closure) {
result.replaceNode((Closure) newValue); // depends on control dependency: [if], data = [none]
} else {
result.replaceBody(newValue); // depends on control dependency: [if], data = [none]
}
}
}
} } |
public class class_name {
protected void onFoundDestination (String destName) {
if ( ( destName != null ) && ( ! destName.isEmpty() ) ) {
DestinationState destState =
this.registry.putIfAbsent(destName, new DestinationState(destName, this.brokerId));
//
// If it was already there, mark it as seen now by the broker.
//
if ( destState != null ) {
destState.putBrokerInfo(this.brokerId, true);
}
}
} } | public class class_name {
protected void onFoundDestination (String destName) {
if ( ( destName != null ) && ( ! destName.isEmpty() ) ) {
DestinationState destState =
this.registry.putIfAbsent(destName, new DestinationState(destName, this.brokerId));
//
// If it was already there, mark it as seen now by the broker.
//
if ( destState != null ) {
destState.putBrokerInfo(this.brokerId, true); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
public void setRestrictions(List<IRule> rulesNew) throws CDKException {
Iterator<IRule> itRules = rulesNew.iterator();
while (itRules.hasNext()) {
IRule rule = itRules.next();
if (rule instanceof ElementRule) {
mfRange = (MolecularFormulaRange) ((Object[]) rule.getParameters())[0];
//removing the rule
Iterator<IRule> oldRuleIt = rules.iterator();
while (oldRuleIt.hasNext()) {
IRule oldRule = oldRuleIt.next();
if (oldRule instanceof ElementRule) {
rules.remove(oldRule);
rules.add(rule);
break;
}
}
this.matrix_Base = getMatrix(mfRange.getIsotopeCount());
} else if (rule instanceof ChargeRule) {
this.charge = (Double) ((Object[]) rule.getParameters())[0];
//removing the rule
Iterator<IRule> oldRuleIt = rules.iterator();
while (oldRuleIt.hasNext()) {
IRule oldRule = oldRuleIt.next();
if (oldRule instanceof ChargeRule) {
rules.remove(oldRule);
rules.add(rule);
break;
}
}
} else if (rule instanceof ToleranceRangeRule) {
this.tolerance = (Double) ((Object[]) rule.getParameters())[1];
//removing the rule
Iterator<IRule> oldRuleIt = rules.iterator();
while (oldRuleIt.hasNext()) {
IRule oldRule = oldRuleIt.next();
if (oldRule instanceof ToleranceRangeRule) {
rules.remove(oldRule);
rules.add(rule);
break;
}
}
} else {
rules.add(rule);
}
}
} } | public class class_name {
public void setRestrictions(List<IRule> rulesNew) throws CDKException {
Iterator<IRule> itRules = rulesNew.iterator();
while (itRules.hasNext()) {
IRule rule = itRules.next();
if (rule instanceof ElementRule) {
mfRange = (MolecularFormulaRange) ((Object[]) rule.getParameters())[0];
//removing the rule
Iterator<IRule> oldRuleIt = rules.iterator();
while (oldRuleIt.hasNext()) {
IRule oldRule = oldRuleIt.next();
if (oldRule instanceof ElementRule) {
rules.remove(oldRule); // depends on control dependency: [if], data = [none]
rules.add(rule); // depends on control dependency: [if], data = [none]
break;
}
}
this.matrix_Base = getMatrix(mfRange.getIsotopeCount());
} else if (rule instanceof ChargeRule) {
this.charge = (Double) ((Object[]) rule.getParameters())[0];
//removing the rule
Iterator<IRule> oldRuleIt = rules.iterator();
while (oldRuleIt.hasNext()) {
IRule oldRule = oldRuleIt.next();
if (oldRule instanceof ChargeRule) {
rules.remove(oldRule); // depends on control dependency: [if], data = [none]
rules.add(rule); // depends on control dependency: [if], data = [none]
break;
}
}
} else if (rule instanceof ToleranceRangeRule) {
this.tolerance = (Double) ((Object[]) rule.getParameters())[1];
//removing the rule
Iterator<IRule> oldRuleIt = rules.iterator();
while (oldRuleIt.hasNext()) {
IRule oldRule = oldRuleIt.next();
if (oldRule instanceof ToleranceRangeRule) {
rules.remove(oldRule); // depends on control dependency: [if], data = [none]
rules.add(rule); // depends on control dependency: [if], data = [none]
break;
}
}
} else {
rules.add(rule);
}
}
} } |
public class class_name {
@UiHandler("m_okButton")
protected void onClickOk(ClickEvent e) {
final boolean undoMove = m_undoMoveCheckbox.getFormValue().booleanValue();
m_popup.hide();
CmsRpcAction<Void> action = new CmsRpcAction<Void>() {
@Override
public void execute() {
start(200, true);
I_CmsVfsServiceAsync service = CmsCoreProvider.getVfsService();
service.undoChanges(m_restoreInfo.getStructureId(), undoMove, this);
}
@Override
protected void onResponse(Void result) {
stop(false);
if (m_afterRestoreAction != null) {
m_afterRestoreAction.run();
}
}
};
action.execute();
} } | public class class_name {
@UiHandler("m_okButton")
protected void onClickOk(ClickEvent e) {
final boolean undoMove = m_undoMoveCheckbox.getFormValue().booleanValue();
m_popup.hide();
CmsRpcAction<Void> action = new CmsRpcAction<Void>() {
@Override
public void execute() {
start(200, true);
I_CmsVfsServiceAsync service = CmsCoreProvider.getVfsService();
service.undoChanges(m_restoreInfo.getStructureId(), undoMove, this);
}
@Override
protected void onResponse(Void result) {
stop(false);
if (m_afterRestoreAction != null) {
m_afterRestoreAction.run();
// depends on control dependency: [if], data = [none]
}
}
};
action.execute();
} } |
public class class_name {
public static base_responses enable(nitro_service client, Long clid[]) throws Exception {
base_responses result = null;
if (clid != null && clid.length > 0) {
clusterinstance enableresources[] = new clusterinstance[clid.length];
for (int i=0;i<clid.length;i++){
enableresources[i] = new clusterinstance();
enableresources[i].clid = clid[i];
}
result = perform_operation_bulk_request(client, enableresources,"enable");
}
return result;
} } | public class class_name {
public static base_responses enable(nitro_service client, Long clid[]) throws Exception {
base_responses result = null;
if (clid != null && clid.length > 0) {
clusterinstance enableresources[] = new clusterinstance[clid.length];
for (int i=0;i<clid.length;i++){
enableresources[i] = new clusterinstance(); // depends on control dependency: [for], data = [i]
enableresources[i].clid = clid[i]; // depends on control dependency: [for], data = [i]
}
result = perform_operation_bulk_request(client, enableresources,"enable");
}
return result;
} } |
public class class_name {
public AttributeVector response() {
double[] y = new double[data.size()];
for (int i = 0; i < y.length; i++) {
y[i] = data.get(i).y;
}
return new AttributeVector(response, y);
} } | public class class_name {
public AttributeVector response() {
double[] y = new double[data.size()];
for (int i = 0; i < y.length; i++) {
y[i] = data.get(i).y; // depends on control dependency: [for], data = [i]
}
return new AttributeVector(response, y);
} } |
public class class_name {
@NotNull
public ByteBuf takeAtMost(int size) {
assert hasRemaining();
ByteBuf buf = bufs[first];
if (size >= buf.readRemaining()) {
first = next(first);
return buf;
}
ByteBuf result = buf.slice(size);
buf.moveHead(size);
return result;
} } | public class class_name {
@NotNull
public ByteBuf takeAtMost(int size) {
assert hasRemaining();
ByteBuf buf = bufs[first];
if (size >= buf.readRemaining()) {
first = next(first); // depends on control dependency: [if], data = [none]
return buf; // depends on control dependency: [if], data = [none]
}
ByteBuf result = buf.slice(size);
buf.moveHead(size);
return result;
} } |
public class class_name {
public WaiterState accepts(AmazonServiceException exception) throws AmazonServiceException {
for (WaiterAcceptor<Output> acceptor : acceptors) {
if (acceptor.matches(exception)) {
return acceptor.getState();
}
}
throw exception;
} } | public class class_name {
public WaiterState accepts(AmazonServiceException exception) throws AmazonServiceException {
for (WaiterAcceptor<Output> acceptor : acceptors) {
if (acceptor.matches(exception)) {
return acceptor.getState(); // depends on control dependency: [if], data = [none]
}
}
throw exception;
} } |
public class class_name {
private static <T> void siftDownComparable(int k, T x, Object[] array,
int n) {
if (n > 0) {
Comparable<? super T> key = (Comparable<? super T>)x;
int half = n >>> 1; // loop while a non-leaf
while (k < half) {
int child = (k << 1) + 1; // assume left child is least
Object c = array[child];
int right = child + 1;
if (right < n &&
((Comparable<? super T>) c).compareTo((T) array[right]) > 0)
c = array[child = right];
if (key.compareTo((T) c) <= 0)
break;
array[k] = c;
k = child;
}
array[k] = key;
}
} } | public class class_name {
private static <T> void siftDownComparable(int k, T x, Object[] array,
int n) {
if (n > 0) {
Comparable<? super T> key = (Comparable<? super T>)x; // depends on control dependency: [if], data = [none]
int half = n >>> 1; // loop while a non-leaf
while (k < half) {
int child = (k << 1) + 1; // assume left child is least
Object c = array[child];
int right = child + 1;
if (right < n &&
((Comparable<? super T>) c).compareTo((T) array[right]) > 0)
c = array[child = right];
if (key.compareTo((T) c) <= 0)
break;
array[k] = c; // depends on control dependency: [while], data = [none]
k = child; // depends on control dependency: [while], data = [none]
}
array[k] = key; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
static Field[] getAllFields(Class<?> type) {
List<Field> fields = new ArrayList<Field>();
for (Class<?> clazz = type; clazz != null; clazz = clazz.getSuperclass()) {
if (clazz == MPBase.class ||
clazz == Object.class) {
break;
}
fields.addAll(Arrays.asList(clazz.getDeclaredFields()));
}
Field[] fieldsArray = new Field[fields.size()];
return fields.toArray(fieldsArray);
} } | public class class_name {
static Field[] getAllFields(Class<?> type) {
List<Field> fields = new ArrayList<Field>();
for (Class<?> clazz = type; clazz != null; clazz = clazz.getSuperclass()) {
if (clazz == MPBase.class ||
clazz == Object.class) {
break;
}
fields.addAll(Arrays.asList(clazz.getDeclaredFields())); // depends on control dependency: [for], data = [clazz]
}
Field[] fieldsArray = new Field[fields.size()];
return fields.toArray(fieldsArray);
} } |
public class class_name {
private void performIntersect(final Sketch sketchIn) {
// curCount and input data are nonzero, match against HT
assert ((curCount_ > 0) && (!empty_));
final long[] cacheIn = sketchIn.getCache();
final int arrLongsIn = cacheIn.length;
final long[] hashTable;
if (mem_ != null) {
final int htLen = 1 << lgArrLongs_;
hashTable = new long[htLen];
mem_.getLongArray(CONST_PREAMBLE_LONGS << 3, hashTable, 0, htLen);
} else {
hashTable = hashTable_;
}
//allocate space for matching
final long[] matchSet = new long[ min(curCount_, sketchIn.getRetainedEntries(true)) ];
int matchSetCount = 0;
if (sketchIn.isOrdered()) {
//ordered compact, which enables early stop
for (int i = 0; i < arrLongsIn; i++ ) {
final long hashIn = cacheIn[i];
//if (hashIn <= 0L) continue; //<= 0 should not happen
if (hashIn >= thetaLong_) {
break; //early stop assumes that hashes in input sketch are ordered!
}
final int foundIdx = HashOperations.hashSearch(hashTable, lgArrLongs_, hashIn);
if (foundIdx == -1) { continue; }
matchSet[matchSetCount++] = hashIn;
}
}
else {
//either unordered compact or hash table
for (int i = 0; i < arrLongsIn; i++ ) {
final long hashIn = cacheIn[i];
if ((hashIn <= 0L) || (hashIn >= thetaLong_)) { continue; }
final int foundIdx = HashOperations.hashSearch(hashTable, lgArrLongs_, hashIn);
if (foundIdx == -1) { continue; }
matchSet[matchSetCount++] = hashIn;
}
}
//reduce effective array size to minimum
curCount_ = matchSetCount;
lgArrLongs_ = computeMinLgArrLongsFromCount(matchSetCount);
if (mem_ != null) {
insertCurCount(mem_, matchSetCount);
insertLgArrLongs(mem_, lgArrLongs_);
mem_.clear(CONST_PREAMBLE_LONGS << 3, 8 << lgArrLongs_); //clear for rebuild
} else {
Arrays.fill(hashTable_, 0, 1 << lgArrLongs_, 0L); //clear for rebuild
}
if (curCount_ > 0) {
moveDataToTgt(matchSet, matchSetCount); //move matchSet to target
} else {
if (thetaLong_ == Long.MAX_VALUE) {
empty_ = true;
}
}
} } | public class class_name {
private void performIntersect(final Sketch sketchIn) {
// curCount and input data are nonzero, match against HT
assert ((curCount_ > 0) && (!empty_));
final long[] cacheIn = sketchIn.getCache();
final int arrLongsIn = cacheIn.length;
final long[] hashTable;
if (mem_ != null) {
final int htLen = 1 << lgArrLongs_;
hashTable = new long[htLen]; // depends on control dependency: [if], data = [none]
mem_.getLongArray(CONST_PREAMBLE_LONGS << 3, hashTable, 0, htLen); // depends on control dependency: [if], data = [none]
} else {
hashTable = hashTable_; // depends on control dependency: [if], data = [none]
}
//allocate space for matching
final long[] matchSet = new long[ min(curCount_, sketchIn.getRetainedEntries(true)) ];
int matchSetCount = 0;
if (sketchIn.isOrdered()) {
//ordered compact, which enables early stop
for (int i = 0; i < arrLongsIn; i++ ) {
final long hashIn = cacheIn[i];
//if (hashIn <= 0L) continue; //<= 0 should not happen
if (hashIn >= thetaLong_) {
break; //early stop assumes that hashes in input sketch are ordered!
}
final int foundIdx = HashOperations.hashSearch(hashTable, lgArrLongs_, hashIn);
if (foundIdx == -1) { continue; }
matchSet[matchSetCount++] = hashIn; // depends on control dependency: [for], data = [none]
}
}
else {
//either unordered compact or hash table
for (int i = 0; i < arrLongsIn; i++ ) {
final long hashIn = cacheIn[i];
if ((hashIn <= 0L) || (hashIn >= thetaLong_)) { continue; }
final int foundIdx = HashOperations.hashSearch(hashTable, lgArrLongs_, hashIn);
if (foundIdx == -1) { continue; }
matchSet[matchSetCount++] = hashIn; // depends on control dependency: [for], data = [none]
}
}
//reduce effective array size to minimum
curCount_ = matchSetCount;
lgArrLongs_ = computeMinLgArrLongsFromCount(matchSetCount);
if (mem_ != null) {
insertCurCount(mem_, matchSetCount); // depends on control dependency: [if], data = [(mem_]
insertLgArrLongs(mem_, lgArrLongs_); // depends on control dependency: [if], data = [(mem_]
mem_.clear(CONST_PREAMBLE_LONGS << 3, 8 << lgArrLongs_); //clear for rebuild // depends on control dependency: [if], data = [none]
} else {
Arrays.fill(hashTable_, 0, 1 << lgArrLongs_, 0L); //clear for rebuild // depends on control dependency: [if], data = [none]
}
if (curCount_ > 0) {
moveDataToTgt(matchSet, matchSetCount); //move matchSet to target // depends on control dependency: [if], data = [none]
} else {
if (thetaLong_ == Long.MAX_VALUE) {
empty_ = true; // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
@VisibleForTesting
static URI parseProxyAddress(@Nullable String proxyAddress) {
if (Strings.isNullOrEmpty(proxyAddress)) {
return null;
}
String uriString = (proxyAddress.contains("//") ? "" : "//") + proxyAddress;
try {
URI uri = new URI(uriString);
String scheme = uri.getScheme();
String host = uri.getHost();
int port = uri.getPort();
checkArgument(
Strings.isNullOrEmpty(scheme) || scheme.matches("https?"),
"HTTP proxy address '%s' has invalid scheme '%s'.", proxyAddress, scheme);
checkArgument(!Strings.isNullOrEmpty(host), "Proxy address '%s' has no host.", proxyAddress);
checkArgument(port != -1, "Proxy address '%s' has no port.", proxyAddress);
checkArgument(
uri.equals(new URI(scheme, null, host, port, null, null, null)),
"Invalid proxy address '%s'.", proxyAddress);
return uri;
} catch (URISyntaxException e) {
throw new IllegalArgumentException(
String.format("Invalid proxy address '%s'.", proxyAddress), e);
}
} } | public class class_name {
@VisibleForTesting
static URI parseProxyAddress(@Nullable String proxyAddress) {
if (Strings.isNullOrEmpty(proxyAddress)) {
return null; // depends on control dependency: [if], data = [none]
}
String uriString = (proxyAddress.contains("//") ? "" : "//") + proxyAddress;
try {
URI uri = new URI(uriString);
String scheme = uri.getScheme();
String host = uri.getHost();
int port = uri.getPort();
checkArgument(
Strings.isNullOrEmpty(scheme) || scheme.matches("https?"),
"HTTP proxy address '%s' has invalid scheme '%s'.", proxyAddress, scheme);
checkArgument(!Strings.isNullOrEmpty(host), "Proxy address '%s' has no host.", proxyAddress);
checkArgument(port != -1, "Proxy address '%s' has no port.", proxyAddress);
checkArgument(
uri.equals(new URI(scheme, null, host, port, null, null, null)),
"Invalid proxy address '%s'.", proxyAddress);
return uri;
} catch (URISyntaxException e) {
throw new IllegalArgumentException(
String.format("Invalid proxy address '%s'.", proxyAddress), e);
}
} } |
public class class_name {
private void updateTagData(Map<Poi, Map<String, String>> pois, String key, String value) {
for (Map.Entry<Poi, Map<String, String>> entry : pois.entrySet()) {
Poi poi = entry.getKey();
String tmpValue = value;
Map<String, String> tagmap = entry.getValue();
if (!tagmap.keySet().contains("name")) {
continue;
}
if (tagmap.keySet().contains(key)) {
// Process is_in tags
if (!key.equals("is_in")) {
continue;
}
String prev = tagmap.get(key);
// Continue if tag already set correctly
if (prev.contains(",") || prev.contains(";")) {
continue;
}
// If string is a correct value, append it to existent value;
if (tmpValue.contains(",") || tmpValue.contains(";")) {
tmpValue = (prev + "," + tmpValue);
}
}
//Write surrounding area as parent in "is_in" tag.
tagmap.put(key, tmpValue);
try {
this.pStmtUpdateData.setLong(2, poi.id);
this.pStmtUpdateData.setString(1, writer.tagsToString(tagmap));
this.pStmtUpdateData.addBatch();
batchCountRelation++;
if (batchCountRelation % PoiWriter.BATCH_LIMIT == 0) {
pStmtUpdateData.executeBatch();
pStmtUpdateData.clearBatch();
writer.conn.commit();
}
} catch (SQLException e) {
e.printStackTrace();
}
}
} } | public class class_name {
private void updateTagData(Map<Poi, Map<String, String>> pois, String key, String value) {
for (Map.Entry<Poi, Map<String, String>> entry : pois.entrySet()) {
Poi poi = entry.getKey();
String tmpValue = value;
Map<String, String> tagmap = entry.getValue();
if (!tagmap.keySet().contains("name")) {
continue;
}
if (tagmap.keySet().contains(key)) {
// Process is_in tags
if (!key.equals("is_in")) {
continue;
}
String prev = tagmap.get(key);
// Continue if tag already set correctly
if (prev.contains(",") || prev.contains(";")) {
continue;
}
// If string is a correct value, append it to existent value;
if (tmpValue.contains(",") || tmpValue.contains(";")) {
tmpValue = (prev + "," + tmpValue); // depends on control dependency: [if], data = [none]
}
}
//Write surrounding area as parent in "is_in" tag.
tagmap.put(key, tmpValue); // depends on control dependency: [for], data = [none]
try {
this.pStmtUpdateData.setLong(2, poi.id); // depends on control dependency: [try], data = [none]
this.pStmtUpdateData.setString(1, writer.tagsToString(tagmap)); // depends on control dependency: [try], data = [none]
this.pStmtUpdateData.addBatch(); // depends on control dependency: [try], data = [none]
batchCountRelation++; // depends on control dependency: [try], data = [none]
if (batchCountRelation % PoiWriter.BATCH_LIMIT == 0) {
pStmtUpdateData.executeBatch(); // depends on control dependency: [if], data = [none]
pStmtUpdateData.clearBatch(); // depends on control dependency: [if], data = [none]
writer.conn.commit(); // depends on control dependency: [if], data = [none]
}
} catch (SQLException e) {
e.printStackTrace();
} // depends on control dependency: [catch], data = [none]
}
} } |
public class class_name {
public Histogram toHistogram(final float[] breaks)
{
final double[] approximateBins = new double[breaks.length - 1];
double prev = sum(breaks[0]);
for (int i = 1; i < breaks.length; ++i) {
double s = sum(breaks[i]);
approximateBins[i - 1] = (float) (s - prev);
prev = s;
}
return new Histogram(breaks, approximateBins);
} } | public class class_name {
public Histogram toHistogram(final float[] breaks)
{
final double[] approximateBins = new double[breaks.length - 1];
double prev = sum(breaks[0]);
for (int i = 1; i < breaks.length; ++i) {
double s = sum(breaks[i]);
approximateBins[i - 1] = (float) (s - prev); // depends on control dependency: [for], data = [i]
prev = s; // depends on control dependency: [for], data = [none]
}
return new Histogram(breaks, approximateBins);
} } |
public class class_name {
public synchronized void getUpdatedData() {
if (!isUpdateDataNeeded()) {
return;
}
this._projectResult = new ProjectLrResults();
_workedBuilds = new ArrayList<Integer>();
RunList<? extends Run> projectBuilds = currentProject.getBuilds();
// updateLastBuild();
for (Run run : projectBuilds) {
PerformanceJobReportAction performanceJobReportAction = run.getAction(PerformanceJobReportAction.class);
if (performanceJobReportAction == null) {
continue;
}
if (run.isBuilding()) {
continue;
}
int runNumber = run.getNumber();
if (_workedBuilds.contains(runNumber)) {
continue;
}
_workedBuilds.add(runNumber);
LrJobResults jobLrResult = performanceJobReportAction.getLrResultBuildDataset();
// get all the ran scenario results from this run and insert them into the project
for (Map.Entry<String, JobLrScenarioResult> runResult : jobLrResult.getLrScenarioResults().entrySet()) {
// add the scenario if it's the first time it's ran in this build (allows scenarios to be also added
// at diffrent time)
if (!_projectResult.getScenarioResults().containsKey(runResult.getKey())) {
_projectResult.addScenario(new LrProjectScenarioResults(runResult.getKey()));
}
// Join the SLA rule results
LrProjectScenarioResults lrProjectScenarioResults =
_projectResult.getScenarioResults().get(runResult.getKey());
if(lrProjectScenarioResults.getBuildCount() > MAX_DISPLAY_BUILDS)
{
continue;
}
lrProjectScenarioResults.incBuildCount();
JobLrScenarioResult scenarioRunResult = runResult.getValue();
for (GoalResult goalResult : scenarioRunResult.scenarioSlaResults) {
scenarioGoalResult(runNumber, lrProjectScenarioResults, goalResult);
}
// Join sceanrio stats
joinSceanrioConnectionsStats(runNumber, lrProjectScenarioResults, scenarioRunResult);
joinVUserScenarioStats(runNumber, lrProjectScenarioResults, scenarioRunResult);
joinTransactionScenarioStats(runNumber, lrProjectScenarioResults, scenarioRunResult);
joinDurationStats(runNumber, lrProjectScenarioResults, scenarioRunResult);
}
}
} } | public class class_name {
public synchronized void getUpdatedData() {
if (!isUpdateDataNeeded()) {
return; // depends on control dependency: [if], data = [none]
}
this._projectResult = new ProjectLrResults();
_workedBuilds = new ArrayList<Integer>();
RunList<? extends Run> projectBuilds = currentProject.getBuilds();
// updateLastBuild();
for (Run run : projectBuilds) {
PerformanceJobReportAction performanceJobReportAction = run.getAction(PerformanceJobReportAction.class);
if (performanceJobReportAction == null) {
continue;
}
if (run.isBuilding()) {
continue;
}
int runNumber = run.getNumber();
if (_workedBuilds.contains(runNumber)) {
continue;
}
_workedBuilds.add(runNumber); // depends on control dependency: [for], data = [run]
LrJobResults jobLrResult = performanceJobReportAction.getLrResultBuildDataset();
// get all the ran scenario results from this run and insert them into the project
for (Map.Entry<String, JobLrScenarioResult> runResult : jobLrResult.getLrScenarioResults().entrySet()) {
// add the scenario if it's the first time it's ran in this build (allows scenarios to be also added
// at diffrent time)
if (!_projectResult.getScenarioResults().containsKey(runResult.getKey())) {
_projectResult.addScenario(new LrProjectScenarioResults(runResult.getKey())); // depends on control dependency: [if], data = [none]
}
// Join the SLA rule results
LrProjectScenarioResults lrProjectScenarioResults =
_projectResult.getScenarioResults().get(runResult.getKey());
if(lrProjectScenarioResults.getBuildCount() > MAX_DISPLAY_BUILDS)
{
continue;
}
lrProjectScenarioResults.incBuildCount(); // depends on control dependency: [for], data = [none]
JobLrScenarioResult scenarioRunResult = runResult.getValue();
for (GoalResult goalResult : scenarioRunResult.scenarioSlaResults) {
scenarioGoalResult(runNumber, lrProjectScenarioResults, goalResult); // depends on control dependency: [for], data = [goalResult]
}
// Join sceanrio stats
joinSceanrioConnectionsStats(runNumber, lrProjectScenarioResults, scenarioRunResult); // depends on control dependency: [for], data = [none]
joinVUserScenarioStats(runNumber, lrProjectScenarioResults, scenarioRunResult); // depends on control dependency: [for], data = [none]
joinTransactionScenarioStats(runNumber, lrProjectScenarioResults, scenarioRunResult); // depends on control dependency: [for], data = [none]
joinDurationStats(runNumber, lrProjectScenarioResults, scenarioRunResult); // depends on control dependency: [for], data = [none]
}
}
} } |
public class class_name {
private static void assertServerState(AdminClient adminClient,
Collection<Integer> nodeIds,
VoldemortState stateToCheck,
boolean serverMustBeInThisState) {
for(Integer nodeId: nodeIds) {
String nodeName = adminClient.getAdminClientCluster().getNodeById(nodeId).briefToString();
try {
Versioned<String> versioned = adminClient.metadataMgmtOps.getRemoteMetadata(nodeId,
MetadataStore.SERVER_STATE_KEY);
VoldemortState state = VoldemortState.valueOf(versioned.getValue());
if(state.equals(stateToCheck) != serverMustBeInThisState) {
throw new VoldemortException("Cannot execute admin operation: "
+ nodeName + " is " + (serverMustBeInThisState ? "not in " : "in ")
+ stateToCheck.name() + " state.");
}
} catch (UnreachableStoreException e) {
System.err.println("Cannot verify the server state of " + nodeName + " because it is unreachable. Skipping.");
}
}
} } | public class class_name {
private static void assertServerState(AdminClient adminClient,
Collection<Integer> nodeIds,
VoldemortState stateToCheck,
boolean serverMustBeInThisState) {
for(Integer nodeId: nodeIds) {
String nodeName = adminClient.getAdminClientCluster().getNodeById(nodeId).briefToString();
try {
Versioned<String> versioned = adminClient.metadataMgmtOps.getRemoteMetadata(nodeId,
MetadataStore.SERVER_STATE_KEY);
VoldemortState state = VoldemortState.valueOf(versioned.getValue());
if(state.equals(stateToCheck) != serverMustBeInThisState) {
throw new VoldemortException("Cannot execute admin operation: "
+ nodeName + " is " + (serverMustBeInThisState ? "not in " : "in ")
+ stateToCheck.name() + " state.");
}
} catch (UnreachableStoreException e) {
System.err.println("Cannot verify the server state of " + nodeName + " because it is unreachable. Skipping.");
} // depends on control dependency: [catch], data = [none]
}
} } |
public class class_name {
public String toTableString(boolean probs) {
Set<B> key2s = Sets.newHashSet();
for (A key : counts.keySet()) {
key2s.addAll(counts.get(key).keySet());
}
List<B> key2List = Lists.newArrayList(key2s);
StringBuilder sb = new StringBuilder();
sb.append("\t");
for (B key2 : key2List) {
sb.append(key2);
sb.append("\t");
}
sb.append("\n");
for (A key1 : counts.keySet()) {
sb.append(key1);
sb.append("\t");
for (B key2 : key2List) {
if (probs) {
sb.append(String.format("%.3f", getProbability(key1, key2)));
} else {
sb.append(getCount(key1, key2));
}
sb.append("\t");
}
sb.append("\n");
}
return sb.toString();
} } | public class class_name {
public String toTableString(boolean probs) {
Set<B> key2s = Sets.newHashSet();
for (A key : counts.keySet()) {
key2s.addAll(counts.get(key).keySet()); // depends on control dependency: [for], data = [key]
}
List<B> key2List = Lists.newArrayList(key2s);
StringBuilder sb = new StringBuilder();
sb.append("\t");
for (B key2 : key2List) {
sb.append(key2); // depends on control dependency: [for], data = [key2]
sb.append("\t"); // depends on control dependency: [for], data = [none]
}
sb.append("\n");
for (A key1 : counts.keySet()) {
sb.append(key1); // depends on control dependency: [for], data = [key1]
sb.append("\t"); // depends on control dependency: [for], data = [none]
for (B key2 : key2List) {
if (probs) {
sb.append(String.format("%.3f", getProbability(key1, key2))); // depends on control dependency: [if], data = [none]
} else {
sb.append(getCount(key1, key2)); // depends on control dependency: [if], data = [none]
}
sb.append("\t"); // depends on control dependency: [for], data = [none]
}
sb.append("\n"); // depends on control dependency: [for], data = [none]
}
return sb.toString();
} } |
public class class_name {
public static String getContnet() {
String result = "";
Clipboard clipboard = Toolkit.getDefaultToolkit().getSystemClipboard();
try {
result = (String) clipboard.getData(DataFlavor.stringFlavor);
} catch (Exception ex) {
//noop
}
return result;
} } | public class class_name {
public static String getContnet() {
String result = "";
Clipboard clipboard = Toolkit.getDefaultToolkit().getSystemClipboard();
try {
result = (String) clipboard.getData(DataFlavor.stringFlavor); // depends on control dependency: [try], data = [none]
} catch (Exception ex) {
//noop
} // depends on control dependency: [catch], data = [none]
return result;
} } |
public class class_name {
public static ArrayModifiableDBIDs[] partitionsFromIntegerLabels(DBIDs ids, IntegerDataStore assignment, int k) {
int[] sizes = new int[k];
for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
sizes[assignment.intValue(iter)] += 1;
}
ArrayModifiableDBIDs[] clusters = new ArrayModifiableDBIDs[k];
for(int i = 0; i < k; i++) {
clusters[i] = DBIDUtil.newArray(sizes[i]);
}
for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
clusters[assignment.intValue(iter)].add(iter);
}
return clusters;
} } | public class class_name {
public static ArrayModifiableDBIDs[] partitionsFromIntegerLabels(DBIDs ids, IntegerDataStore assignment, int k) {
int[] sizes = new int[k];
for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
sizes[assignment.intValue(iter)] += 1; // depends on control dependency: [for], data = [iter]
}
ArrayModifiableDBIDs[] clusters = new ArrayModifiableDBIDs[k];
for(int i = 0; i < k; i++) {
clusters[i] = DBIDUtil.newArray(sizes[i]); // depends on control dependency: [for], data = [i]
}
for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
clusters[assignment.intValue(iter)].add(iter); // depends on control dependency: [for], data = [iter]
}
return clusters;
} } |
public class class_name {
protected void onHoverIn(MouseOverEvent event) {
if ((getParentMenu().getSelectedItem() != null)
&& (getParentMenu().getSelectedItem() != this)
&& getParentMenu().getSelectedItem().hasSubmenu()) {
getParentMenu().getSelectedItem().getSubMenu().onClose();
getParentMenu().getSelectedItem().deselectItem();
}
if (hasSubmenu()) {
getSubMenu().openPopup(this);
}
selectItem();
} } | public class class_name {
protected void onHoverIn(MouseOverEvent event) {
if ((getParentMenu().getSelectedItem() != null)
&& (getParentMenu().getSelectedItem() != this)
&& getParentMenu().getSelectedItem().hasSubmenu()) {
getParentMenu().getSelectedItem().getSubMenu().onClose(); // depends on control dependency: [if], data = [none]
getParentMenu().getSelectedItem().deselectItem(); // depends on control dependency: [if], data = [none]
}
if (hasSubmenu()) {
getSubMenu().openPopup(this); // depends on control dependency: [if], data = [none]
}
selectItem();
} } |
public class class_name {
protected final void setFileExtensions(final String... fileExtensions) {
if (fileExtensions == null) {
this.fileExtensions = null;
} else {
this.fileExtensions = new ArrayList<String>();
this.fileExtensions.addAll(Arrays.asList(fileExtensions));
}
} } | public class class_name {
protected final void setFileExtensions(final String... fileExtensions) {
if (fileExtensions == null) {
this.fileExtensions = null;
// depends on control dependency: [if], data = [none]
} else {
this.fileExtensions = new ArrayList<String>();
// depends on control dependency: [if], data = [none]
this.fileExtensions.addAll(Arrays.asList(fileExtensions));
// depends on control dependency: [if], data = [(fileExtensions]
}
} } |
public class class_name {
public final String report() {
StringBuilder sb = new StringBuilder();
sb.append(String.format("%s stats: %s\n", name, this));
if (pubs.hasSamples()) {
String indent = " ";
if (subs.hasSamples()) {
sb.append(String.format("%sPub stats: %s\n", indent, pubs));
indent = " ";
}
if (pubs.getSamples().size() > 1) {
for (Sample stat : pubs.getSamples()) {
sb.append(String.format("%s[%2d] %s (%d msgs)\n", indent,
pubs.getSamples().indexOf(stat) + 1, stat, stat.jobMsgCnt));
}
sb.append(String.format("%s %s\n", indent, pubs.statistics()));
}
}
if (subs.hasSamples()) {
String indent = " ";
sb.append(String.format("%sSub stats: %s\n", indent, subs));
indent = " ";
if (subs.getSamples().size() > 1) {
for (Sample stat : subs.getSamples()) {
sb.append(String.format("%s[%2d] %s (%d msgs)\n", indent,
subs.getSamples().indexOf(stat) + 1, stat, stat.jobMsgCnt));
}
sb.append(String.format("%s %s\n", indent, subs.statistics()));
}
}
return sb.toString();
} } | public class class_name {
public final String report() {
StringBuilder sb = new StringBuilder();
sb.append(String.format("%s stats: %s\n", name, this));
if (pubs.hasSamples()) {
String indent = " ";
if (subs.hasSamples()) {
sb.append(String.format("%sPub stats: %s\n", indent, pubs)); // depends on control dependency: [if], data = [none]
indent = " "; // depends on control dependency: [if], data = [none]
}
if (pubs.getSamples().size() > 1) {
for (Sample stat : pubs.getSamples()) {
sb.append(String.format("%s[%2d] %s (%d msgs)\n", indent, // depends on control dependency: [for], data = [none]
pubs.getSamples().indexOf(stat) + 1, stat, stat.jobMsgCnt));
}
sb.append(String.format("%s %s\n", indent, pubs.statistics())); // depends on control dependency: [if], data = [none]
}
}
if (subs.hasSamples()) {
String indent = " ";
sb.append(String.format("%sSub stats: %s\n", indent, subs));
indent = " "; // depends on control dependency: [if], data = [none]
if (subs.getSamples().size() > 1) {
for (Sample stat : subs.getSamples()) {
sb.append(String.format("%s[%2d] %s (%d msgs)\n", indent, // depends on control dependency: [for], data = [none]
subs.getSamples().indexOf(stat) + 1, stat, stat.jobMsgCnt));
}
sb.append(String.format("%s %s\n", indent, subs.statistics())); // depends on control dependency: [if], data = [none]
}
}
return sb.toString();
} } |
public class class_name {
@Override
public EClass getIfcMassFlowRateMeasure() {
if (ifcMassFlowRateMeasureEClass == null) {
ifcMassFlowRateMeasureEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
.getEClassifiers().get(835);
}
return ifcMassFlowRateMeasureEClass;
} } | public class class_name {
@Override
public EClass getIfcMassFlowRateMeasure() {
if (ifcMassFlowRateMeasureEClass == null) {
ifcMassFlowRateMeasureEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
.getEClassifiers().get(835);
// depends on control dependency: [if], data = [none]
}
return ifcMassFlowRateMeasureEClass;
} } |
public class class_name {
final public void inclusiveOrExpression() throws ParseException {
exclusiveOrExpression();
label_4:
while (true) {
switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
case 9:
case 10:
;
break;
default:
jj_la1[7] = jj_gen;
break label_4;
}
switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
case 9:
jj_consume_token(9);
break;
case 10:
jj_consume_token(10);
break;
default:
jj_la1[8] = jj_gen;
jj_consume_token(-1);
throw new ParseException();
}
ASTBitOr jjtn001 = new ASTBitOr(JJTBITOR);
boolean jjtc001 = true;
jjtree.openNodeScope(jjtn001);
try {
exclusiveOrExpression();
} catch (Throwable jjte001) {
if (jjtc001) {
jjtree.clearNodeScope(jjtn001);
jjtc001 = false;
} else {
jjtree.popNode();
}
if (jjte001 instanceof RuntimeException) {
{if (true) throw (RuntimeException)jjte001;}
}
if (jjte001 instanceof ParseException) {
{if (true) throw (ParseException)jjte001;}
}
{if (true) throw (Error)jjte001;}
} finally {
if (jjtc001) {
jjtree.closeNodeScope(jjtn001, 2);
}
}
}
} } | public class class_name {
final public void inclusiveOrExpression() throws ParseException {
exclusiveOrExpression();
label_4:
while (true) {
switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
case 9:
case 10:
;
break;
default:
jj_la1[7] = jj_gen;
break label_4;
}
switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
case 9:
jj_consume_token(9);
break;
case 10:
jj_consume_token(10);
break;
default:
jj_la1[8] = jj_gen;
jj_consume_token(-1);
throw new ParseException();
}
ASTBitOr jjtn001 = new ASTBitOr(JJTBITOR);
boolean jjtc001 = true;
jjtree.openNodeScope(jjtn001);
try {
exclusiveOrExpression();
} catch (Throwable jjte001) {
if (jjtc001) {
jjtree.clearNodeScope(jjtn001); // depends on control dependency: [if], data = [none]
jjtc001 = false; // depends on control dependency: [if], data = [none]
} else {
jjtree.popNode(); // depends on control dependency: [if], data = [none]
}
if (jjte001 instanceof RuntimeException) {
{if (true) throw (RuntimeException)jjte001;}
}
if (jjte001 instanceof ParseException) {
{if (true) throw (ParseException)jjte001;}
}
{if (true) throw (Error)jjte001;}
} finally {
if (jjtc001) {
jjtree.closeNodeScope(jjtn001, 2); // depends on control dependency: [if], data = [none]
}
}
}
} } |
public class class_name {
public static <V> PnkyPromise<V> composeAsync(final ThrowingSupplier<PnkyPromise<V>> operation,
final Executor executor)
{
final Pnky<V> pnky = Pnky.create();
executor.execute(new Runnable()
{
@Override
public void run()
{
if (pnky.state.compareAndSet(WAITING, RUNNING))
{
try
{
operation.get().alwaysAccept(new ThrowingBiConsumer<V, Throwable>()
{
@Override
public void accept(final V result, final Throwable error) throws Throwable
{
if (error != null)
{
pnky.reject(error);
}
else
{
pnky.resolve(result);
}
}
});
}
catch (final Throwable e)
{
pnky.reject(e);
}
}
}
});
return pnky;
} } | public class class_name {
public static <V> PnkyPromise<V> composeAsync(final ThrowingSupplier<PnkyPromise<V>> operation,
final Executor executor)
{
final Pnky<V> pnky = Pnky.create();
executor.execute(new Runnable()
{
@Override
public void run()
{
if (pnky.state.compareAndSet(WAITING, RUNNING))
{
try
{
operation.get().alwaysAccept(new ThrowingBiConsumer<V, Throwable>()
{
@Override
public void accept(final V result, final Throwable error) throws Throwable
{
if (error != null)
{
pnky.reject(error);
}
else
{
pnky.resolve(result);
}
}
}); // depends on control dependency: [try], data = [none]
}
catch (final Throwable e)
{
pnky.reject(e);
} // depends on control dependency: [catch], data = [none]
}
}
});
return pnky;
} } |
public class class_name {
private static void setBasicValue(Object entity, Field member, String columnName, UDTValue row,
CassandraType dataType, MetamodelImpl metamodel)
{
if(row.isNull(columnName)){
return;
}
Object retVal = null;
switch (dataType)
{
case BYTES:
// case CUSTOM:
retVal = row.getBytes(columnName);
if (retVal != null)
{
setFieldValue(entity, member, ((ByteBuffer) retVal).array());
}
break;
case BOOLEAN:
retVal = row.getBool(columnName);
setFieldValue(entity, member, retVal);
break;
case BIGINT:
// bigints in embeddables and element collections are mapped/defined
// by Long
case LONG:
case COUNTER:
retVal = row.getLong(columnName);
setFieldValue(entity, member, retVal);
break;
case DECIMAL:
retVal = row.getDecimal(columnName);
setFieldValue(entity, member, retVal);
break;
case DOUBLE:
retVal = row.getDouble(columnName);
setFieldValue(entity, member, retVal);
break;
case FLOAT:
retVal = row.getFloat(columnName);
setFieldValue(entity, member, retVal);
break;
case INET:
retVal = row.getInet(columnName);
setFieldValue(entity, member, retVal);
break;
case INT:
retVal = row.getInt(columnName);
retVal = setIntValue(member, retVal);
setFieldValue(entity, member, retVal);
break;
case ASCII:
case STRING:
case CHARACTER:
try
{
row.getBytes(columnName);
}
catch (Exception e)
{
// do nothing
}
retVal = row.getString(columnName);
retVal = setTextValue(entity, member, retVal);
setFieldValue(entity, member, retVal);
break;
case TIMESTAMP:
retVal = row.getTimestamp(columnName);
if (retVal != null && member != null)
retVal = CassandraDataTranslator.decompose(member.getType(),
ByteBufferUtil.bytes(((Date) retVal).getTime()).array(), true);
setFieldValue(entity, member, retVal);
break;
case UUID:
// case TIMEUUID:
retVal = row.getUUID(columnName);
setFieldValue(entity, member, retVal);
break;
case LIST:
Class listAttributeTypeClass = PropertyAccessorHelper.getGenericClass(member);
Class listClazz = null;
boolean isElementCollectionList = false;
if (listAttributeTypeClass.isAssignableFrom(byte[].class))
{
listClazz = ByteBuffer.class;
}
else if (listAttributeTypeClass.isAnnotationPresent(Embeddable.class))
{
isElementCollectionList = true;
listClazz = UDTValue.class;
}
else
{
listClazz = listAttributeTypeClass;
}
retVal = row.getList(columnName, listClazz);
Collection resultList = new ArrayList();
if (isElementCollectionList)
{
Iterator collectionItems = ((Collection) retVal).iterator();
while (collectionItems.hasNext())
{
resultList.add(setUDTValue(entity, listAttributeTypeClass, (UDTValue) collectionItems.next(),
metamodel));
}
}
if (retVal != null && !((List) retVal).isEmpty() && !isElementCollectionList)
{
if (listAttributeTypeClass.isAssignableFrom(byte[].class))
{
setFieldValue(entity, member, CassandraDataTranslator.marshalCollection(BytesType.class,
(Collection) retVal, listAttributeTypeClass, ArrayList.class));
}
else
{
Iterator collectionItems = ((Collection) retVal).iterator();
while (collectionItems.hasNext())
{
resultList.add(collectionItems.next());
}
setFieldValue(entity, member, resultList);
}
}
else if (retVal != null && !((Collection) retVal).isEmpty())
{
setFieldValue(entity, member, resultList);
}
break;
case SET:
Class setAttributeTypeClass = PropertyAccessorHelper.getGenericClass(member);
Class setClazz = null;
boolean isElementCollectionSet = false;
if (setAttributeTypeClass.isAssignableFrom(byte[].class))
{
setClazz = ByteBuffer.class;
}
else if (setAttributeTypeClass.isAnnotationPresent(Embeddable.class))
{
isElementCollectionSet = true;
setClazz = UDTValue.class;
}
else
{
setClazz = setAttributeTypeClass;
}
retVal = row.getSet(columnName, setClazz);
Collection resultSet = new HashSet();
if (isElementCollectionSet)
{
Iterator collectionItems = ((Collection) retVal).iterator();
while (collectionItems.hasNext())
{
resultSet.add(setUDTValue(entity, setAttributeTypeClass, (UDTValue) collectionItems.next(),
metamodel));
}
}
if (retVal != null && !((Set) retVal).isEmpty() && !isElementCollectionSet)
{
if (setAttributeTypeClass.isAssignableFrom(byte[].class))
{
setFieldValue(entity, member, CassandraDataTranslator.marshalCollection(BytesType.class,
(Collection) retVal, setAttributeTypeClass, HashSet.class));
}
else
{
Iterator collectionItems = ((Collection) retVal).iterator();
while (collectionItems.hasNext())
{
resultSet.add(collectionItems.next());
}
setFieldValue(entity, member, resultSet);
}
}
else if (retVal != null && !((Collection) retVal).isEmpty())
{
setFieldValue(entity, member, resultSet);
}
break;
/*
* ASCII, BIGINT, BLOB, BOOLEAN, COUNTER, DECIMAL, DOUBLE, FLOAT, INET,
* INT, TEXT, TIMESTAMP, UUID, VARCHAR, VARINT, TIMEUUID, LIST, SET,
* MAP, CUSTOM;
*/
case MAP:
List<Class<?>> mapGenericClasses = PropertyAccessorHelper.getGenericClasses(member);
Class keyClass = CassandraValidationClassMapper.getValidationClassInstance(mapGenericClasses.get(0), true);
Class valueClass = CassandraValidationClassMapper
.getValidationClassInstance(mapGenericClasses.get(1), true);
Class mapValueClazz = null;
boolean isElementCollectionMap = false;
if (mapGenericClasses.get(1).isAssignableFrom(byte[].class))
{
mapValueClazz = ByteBuffer.class;
}
else if (mapGenericClasses.get(1).isAnnotationPresent(Embeddable.class))
{
isElementCollectionMap = true;
mapValueClazz = UDTValue.class;
}
else
{
mapValueClazz = mapGenericClasses.get(1);
}
retVal = row.getMap(columnName, mapGenericClasses.get(0).isAssignableFrom(byte[].class) ? ByteBuffer.class
: mapGenericClasses.get(0), mapValueClazz);
Map resultMap = new HashMap();
if (isElementCollectionMap)
{
Iterator keys = ((Map) retVal).keySet().iterator();
while (keys.hasNext())
{
Object keyValue = keys.next();
resultMap.put(
keyValue,
setUDTValue(entity, mapGenericClasses.get(1), (UDTValue) ((Map) retVal).get(keyValue),
metamodel));
}
}
boolean isByteBuffer = mapGenericClasses.get(0).isAssignableFrom(byte[].class)
|| mapGenericClasses.get(1).isAssignableFrom(byte[].class);
// set the values.
if (retVal != null && !((Map) retVal).isEmpty() && !isElementCollectionMap)
{
if (isByteBuffer)
{
setFieldValue(entity, member,
CassandraDataTranslator.marshalMap(mapGenericClasses, keyClass, valueClass, (Map) retVal));
}
else
{
Iterator keys = ((Map) retVal).keySet().iterator();
while (keys.hasNext())
{
Object keyValue = keys.next();
resultMap.put(keyValue, ((Map) retVal).get(keyValue));
}
setFieldValue(entity, member, resultMap);
}
}
else if (retVal != null && !((Map) retVal).isEmpty())
{
setFieldValue(entity, member, resultMap);
}
break;
}
} } | public class class_name {
private static void setBasicValue(Object entity, Field member, String columnName, UDTValue row,
CassandraType dataType, MetamodelImpl metamodel)
{
if(row.isNull(columnName)){
return; // depends on control dependency: [if], data = [none]
}
Object retVal = null;
switch (dataType)
{
case BYTES:
// case CUSTOM:
retVal = row.getBytes(columnName);
if (retVal != null)
{
setFieldValue(entity, member, ((ByteBuffer) retVal).array()); // depends on control dependency: [if], data = [none]
}
break;
case BOOLEAN:
retVal = row.getBool(columnName);
setFieldValue(entity, member, retVal);
break;
case BIGINT:
// bigints in embeddables and element collections are mapped/defined
// by Long
case LONG:
case COUNTER:
retVal = row.getLong(columnName);
setFieldValue(entity, member, retVal);
break;
case DECIMAL:
retVal = row.getDecimal(columnName);
setFieldValue(entity, member, retVal);
break;
case DOUBLE:
retVal = row.getDouble(columnName);
setFieldValue(entity, member, retVal);
break;
case FLOAT:
retVal = row.getFloat(columnName);
setFieldValue(entity, member, retVal);
break;
case INET:
retVal = row.getInet(columnName);
setFieldValue(entity, member, retVal);
break;
case INT:
retVal = row.getInt(columnName);
retVal = setIntValue(member, retVal);
setFieldValue(entity, member, retVal);
break;
case ASCII:
case STRING:
case CHARACTER:
try
{
row.getBytes(columnName); // depends on control dependency: [try], data = [none]
}
catch (Exception e)
{
// do nothing
} // depends on control dependency: [catch], data = [none]
retVal = row.getString(columnName);
retVal = setTextValue(entity, member, retVal);
setFieldValue(entity, member, retVal);
break;
case TIMESTAMP:
retVal = row.getTimestamp(columnName);
if (retVal != null && member != null)
retVal = CassandraDataTranslator.decompose(member.getType(),
ByteBufferUtil.bytes(((Date) retVal).getTime()).array(), true);
setFieldValue(entity, member, retVal);
break;
case UUID:
// case TIMEUUID:
retVal = row.getUUID(columnName);
setFieldValue(entity, member, retVal);
break;
case LIST:
Class listAttributeTypeClass = PropertyAccessorHelper.getGenericClass(member);
Class listClazz = null;
boolean isElementCollectionList = false;
if (listAttributeTypeClass.isAssignableFrom(byte[].class))
{
listClazz = ByteBuffer.class; // depends on control dependency: [if], data = [none]
}
else if (listAttributeTypeClass.isAnnotationPresent(Embeddable.class))
{
isElementCollectionList = true; // depends on control dependency: [if], data = [none]
listClazz = UDTValue.class; // depends on control dependency: [if], data = [none]
}
else
{
listClazz = listAttributeTypeClass; // depends on control dependency: [if], data = [none]
}
retVal = row.getList(columnName, listClazz);
Collection resultList = new ArrayList();
if (isElementCollectionList)
{
Iterator collectionItems = ((Collection) retVal).iterator();
while (collectionItems.hasNext())
{
resultList.add(setUDTValue(entity, listAttributeTypeClass, (UDTValue) collectionItems.next(),
metamodel)); // depends on control dependency: [while], data = [none]
}
}
if (retVal != null && !((List) retVal).isEmpty() && !isElementCollectionList)
{
if (listAttributeTypeClass.isAssignableFrom(byte[].class))
{
setFieldValue(entity, member, CassandraDataTranslator.marshalCollection(BytesType.class,
(Collection) retVal, listAttributeTypeClass, ArrayList.class)); // depends on control dependency: [if], data = [none]
}
else
{
Iterator collectionItems = ((Collection) retVal).iterator();
while (collectionItems.hasNext())
{
resultList.add(collectionItems.next()); // depends on control dependency: [while], data = [none]
}
setFieldValue(entity, member, resultList); // depends on control dependency: [if], data = [none]
}
}
else if (retVal != null && !((Collection) retVal).isEmpty())
{
setFieldValue(entity, member, resultList); // depends on control dependency: [if], data = [none]
}
break;
case SET:
Class setAttributeTypeClass = PropertyAccessorHelper.getGenericClass(member);
Class setClazz = null;
boolean isElementCollectionSet = false;
if (setAttributeTypeClass.isAssignableFrom(byte[].class))
{
setClazz = ByteBuffer.class; // depends on control dependency: [if], data = [none]
}
else if (setAttributeTypeClass.isAnnotationPresent(Embeddable.class))
{
isElementCollectionSet = true; // depends on control dependency: [if], data = [none]
setClazz = UDTValue.class; // depends on control dependency: [if], data = [none]
}
else
{
setClazz = setAttributeTypeClass; // depends on control dependency: [if], data = [none]
}
retVal = row.getSet(columnName, setClazz);
Collection resultSet = new HashSet();
if (isElementCollectionSet)
{
Iterator collectionItems = ((Collection) retVal).iterator();
while (collectionItems.hasNext())
{
resultSet.add(setUDTValue(entity, setAttributeTypeClass, (UDTValue) collectionItems.next(),
metamodel)); // depends on control dependency: [while], data = [none]
}
}
if (retVal != null && !((Set) retVal).isEmpty() && !isElementCollectionSet)
{
if (setAttributeTypeClass.isAssignableFrom(byte[].class))
{
setFieldValue(entity, member, CassandraDataTranslator.marshalCollection(BytesType.class,
(Collection) retVal, setAttributeTypeClass, HashSet.class)); // depends on control dependency: [if], data = [none]
}
else
{
Iterator collectionItems = ((Collection) retVal).iterator();
while (collectionItems.hasNext())
{
resultSet.add(collectionItems.next()); // depends on control dependency: [while], data = [none]
}
setFieldValue(entity, member, resultSet); // depends on control dependency: [if], data = [none]
}
}
else if (retVal != null && !((Collection) retVal).isEmpty())
{
setFieldValue(entity, member, resultSet); // depends on control dependency: [if], data = [none]
}
break;
/*
* ASCII, BIGINT, BLOB, BOOLEAN, COUNTER, DECIMAL, DOUBLE, FLOAT, INET,
* INT, TEXT, TIMESTAMP, UUID, VARCHAR, VARINT, TIMEUUID, LIST, SET,
* MAP, CUSTOM;
*/
case MAP:
List<Class<?>> mapGenericClasses = PropertyAccessorHelper.getGenericClasses(member);
Class keyClass = CassandraValidationClassMapper.getValidationClassInstance(mapGenericClasses.get(0), true);
Class valueClass = CassandraValidationClassMapper
.getValidationClassInstance(mapGenericClasses.get(1), true);
Class mapValueClazz = null;
boolean isElementCollectionMap = false;
if (mapGenericClasses.get(1).isAssignableFrom(byte[].class))
{
mapValueClazz = ByteBuffer.class; // depends on control dependency: [if], data = [none]
}
else if (mapGenericClasses.get(1).isAnnotationPresent(Embeddable.class))
{
isElementCollectionMap = true; // depends on control dependency: [if], data = [none]
mapValueClazz = UDTValue.class; // depends on control dependency: [if], data = [none]
}
else
{
mapValueClazz = mapGenericClasses.get(1); // depends on control dependency: [if], data = [none]
}
retVal = row.getMap(columnName, mapGenericClasses.get(0).isAssignableFrom(byte[].class) ? ByteBuffer.class
: mapGenericClasses.get(0), mapValueClazz);
Map resultMap = new HashMap();
if (isElementCollectionMap)
{
Iterator keys = ((Map) retVal).keySet().iterator();
while (keys.hasNext())
{
Object keyValue = keys.next();
resultMap.put(
keyValue,
setUDTValue(entity, mapGenericClasses.get(1), (UDTValue) ((Map) retVal).get(keyValue),
metamodel)); // depends on control dependency: [while], data = [none]
}
}
boolean isByteBuffer = mapGenericClasses.get(0).isAssignableFrom(byte[].class)
|| mapGenericClasses.get(1).isAssignableFrom(byte[].class);
// set the values.
if (retVal != null && !((Map) retVal).isEmpty() && !isElementCollectionMap)
{
if (isByteBuffer)
{
setFieldValue(entity, member,
CassandraDataTranslator.marshalMap(mapGenericClasses, keyClass, valueClass, (Map) retVal)); // depends on control dependency: [if], data = [none]
}
else
{
Iterator keys = ((Map) retVal).keySet().iterator();
while (keys.hasNext())
{
Object keyValue = keys.next();
resultMap.put(keyValue, ((Map) retVal).get(keyValue)); // depends on control dependency: [while], data = [none]
}
setFieldValue(entity, member, resultMap); // depends on control dependency: [if], data = [none]
}
}
else if (retVal != null && !((Map) retVal).isEmpty())
{
setFieldValue(entity, member, resultMap); // depends on control dependency: [if], data = [none]
}
break;
}
} } |
public class class_name {
public void changePassword(final String oldPassword, final String newPassword) {
Authentication authentication = SecurityContextHolder.getContext()
.getAuthentication();
Assert.notNull(
authentication,
"No authentication object found in security context. Can't change current user's password!");
String username = authentication.getName();
logger.debug("Changing password for user '" + username);
DistinguishedName userDn = usernameMapper.buildDn(username);
if (usePasswordModifyExtensionOperation) {
changePasswordUsingExtensionOperation(userDn, oldPassword, newPassword);
} else {
changePasswordUsingAttributeModification(userDn, oldPassword, newPassword);
}
} } | public class class_name {
public void changePassword(final String oldPassword, final String newPassword) {
Authentication authentication = SecurityContextHolder.getContext()
.getAuthentication();
Assert.notNull(
authentication,
"No authentication object found in security context. Can't change current user's password!");
String username = authentication.getName();
logger.debug("Changing password for user '" + username);
DistinguishedName userDn = usernameMapper.buildDn(username);
if (usePasswordModifyExtensionOperation) {
changePasswordUsingExtensionOperation(userDn, oldPassword, newPassword); // depends on control dependency: [if], data = [none]
} else {
changePasswordUsingAttributeModification(userDn, oldPassword, newPassword); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static VTimeZone create(String tzid) {
BasicTimeZone basicTimeZone = TimeZone.getFrozenICUTimeZone(tzid, true);
if (basicTimeZone == null) {
return null;
}
VTimeZone vtz = new VTimeZone(tzid);
vtz.tz = (BasicTimeZone) basicTimeZone.cloneAsThawed();
vtz.olsonzid = vtz.tz.getID();
return vtz;
} } | public class class_name {
public static VTimeZone create(String tzid) {
BasicTimeZone basicTimeZone = TimeZone.getFrozenICUTimeZone(tzid, true);
if (basicTimeZone == null) {
return null; // depends on control dependency: [if], data = [none]
}
VTimeZone vtz = new VTimeZone(tzid);
vtz.tz = (BasicTimeZone) basicTimeZone.cloneAsThawed();
vtz.olsonzid = vtz.tz.getID();
return vtz;
} } |
public class class_name {
public void mapField(String oldFieldName, Integer pageNumber, String newFieldName) {
// page number = set of old field names
Set<String> parametersOnPage = parameterPageMap.get(pageNumber);
if (parametersOnPage == null) {
parametersOnPage = new HashSet<String>();
parameterPageMap.put(pageNumber, parametersOnPage);
}
parametersOnPage.add(oldFieldName);
// old field name = corresponding QueryField
QueryPageDAO queryPageDAO = new QueryPageDAO();
QueryPage queryPage = queryPageDAO.findByQueryAndPageNumber(targetQuery, pageNumber);
if (queryPage == null) {
throw new IllegalArgumentException("Unknown QueryPage by page number: " + pageNumber);
}
QueryFieldDAO queryFieldDAO = new QueryFieldDAO();
QueryField queryField = queryFieldDAO.findByQueryPageAndName(queryPage, newFieldName);
if (queryField == null) {
throw new IllegalArgumentException("Incorrect QueryField mapping: " + oldFieldName + "=" + pageNumber + "." + newFieldName);
}
if (queryFieldMap.containsKey(oldFieldName)) {
throw new IllegalArgumentException("Already mapped QueryField: " + oldFieldName);
}
queryFieldMap.put(oldFieldName, queryField);
} } | public class class_name {
public void mapField(String oldFieldName, Integer pageNumber, String newFieldName) {
// page number = set of old field names
Set<String> parametersOnPage = parameterPageMap.get(pageNumber);
if (parametersOnPage == null) {
parametersOnPage = new HashSet<String>(); // depends on control dependency: [if], data = [none]
parameterPageMap.put(pageNumber, parametersOnPage); // depends on control dependency: [if], data = [none]
}
parametersOnPage.add(oldFieldName);
// old field name = corresponding QueryField
QueryPageDAO queryPageDAO = new QueryPageDAO();
QueryPage queryPage = queryPageDAO.findByQueryAndPageNumber(targetQuery, pageNumber);
if (queryPage == null) {
throw new IllegalArgumentException("Unknown QueryPage by page number: " + pageNumber);
}
QueryFieldDAO queryFieldDAO = new QueryFieldDAO();
QueryField queryField = queryFieldDAO.findByQueryPageAndName(queryPage, newFieldName);
if (queryField == null) {
throw new IllegalArgumentException("Incorrect QueryField mapping: " + oldFieldName + "=" + pageNumber + "." + newFieldName);
}
if (queryFieldMap.containsKey(oldFieldName)) {
throw new IllegalArgumentException("Already mapped QueryField: " + oldFieldName);
}
queryFieldMap.put(oldFieldName, queryField);
} } |
public class class_name {
public void marshall(RegisterElasticIpRequest registerElasticIpRequest, ProtocolMarshaller protocolMarshaller) {
if (registerElasticIpRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(registerElasticIpRequest.getElasticIp(), ELASTICIP_BINDING);
protocolMarshaller.marshall(registerElasticIpRequest.getStackId(), STACKID_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(RegisterElasticIpRequest registerElasticIpRequest, ProtocolMarshaller protocolMarshaller) {
if (registerElasticIpRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(registerElasticIpRequest.getElasticIp(), ELASTICIP_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(registerElasticIpRequest.getStackId(), STACKID_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
    /**
     * Runs one scheduled file-system scan for this monitor holder.
     * <p>
     * The scan is skipped entirely when the holder is stopped, the monitor is of
     * the external type, the framework is stopping, or another scan is already in
     * progress ({@code scanInProgress} CAS guard). When the scan lock can be
     * acquired, changes are collected and then re-verified in a loop that requires
     * two consecutive quiet intervals (no further changes) before the results are
     * considered stable — this guards against reading files that are mid-copy.
     * Monitor notification ({@code monitor.onChange}) deliberately happens
     * <em>outside</em> the scan lock to avoid deadlocks; monitors that throw
     * repeatedly are disabled after {@code NUMBER_OF_EXCEPTIONS_BEFORE_DISABLING_MONITOR}
     * consecutive failures.
     */
    @Trivial
    @FFDCIgnore(InterruptedException.class)
    void scheduledScan() {
        // Don't perform a scheduled scan if this monitor holder is paused
        if (isStopped)
            return;
        // 152229: Changed this code to get the monitor type locally. That is, now we save the monitor type in the constructor.
        // We used to get the monitor type here by monitorRef.getProperty(FileMonitor.MONITOR_TYPE)). That caused a
        // ConcurrentModificationException because of interference from the JMocked FileMonitor in the unit test code.
        // Don't do anything if this is an external monitor
        if (FileMonitor.MONITOR_TYPE_EXTERNAL.equals(monitorRef.getProperty(FileMonitor.MONITOR_TYPE))) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(this, tc, "scheduledScan - RETURN early - external monitor");
            }
            return;
        }
        // Don't do anything if the framework is stopping. Allow normal component cleanup
        // to deactivate/clean up the scheduled tasks, but make this a no-op if the
        // server is shutting down.
        if (FrameworkState.isStopping()) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(this, tc, "scheduledScan - RETURN early - framework stopping");
            }
            return;
        }
        // Don't do anything unless we can set scanInProgress to true
        // Use this to prevent scanning while a scan is in progress. Monitor notification must happen
        // outside of the lock to prevent deadlocks.
        if (!scanInProgress.compareAndSet(false, true)) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(this, tc, "scheduledScan - RETURN early - already scan in progress?");
            }
            return;
        }
        try {
            Set<File> created = new HashSet<File>();
            Set<File> deleted = new HashSet<File>();
            Set<File> modified = new HashSet<File>();
            // Try to obtain the scan lock -- this might fail if the monitor configuration is being updated
            if (scanLock.tryLock()) {
                try {
                    // Always try destroy when we obtain the lock: it will return true if this is in destroy or destroyed state
                    // Also (after we have tried doDestroy) ensure that we are in active state
                    if (!doDestroy() && (monitorState.get() == MonitorState.ACTIVE.ordinal())) {
                        if (coreService.isDetailedScanTraceEnabled() && TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                            Tr.debug(this, tc, "File monitor scan: begin", updateMonitors);
                        }
                        scanForUpdates(created, deleted, modified);
                        if (!created.isEmpty() || !modified.isEmpty() || !deleted.isEmpty()) {
                            // Check again, make sure there have been no further changes since the scan we just
                            // ran (we don't want to read the files until any updates are complete, files may be
                            // in process of being copied).
                            // what seems to be the vogue is to do this check to make sure nothing moved twice.
                            // i.e. keep the re-check interval at 100ms, but require two clean go-rounds before
                            // delivering the all clear.
                            boolean oneClean = false;
                            boolean twoClean = false;
                            List<File> createdCheck = new ArrayList<File>();
                            List<File> deletedCheck = new ArrayList<File>();
                            List<File> modifiedCheck = new ArrayList<File>();
                            do {
                                // Wait for 100 ms before checking again to give files time to finish
                                // copying if they are mid copy. Note this may not work for copying
                                // large files via programs like FTP where the copy may pause or
                                // if an OS creates the file and sets the size/last modified before
                                // the copy completes, but it should fix it for smaller files or for the
                                // test environment where some files are streamed over rather than copied.
                                try {
                                    // Only used once and not sure it needs to be configurable so didn't create a
                                    // constant for the delay period.
                                    Thread.sleep(TIME_TO_WAIT_FOR_COPY_TO_COMPLETE);
                                } catch (InterruptedException ex) {
                                    // Intentionally ignored (see @FFDCIgnore): the sleep is best-effort pacing only.
                                }
                                // Clear the lists, want a clean set rather than appending to existing to check
                                // if this loop is "update free". Do not clear the deletedCreatedCheck or
                                // deletedModifiedCheck as these need to track status over multiple loops.
                                createdCheck.clear();
                                deletedCheck.clear();
                                modifiedCheck.clear();
                                scanForUpdates(createdCheck, deletedCheck, modifiedCheck);
                                resolveChangesForScheduledScan(created, deleted, modified, createdCheck, deletedCheck, modifiedCheck);
                                if (createdCheck.isEmpty() && modifiedCheck.isEmpty() && deletedCheck.isEmpty()) {
                                    // This run was clean-- hooray!
                                    if (oneClean) {
                                        twoClean = true; // <-- loop exit condition
                                    } else {
                                        oneClean = true; // <-- hopefully only one more time through
                                    }
                                } else {
                                    oneClean = false; // bummer.
                                }
                                // Keep going until we have two 100ms intervals with no changes
                                // (AND the runtime/framework is still happy)
                            } while (!twoClean && FrameworkState.isValid());
                        }
                    }
                } catch (RuntimeException e) {
                    // TODO: MUST CATCH exceptions here (to at least get FFDC)... ick
                } finally {
                    try {
                        doDestroy(); // always attempt destroy while we hold the lock
                    } finally {
                        scanLock.unlock();
                    }
                }
                // NOTE: notification happens here, after the scan lock is released, to avoid deadlocks.
                if (!created.isEmpty() || !modified.isEmpty() || !deleted.isEmpty()) {
                    // changes were discovered: trace & call the registered file monitor
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                        Tr.debug(this, tc, "File monitor scan: end; resources changed",
                                 created.size() + " created",
                                 modified.size() + " modified",
                                 deleted.size() + " deleted",
                                 "running=" + FrameworkState.isValid());
                    }
                    // Even if we do get into a scan, make sure the framework is still good before we
                    // push the notification of updates-- Avoid propagating change notification
                    // while components that might react to them are being shut down
                    if (FrameworkState.isValid()) {
                        try {
                            monitor.onChange(created, modified, deleted);
                            // If the monitor handled the call cleanly, reset our exception count
                            exceptionCount = 0;
                        } catch (RuntimeException e) {
                            // FFDC instrumentation will go here
                            // Catch the exception so it doesn't kill the whole scheduler
                            exceptionCount++;
                            Tr.warning(tc, "fileMonitorException", created, modified, deleted, monitor.getClass(), e.getLocalizedMessage());
                            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                                Tr.debug(this, tc, "scheduledScan - exceptionCount=" + exceptionCount);
                            }
                            // If the monitor has thrown exceptions a few times in a row abandon
                            // monitoring for it
                            if (exceptionCount >= NUMBER_OF_EXCEPTIONS_BEFORE_DISABLING_MONITOR) {
                                Tr.warning(tc, "fileMonitorDisabled", NUMBER_OF_EXCEPTIONS_BEFORE_DISABLING_MONITOR, monitor.getClass());
                                // Reset the exceptionCount just in case we get re-enabled by outside forces for some unknown reason
                                exceptionCount = 0;
                                destroy();
                            }
                        }
                    } else {
                        //no framework, we should try to cleanup.
                        stop();
                    }
                } else if (coreService.isDetailedScanTraceEnabled() && TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    // If super detailed trace is enabled, we trace the begin/end of all file scans
                    Tr.debug(this, tc, "File monitor scan: end; no changes");
                }
            } // end if tryLock
        } finally {
            scanInProgress.set(false);
        }
    }
}
@Trivial
@FFDCIgnore(InterruptedException.class)
void scheduledScan() {
// Don't perform a scheduled scan if this monitor holder is paused
if (isStopped)
return;
// 152229: Changed this code to get the monitor type locally. That is, now we save the monitor type in the constructor.
// We used to get the monitor type here by monitorRef.getProperty(FileMonitor.MONITOR_TYPE)). That caused a
// ConcurrentModificationException because of interference from the JMocked FileMonitor in the unit test code.
// Don't do anything if this is an external monitor
if (FileMonitor.MONITOR_TYPE_EXTERNAL.equals(monitorRef.getProperty(FileMonitor.MONITOR_TYPE))) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(this, tc, "scheduledScan - RETURN early - external monitor"); // depends on control dependency: [if], data = [none]
}
return; // depends on control dependency: [if], data = [none]
}
// Don't do anything if the framework is stopping. Allow normal component cleanup
// to deactivate/clean up the scheduled tasks, but make this a no-op if the
// server is shutting down.
if (FrameworkState.isStopping()) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(this, tc, "scheduledScan - RETURN early - framework stopping"); // depends on control dependency: [if], data = [none]
}
return; // depends on control dependency: [if], data = [none]
}
// Don't do anything unless we can set scanInProgress to true
// Use this to prevent scanning while a scan is in progress. Monitor notification must happen
// outside of the lock to prevent deadlocks.
if (!scanInProgress.compareAndSet(false, true)) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(this, tc, "scheduledScan - RETURN early - already scan in progress?"); // depends on control dependency: [if], data = [none]
}
return; // depends on control dependency: [if], data = [none]
}
try {
Set<File> created = new HashSet<File>();
Set<File> deleted = new HashSet<File>();
Set<File> modified = new HashSet<File>();
// Try to obtain the scan lock -- this might fail if the monitor configuration is being updated
if (scanLock.tryLock()) {
try {
// Always try destroy when we obtain the lock: it will return true if this is in destroy or destroyed state
// Also (after we have tried doDestroy) ensure that we are in active state
if (!doDestroy() && (monitorState.get() == MonitorState.ACTIVE.ordinal())) {
if (coreService.isDetailedScanTraceEnabled() && TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(this, tc, "File monitor scan: begin", updateMonitors); // depends on control dependency: [if], data = [none]
}
scanForUpdates(created, deleted, modified); // depends on control dependency: [if], data = [none]
if (!created.isEmpty() || !modified.isEmpty() || !deleted.isEmpty()) {
// Check again, make sure there have been no further changes since the scan we just
// ran (we don't want to read the files until any updates are complete, files may be
// in process of being copied).
// what seems to be the vogue is to do this check to make sure nothing moved twice.
// i.e. keep the re-check interval at 100ms, but require two clean go-rounds before
// delivering the all clear.
boolean oneClean = false;
boolean twoClean = false;
List<File> createdCheck = new ArrayList<File>();
List<File> deletedCheck = new ArrayList<File>();
List<File> modifiedCheck = new ArrayList<File>();
do {
// Wait for 100 ms before checking again to give files time to finish
// copying if they are mid copy. Note this may not work for copying
// large files via programs like FTP where the copy may pause or
// if an OS creates the file and sets the size/last modified before
// the copy completes, but it should fix it for smaller files or for the
// test environment where some files are streamed over rather than copied.
try {
// Only used once and not sure it needs to be configurable so didn't create a
// constant for the delay period.
Thread.sleep(TIME_TO_WAIT_FOR_COPY_TO_COMPLETE); // depends on control dependency: [try], data = [none]
} catch (InterruptedException ex) {
} // depends on control dependency: [catch], data = [none]
// Clear the lists, want a clean set rather than appending to existing to check
// if this loop is "update free". Do not clear the deletedCreatedCheck or
// deletedModifiedCheck as these need to track status over multiple loops.
createdCheck.clear();
deletedCheck.clear();
modifiedCheck.clear();
scanForUpdates(createdCheck, deletedCheck, modifiedCheck);
resolveChangesForScheduledScan(created, deleted, modified, createdCheck, deletedCheck, modifiedCheck);
if (createdCheck.isEmpty() && modifiedCheck.isEmpty() && deletedCheck.isEmpty()) {
// This run was clean-- hooray!
if (oneClean) {
twoClean = true; // <-- loop exit condition // depends on control dependency: [if], data = [none]
} else {
oneClean = true; // <-- hopefully only one more time through // depends on control dependency: [if], data = [none]
}
} else {
oneClean = false; // bummer. // depends on control dependency: [if], data = [none]
}
// Keep going until we have two 100ms intervals with no changes
// (AND the runtime/framework is still happy)
} while (!twoClean && FrameworkState.isValid());
}
}
} catch (RuntimeException e) {
// TODO: MUST CATCH exceptions here (to at least get FFDC)... ick
} finally { // depends on control dependency: [catch], data = [none]
try {
doDestroy(); // always attempt destroy while we hold the lock // depends on control dependency: [try], data = [none]
} finally {
scanLock.unlock();
}
}
if (!created.isEmpty() || !modified.isEmpty() || !deleted.isEmpty()) {
// changes were discovered: trace & call the registered file monitor
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(this, tc, "File monitor scan: end; resources changed",
created.size() + " created",
modified.size() + " modified",
deleted.size() + " deleted",
"running=" + FrameworkState.isValid()); // depends on control dependency: [if], data = [none]
}
// Even if we do get into a scan, make sure the framework is still good before we
// push the notification of updates-- Avoid propagating change notification
// while components that might react to them are being shut down
if (FrameworkState.isValid()) {
try {
monitor.onChange(created, modified, deleted); // depends on control dependency: [try], data = [none]
// If the monitor handled the call cleanly, reset our exception count
exceptionCount = 0; // depends on control dependency: [try], data = [exception]
} catch (RuntimeException e) {
// FFDC instrumentation will go here
// Catch the exception so it doesn't kill the whole scheduler
exceptionCount++;
Tr.warning(tc, "fileMonitorException", created, modified, deleted, monitor.getClass(), e.getLocalizedMessage());
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(this, tc, "scheduledScan - exceptionCount=" + exceptionCount); // depends on control dependency: [if], data = [none]
}
// If the monitor has thrown exceptions a few times in a row abandon
// monitoring for it
if (exceptionCount >= NUMBER_OF_EXCEPTIONS_BEFORE_DISABLING_MONITOR) {
Tr.warning(tc, "fileMonitorDisabled", NUMBER_OF_EXCEPTIONS_BEFORE_DISABLING_MONITOR, monitor.getClass()); // depends on control dependency: [if], data = [none]
// Reset the exceptionCount just in case we get re-enabled by outside forces for some unknown reason
exceptionCount = 0; // depends on control dependency: [if], data = [none]
destroy(); // depends on control dependency: [if], data = [none]
}
} // depends on control dependency: [catch], data = [none]
} else {
//no framework, we should try to cleanup.
stop(); // depends on control dependency: [if], data = [none]
}
} else if (coreService.isDetailedScanTraceEnabled() && TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
// If super detailed trace is enabled, we trace the begin/end of all file scans
Tr.debug(this, tc, "File monitor scan: end; no changes"); // depends on control dependency: [if], data = [none]
}
} // end if tryLock
} finally {
scanInProgress.set(false);
}
} } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.