| code (string, lengths 130–281k) | code_dependency (string, lengths 182–306k) |
|---|---|
public class class_name {
    /**
     * Deserializes a {@code java.beans} XML document (as produced by
     * {@code XMLEncoder}) back into an object.
     *
     * <p>FIX: the original used {@code xmlString.getBytes()}, which encodes with the
     * platform default charset; XMLEncoder emits UTF-8 by default, so the bytes must
     * be produced with UTF-8 explicitly or non-ASCII content is corrupted on some
     * platforms.</p>
     *
     * <p>SECURITY NOTE(review): XMLDecoder executes the method calls described in
     * the document — never feed it untrusted input.</p>
     *
     * @param xmlString the XML document text
     * @param <T>       the expected type of the decoded object
     * @return the decoded object
     */
    @SuppressWarnings("unchecked")
    public static <T> T toObjectWithXMLDecoder(final String xmlString)
    {
        // try-with-resources replaces the manual try/finally close
        // (XMLDecoder is AutoCloseable since Java 7)
        try (XMLDecoder dec = new XMLDecoder(
                new ByteArrayInputStream(xmlString.getBytes(StandardCharsets.UTF_8)))) {
            return (T) dec.readObject();
        }
    }
}
} } | public class class_name {
@SuppressWarnings("unchecked")
public static <T> T toObjectWithXMLDecoder(final String xmlString)
{
XMLDecoder dec = null;
T obj = null;
try
{
final InputStream is = new ByteArrayInputStream(xmlString.getBytes());
dec = new XMLDecoder(is); // depends on control dependency: [try], data = [none]
obj = (T)dec.readObject(); // depends on control dependency: [try], data = [none]
}
finally
{
if (dec != null)
{
dec.close(); // depends on control dependency: [if], data = [none]
}
}
return obj;
} } |
public class class_name {
private static void populateCache() {
if (cache == null) {
cache = new LinkedHashMap<>();
logger.info("IDataDecoders found:");
ServiceLoader<IDataDecoder> loader =
ServiceLoader.load(IDataDecoder.class);
// Logging.
for (IDataDecoder discoveredDecoder : loader) {
String name = discoveredDecoder.getClass().getCanonicalName();
String decoderMimeType = discoveredDecoder.getMimeType();
logger.info(String.format(" %s -> %s",
decoderMimeType, name));
cache.put(decoderMimeType, discoveredDecoder.getClass());
}
}
} } | public class class_name {
private static void populateCache() {
if (cache == null) {
cache = new LinkedHashMap<>(); // depends on control dependency: [if], data = [none]
logger.info("IDataDecoders found:"); // depends on control dependency: [if], data = [none]
ServiceLoader<IDataDecoder> loader =
ServiceLoader.load(IDataDecoder.class);
// Logging.
for (IDataDecoder discoveredDecoder : loader) {
String name = discoveredDecoder.getClass().getCanonicalName();
String decoderMimeType = discoveredDecoder.getMimeType();
logger.info(String.format(" %s -> %s",
decoderMimeType, name)); // depends on control dependency: [for], data = [none]
cache.put(decoderMimeType, discoveredDecoder.getClass()); // depends on control dependency: [for], data = [discoveredDecoder]
}
}
} } |
public class class_name {
/**
 * Inserts, updates or removes the entry at the given indexes in this sparse
 * (COO-style) storage: an existing entry is overwritten (or removed when the
 * new value is 0); a new non-zero value is appended, growing the backing
 * buffers as needed.
 *
 * NOTE(review): assumes `values` holds the non-zero values and `indices`
 * holds rank()-sized index tuples at stride rank() — TODO confirm.
 *
 * @param indexes logical coordinates of the element (translated to physical
 *                coordinates first when this array is a view)
 * @param value   value to store; 0 means "remove any existing entry"
 */
public void addOrUpdate(long[] indexes, double value) {
long[] physicalIndexes = isView() ? translateToPhysical(indexes) : indexes;
// Linear scan over the existing entries looking for a matching index tuple.
for (int i = 0; i < length; i++) {
long[] idx = getUnderlyingIndicesOf(i).asLong();
if (Arrays.equals(idx, physicalIndexes)) {
// There is already a non-null value at this index
// -> update the current value, the sort is maintained
if (value == 0) {
removeEntry(i);
length--;
} else {
values.put(i, value);
// NOTE(review): incrementing length while merely overwriting an existing
// entry looks suspicious (no new entry was added) — confirm the intended
// semantics of `length` before touching this.
length++;
}
return;
}
}
// If the value is 0 and there is no existing non-null value at the given index
if (value == 0) {
return;
}
/* It's a new non-null element. We add the value and the indexes at the end of their respective databuffers.
 * The buffers are no longer sorted !
 * /!\ We need to reallocate the buffers if they are full
 */
while (!canInsert(values, 1)) {
long size = (long) Math.ceil((values.capacity() * THRESHOLD_MEMORY_ALLOCATION));
values.reallocate(size);
}
values.put(length, value);
while (!canInsert(indices, physicalIndexes.length)) {
long size = (long) Math.ceil((indices.capacity() * THRESHOLD_MEMORY_ALLOCATION));
indices.reallocate(size);
}
// Append the physical index tuple after the last stored tuple.
for (int i = 0; i < physicalIndexes.length; i++) {
indices.put(length * rank() + i, physicalIndexes[i]);
}
length++;
isSorted = false;
} } | public class class_name {
public void addOrUpdate(long[] indexes, double value) {
long[] physicalIndexes = isView() ? translateToPhysical(indexes) : indexes;
for (int i = 0; i < length; i++) {
long[] idx = getUnderlyingIndicesOf(i).asLong();
if (Arrays.equals(idx, physicalIndexes)) {
// There is already a non-null value at this index
// -> update the current value, the sort is maintained
if (value == 0) {
removeEntry(i); // depends on control dependency: [if], data = [none]
length--; // depends on control dependency: [if], data = [none]
} else {
values.put(i, value); // depends on control dependency: [if], data = [none]
length++; // depends on control dependency: [if], data = [none]
}
return; // depends on control dependency: [if], data = [none]
}
}
// If the value is 0 and there is no existing non-null value at the given index
if (value == 0) {
return; // depends on control dependency: [if], data = [none]
}
/* It's a new non-null element. We add the value and the indexes at the end of their respective databuffers.
* The buffers are no longer sorted !
* /!\ We need to reallocate the buffers if they are full
*/
while (!canInsert(values, 1)) {
long size = (long) Math.ceil((values.capacity() * THRESHOLD_MEMORY_ALLOCATION));
values.reallocate(size); // depends on control dependency: [while], data = [none]
}
values.put(length, value);
while (!canInsert(indices, physicalIndexes.length)) {
long size = (long) Math.ceil((indices.capacity() * THRESHOLD_MEMORY_ALLOCATION));
indices.reallocate(size); // depends on control dependency: [while], data = [none]
}
for (int i = 0; i < physicalIndexes.length; i++) {
indices.put(length * rank() + i, physicalIndexes[i]); // depends on control dependency: [for], data = [i]
}
length++;
isSorted = false;
} } |
public class class_name {
public boolean create() {
ConcurrentHashMap<String, UsageParamSetLocalData> notificationSourceUsageParamSets = notificationSourceUsageParamSetsMap.get(notificationSource);
if (notificationSourceUsageParamSets == null) {
ConcurrentHashMap<String, UsageParamSetLocalData> newNotificationSourceUsageParamSets = new ConcurrentHashMap<String, UsageParamSetLocalData>();
notificationSourceUsageParamSets = notificationSourceUsageParamSetsMap.putIfAbsent(notificationSource, newNotificationSourceUsageParamSets);
if (notificationSourceUsageParamSets == null) {
notificationSourceUsageParamSets = newNotificationSourceUsageParamSets;
}
}
if (!notificationSourceUsageParamSets.containsKey(usageParameterSetName)) {
return notificationSourceUsageParamSets.putIfAbsent(usageParameterSetName,new UsageParamSetLocalData(usageParameterSetName)) == null;
}
else {
return false;
}
} } | public class class_name {
public boolean create() {
ConcurrentHashMap<String, UsageParamSetLocalData> notificationSourceUsageParamSets = notificationSourceUsageParamSetsMap.get(notificationSource);
if (notificationSourceUsageParamSets == null) {
ConcurrentHashMap<String, UsageParamSetLocalData> newNotificationSourceUsageParamSets = new ConcurrentHashMap<String, UsageParamSetLocalData>();
notificationSourceUsageParamSets = notificationSourceUsageParamSetsMap.putIfAbsent(notificationSource, newNotificationSourceUsageParamSets); // depends on control dependency: [if], data = [none]
if (notificationSourceUsageParamSets == null) {
notificationSourceUsageParamSets = newNotificationSourceUsageParamSets; // depends on control dependency: [if], data = [none]
}
}
if (!notificationSourceUsageParamSets.containsKey(usageParameterSetName)) {
return notificationSourceUsageParamSets.putIfAbsent(usageParameterSetName,new UsageParamSetLocalData(usageParameterSetName)) == null; // depends on control dependency: [if], data = [none]
}
else {
return false; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static List<Pair<Long, Double>> readScoredItems(final File userRecommendationFile, final Long user) throws IOException {
final Map<Long, List<Pair<Long, Double>>> mapUserRecommendations = new HashMap<Long, List<Pair<Long, Double>>>();
BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(userRecommendationFile), "UTF-8"));
try {
String line = null;
boolean foundUser = false;
// read recommendations: user \t item \t score
while ((line = in.readLine()) != null) {
String[] toks = line.split("\t");
String u = toks[0];
if (u.equals(user + "")) {
StrategyIO.readLine(line, mapUserRecommendations);
foundUser = true;
} else if (foundUser) {
// assuming a sorted file (at least, per user)
break;
}
}
} finally {
in.close();
}
return mapUserRecommendations.get(user);
} } | public class class_name {
public static List<Pair<Long, Double>> readScoredItems(final File userRecommendationFile, final Long user) throws IOException {
final Map<Long, List<Pair<Long, Double>>> mapUserRecommendations = new HashMap<Long, List<Pair<Long, Double>>>();
BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(userRecommendationFile), "UTF-8"));
try {
String line = null;
boolean foundUser = false;
// read recommendations: user \t item \t score
while ((line = in.readLine()) != null) {
String[] toks = line.split("\t");
String u = toks[0];
if (u.equals(user + "")) {
StrategyIO.readLine(line, mapUserRecommendations); // depends on control dependency: [if], data = [none]
foundUser = true; // depends on control dependency: [if], data = [none]
} else if (foundUser) {
// assuming a sorted file (at least, per user)
break;
}
}
} finally {
in.close();
}
return mapUserRecommendations.get(user);
} } |
public class class_name {
@Override
public synchronized void authenticate(Context context, SocializeAuthListener authListener) {
if(assertInitialized(context, authListener)) {
userSystem.authenticate(context, authListener, this);
}
} } | public class class_name {
@Override
public synchronized void authenticate(Context context, SocializeAuthListener authListener) {
if(assertInitialized(context, authListener)) {
userSystem.authenticate(context, authListener, this); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@Override
public void setCursorPosition(TerminalPosition position) {
if(position == null) {
//Skip any validation checks if we just want to hide the cursor
this.cursorPosition = null;
return;
}
if(position.getColumn() < 0) {
position = position.withColumn(0);
}
if(position.getRow() < 0) {
position = position.withRow(0);
}
if(position.getColumn() >= terminalSize.getColumns()) {
position = position.withColumn(terminalSize.getColumns() - 1);
}
if(position.getRow() >= terminalSize.getRows()) {
position = position.withRow(terminalSize.getRows() - 1);
}
this.cursorPosition = position;
} } | public class class_name {
@Override
public void setCursorPosition(TerminalPosition position) {
if(position == null) {
//Skip any validation checks if we just want to hide the cursor
this.cursorPosition = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
if(position.getColumn() < 0) {
position = position.withColumn(0); // depends on control dependency: [if], data = [0)]
}
if(position.getRow() < 0) {
position = position.withRow(0); // depends on control dependency: [if], data = [0)]
}
if(position.getColumn() >= terminalSize.getColumns()) {
position = position.withColumn(terminalSize.getColumns() - 1); // depends on control dependency: [if], data = [none]
}
if(position.getRow() >= terminalSize.getRows()) {
position = position.withRow(terminalSize.getRows() - 1); // depends on control dependency: [if], data = [none]
}
this.cursorPosition = position;
} } |
public class class_name {
public static byte[] readBytes( File file ) throws IOException {
if (file == null) return new byte[] {};
InputStream stream = new BufferedInputStream(new FileInputStream(file));
boolean error = false;
try {
return readBytes(stream);
} catch (IOException e) {
error = true; // this error should be thrown, even if there is an error closing stream
throw e;
} catch (RuntimeException e) {
error = true; // this error should be thrown, even if there is an error closing stream
throw e;
} finally {
try {
stream.close();
} catch (IOException e) {
if (!error) throw e;
}
}
} } | public class class_name {
public static byte[] readBytes( File file ) throws IOException {
if (file == null) return new byte[] {};
InputStream stream = new BufferedInputStream(new FileInputStream(file));
boolean error = false;
try {
return readBytes(stream);
} catch (IOException e) {
error = true; // this error should be thrown, even if there is an error closing stream
throw e;
} catch (RuntimeException e) {
error = true; // this error should be thrown, even if there is an error closing stream
throw e;
} finally {
try {
stream.close(); // depends on control dependency: [try], data = [none]
} catch (IOException e) {
if (!error) throw e;
} // depends on control dependency: [catch], data = [none]
}
} } |
public class class_name {
    /**
     * Converts an arbitrary object to a boolean: null is false, a Boolean is
     * unboxed, anything else is converted via its string representation.
     */
    public static boolean convertToBoolean(Object o)
    {
        if (o instanceof Boolean) {
            // instanceof is false for null, so this is safe to check first
            return ((Boolean) o).booleanValue();
        }
        return o != null && convertToBoolean(o.toString());
    }
}
} } | public class class_name {
public static boolean convertToBoolean(Object o)
{
if(o == null)
return false;
if(o instanceof Boolean)
{
return (Boolean) o; // depends on control dependency: [if], data = [none]
}
return convertToBoolean(o.toString());
} } |
public class class_name {
@Override
public void loadImageThumbnail(final ImageView iv, String imageUrl, int dimension) {
if (!TextUtils.isEmpty(imageUrl)) {
Picasso.with(iv.getContext())
.load(imageUrl)
.resize(dimension, dimension)
.centerCrop()
.into(iv);
} else {
iv.setImageDrawable(null);
}
} } | public class class_name {
@Override
public void loadImageThumbnail(final ImageView iv, String imageUrl, int dimension) {
if (!TextUtils.isEmpty(imageUrl)) {
Picasso.with(iv.getContext())
.load(imageUrl)
.resize(dimension, dimension)
.centerCrop()
.into(iv); // depends on control dependency: [if], data = [none]
} else {
iv.setImageDrawable(null); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
    /**
     * Builds, for each window size from {@code minSize} to {@code maxSize}, a list
     * containing the first {@code size} elements (prefix) and/or the last
     * {@code size} elements (suffix) of {@code items}. Positions that fall outside
     * the list (when the window is larger than the list) are filled with
     * {@code paddingSymbol}.
     *
     * @param items           the source list
     * @param minSize         smallest window size (must be &gt; 0)
     * @param maxSize         largest window size (must be &gt;= minSize)
     * @param paddingSymbol   placeholder for out-of-range positions
     * @param includePrefixes whether to include the leading elements
     * @param includeSuffixes whether to include the trailing elements
     * @return one list per window size, in increasing size order
     */
    public static <T> List<List<T>> getPrefixesAndSuffixes(List<T> items, int minSize, int maxSize, T paddingSymbol, boolean includePrefixes, boolean includeSuffixes) {
        assert minSize > 0;
        assert maxSize >= minSize;
        assert includePrefixes || includeSuffixes;
        List<List<T>> prefixesAndSuffixes = new ArrayList<List<T>>();
        for (int span = minSize - 1; span < maxSize; span++) {
            List<Integer> indices = new ArrayList<Integer>();
            if (includePrefixes) {
                for (int i = 0; i <= span; i++) {
                    indices.add(i);
                }
            }
            if (includeSuffixes) {
                int maxIndex = items.size() - 1;
                // suffix indices run maxIndex-span .. maxIndex (may be negative
                // when the window exceeds the list — padded below)
                for (int i = span; i >= 0; i--) {
                    indices.add(maxIndex - i);
                }
            }
            List<T> seq = new ArrayList<T>(indices.size());
            for (int i : indices) {
                // FIX: explicit bounds check instead of catching
                // IndexOutOfBoundsException — identical result, no exception-driven
                // control flow, and no risk of mistaking an IOOBE thrown by a broken
                // List implementation for an intentionally out-of-range index.
                if (i >= 0 && i < items.size()) {
                    seq.add(items.get(i));
                } else {
                    seq.add(paddingSymbol);
                }
            }
            prefixesAndSuffixes.add(seq);
        }
        return prefixesAndSuffixes;
    }
}
} } | public class class_name {
public static <T> List<List<T>> getPrefixesAndSuffixes(List<T> items, int minSize, int maxSize, T paddingSymbol, boolean includePrefixes, boolean includeSuffixes) {
assert minSize > 0;
assert maxSize >= minSize;
assert includePrefixes || includeSuffixes;
List<List<T>> prefixesAndSuffixes = new ArrayList<List<T>>();
for (int span = minSize - 1; span < maxSize; span++) {
List<Integer> indices = new ArrayList<Integer>();
List<T> seq = new ArrayList<T>();
if (includePrefixes) {
for (int i = 0; i <= span; i++) {
indices.add(i);
// depends on control dependency: [for], data = [i]
}
}
if (includeSuffixes) {
int maxIndex = items.size() - 1;
for (int i = span; i >= 0; i--) {
indices.add(maxIndex - i);
// depends on control dependency: [for], data = [i]
}
}
for (int i : indices) {
try {
seq.add(items.get(i));
// depends on control dependency: [try], data = [none]
} catch (IndexOutOfBoundsException ioobe) {
seq.add(paddingSymbol);
}
// depends on control dependency: [catch], data = [none]
}
prefixesAndSuffixes.add(seq);
// depends on control dependency: [for], data = [none]
}
return prefixesAndSuffixes;
} } |
public class class_name {
/**
 * Appends the given messaging engine's UUID to this message's fingerprint list,
 * creating the list in the header if this is the first fingerprint.
 *
 * @param meUuid UUID of the messaging engine to record
 */
public void addFingerprint(SIBUuid8 meUuid) {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(this, tc, "addFingerprint", meUuid);
String meUuidString = meUuid.toString();
// If the fingerprint list is empty, we need to create an empty list and add
// the given MEUuid to it.
if (getHdr2().getChoiceField(JsHdr2Access.FINGERPRINTS) == JsHdr2Access.IS_FINGERPRINTS_EMPTY) {
List<String> ids = new ArrayList<String>();
ids.add(meUuidString);
getHdr2().setField(JsHdr2Access.FINGERPRINTS_RFPLIST_ID, ids);
}
// If there is a list, we need to extract it. There is no point cacheing the
// list as this will only ever be called once per message in an ME.
else {
// NOTE(review): unchecked cast — the JMF field is assumed to always hold a
// List<String>; consider a scoped @SuppressWarnings("unchecked") with rationale.
List<String> ids = (List<String>) getHdr2().getField(JsHdr2Access.FINGERPRINTS_RFPLIST_ID);
// A JSList does not implement add(), because JMF Lists can't be added to
// 'in situ', so we have to 'clone' it, add to the clone & set the clone into the message.
List<String> newIds = new ArrayList<String>(ids);
newIds.add(meUuidString);
getHdr2().setField(JsHdr2Access.FINGERPRINTS_RFPLIST_ID, newIds);
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(this, tc, "addFingerprint");
} } | public class class_name {
public void addFingerprint(SIBUuid8 meUuid) {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(this, tc, "addFingerprint", meUuid);
String meUuidString = meUuid.toString();
// If the fingerprint list is empty, we need to create an empty list and add
// the given MEUuid to it.
if (getHdr2().getChoiceField(JsHdr2Access.FINGERPRINTS) == JsHdr2Access.IS_FINGERPRINTS_EMPTY) {
List<String> ids = new ArrayList<String>();
ids.add(meUuidString); // depends on control dependency: [if], data = [none]
getHdr2().setField(JsHdr2Access.FINGERPRINTS_RFPLIST_ID, ids); // depends on control dependency: [if], data = [none]
}
// If there is a list, we need to extract it. There is no point cacheing the
// list as this will only ever be called once per message in an ME.
else {
List<String> ids = (List<String>) getHdr2().getField(JsHdr2Access.FINGERPRINTS_RFPLIST_ID);
// A JSList does not implement add(), because JMF Lists can't be added to
// 'in situ', so we have to 'clone' it, add to the clone & set the clone into the message.
List<String> newIds = new ArrayList<String>(ids);
newIds.add(meUuidString); // depends on control dependency: [if], data = [none]
getHdr2().setField(JsHdr2Access.FINGERPRINTS_RFPLIST_ID, newIds); // depends on control dependency: [if], data = [none]
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(this, tc, "addFingerprint");
} } |
public class class_name {
public void writeTitle(String titleName, Object data) {
Object[] attrs = (Object[]) data;
try {
DBFField fields[] = new DBFField[attrs.length];
for (int i = 0; i < fields.length; i++) {
fields[i] = new DBFField();
fields[i].setName((String) attrs[i]);
fields[i].setDataType(DBFField.FIELD_TYPE_C);
// FIXME
fields[i].setFieldLength(500);
}
writer.setFields(fields);
} catch (DBFException e) {
throw new RuntimeException(e.getMessage());
}
} } | public class class_name {
public void writeTitle(String titleName, Object data) {
Object[] attrs = (Object[]) data;
try {
DBFField fields[] = new DBFField[attrs.length];
for (int i = 0; i < fields.length; i++) {
fields[i] = new DBFField(); // depends on control dependency: [for], data = [i]
fields[i].setName((String) attrs[i]); // depends on control dependency: [for], data = [i]
fields[i].setDataType(DBFField.FIELD_TYPE_C); // depends on control dependency: [for], data = [i]
// FIXME
fields[i].setFieldLength(500); // depends on control dependency: [for], data = [i]
}
writer.setFields(fields); // depends on control dependency: [try], data = [none]
} catch (DBFException e) {
throw new RuntimeException(e.getMessage());
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
/**
 * Instantiates the Renderer identified by the given name: a name ending in ".vm"
 * is treated as a velocity template, anything else as a fully-qualified class name
 * to load reflectively.
 *
 * @param rendererName a ".vm" template name, or a Renderer implementation class name
 * @return the renderer, or null when no class of that name exists (legal — a
 *         theme need not provide an implementation)
 * @throws SystemException if the named class is not a Renderer, or cannot be
 *         instantiated or accessed
 */
private static Renderer createRenderer(final String rendererName) {
if (rendererName.endsWith(".vm")) {
// This is a velocity template, so use a VelocityLayout
return new VelocityRenderer(rendererName);
}
try {
Class<?> managerClass = Class.forName(rendererName);
// NOTE(review): Class.newInstance() is deprecated since Java 9; the usual
// replacement, getDeclaredConstructor().newInstance(), wraps constructor
// exceptions in InvocationTargetException (a behavior change for callers),
// so it is deliberately left as-is here.
Object manager = managerClass.newInstance();
if (!(manager instanceof Renderer)) {
throw new SystemException(rendererName + " is not a Renderer");
}
return (Renderer) manager;
} catch (ClassNotFoundException e) {
// Legal - there might not a manager implementation in a given theme
return null;
} catch (InstantiationException e) {
throw new SystemException("Failed to instantiate " + rendererName, e);
} catch (IllegalAccessException e) {
throw new SystemException("Failed to access " + rendererName, e);
}
} } | public class class_name {
private static Renderer createRenderer(final String rendererName) {
if (rendererName.endsWith(".vm")) {
// This is a velocity template, so use a VelocityLayout
return new VelocityRenderer(rendererName); // depends on control dependency: [if], data = [none]
}
try {
Class<?> managerClass = Class.forName(rendererName);
Object manager = managerClass.newInstance();
if (!(manager instanceof Renderer)) {
throw new SystemException(rendererName + " is not a Renderer");
}
return (Renderer) manager; // depends on control dependency: [try], data = [none]
} catch (ClassNotFoundException e) {
// Legal - there might not a manager implementation in a given theme
return null;
} catch (InstantiationException e) { // depends on control dependency: [catch], data = [none]
throw new SystemException("Failed to instantiate " + rendererName, e);
} catch (IllegalAccessException e) { // depends on control dependency: [catch], data = [none]
throw new SystemException("Failed to access " + rendererName, e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public Configuration create(File file) {
try {
Configuration configuration;
InputStream stream = new BufferedInputStream(new FileInputStream(file));
try {
configuration = create(stream);
} finally {
stream.close();
}
return configuration;
} catch (IOException e) {
throw new TextProcessorFactoryException(e);
}
} } | public class class_name {
public Configuration create(File file) {
try {
Configuration configuration;
InputStream stream = new BufferedInputStream(new FileInputStream(file));
try {
configuration = create(stream); // depends on control dependency: [try], data = [none]
} finally {
stream.close();
}
return configuration; // depends on control dependency: [try], data = [none]
} catch (IOException e) {
throw new TextProcessorFactoryException(e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
/**
 * Resolves a single "${...}" expression string: first via pluggable resolvers
 * (e.g. the vault), then — if those made no progress — via standard expression
 * resolution. Returns the input unchanged when nothing could resolve it.
 *
 * @param unresolvedString the expression, including the "${" prefix and "}" suffix
 * @return the resolved string, or {@code unresolvedString} if unresolvable
 * @throws OperationFailedException if resolution fails
 */
private String resolveExpressionString(final String unresolvedString) throws OperationFailedException {
// parseAndResolve should only be providing expressions with no leading or trailing chars
assert unresolvedString.startsWith("${") && unresolvedString.endsWith("}");
// Default result is no change from input
String result = unresolvedString;
ModelNode resolveNode = new ModelNode(new ValueExpression(unresolvedString));
// Try plug-in resolution; i.e. vault
resolvePluggableExpression(resolveNode);
// A node still of type EXPRESSION means the pluggable resolvers left it untouched.
if (resolveNode.getType() == ModelType.EXPRESSION ) {
// resolvePluggableExpression did nothing. Try standard resolution
String resolvedString = resolveStandardExpression(resolveNode);
if (!unresolvedString.equals(resolvedString)) {
// resolveStandardExpression made progress
result = resolvedString;
} // else there is nothing more we can do with this string
} else {
// resolvePluggableExpression made progress
result = resolveNode.asString();
}
return result;
} } | public class class_name {
private String resolveExpressionString(final String unresolvedString) throws OperationFailedException {
// parseAndResolve should only be providing expressions with no leading or trailing chars
assert unresolvedString.startsWith("${") && unresolvedString.endsWith("}");
// Default result is no change from input
String result = unresolvedString;
ModelNode resolveNode = new ModelNode(new ValueExpression(unresolvedString));
// Try plug-in resolution; i.e. vault
resolvePluggableExpression(resolveNode);
if (resolveNode.getType() == ModelType.EXPRESSION ) {
// resolvePluggableExpression did nothing. Try standard resolution
String resolvedString = resolveStandardExpression(resolveNode);
if (!unresolvedString.equals(resolvedString)) {
// resolveStandardExpression made progress
result = resolvedString; // depends on control dependency: [if], data = [none]
} // else there is nothing more we can do with this string
} else {
// resolvePluggableExpression made progress
result = resolveNode.asString();
}
return result;
} } |
public class class_name {
@SuppressWarnings("unchecked")
public static <T extends RemoteProxy> T getNewInstance(
RegistrationRequest request, GridRegistry registry) {
try {
String proxyClass = request.getConfiguration().proxy;
if (proxyClass == null) {
log.fine("No proxy class. Using default");
proxyClass = BaseRemoteProxy.class.getCanonicalName();
}
Class<?> clazz = Class.forName(proxyClass);
log.fine("Using class " + clazz.getName());
Object[] args = new Object[]{request, registry};
Class<?>[] argsClass = new Class[]{RegistrationRequest.class, GridRegistry.class};
Constructor<?> c = clazz.getConstructor(argsClass);
Object proxy = c.newInstance(args);
if (proxy instanceof RemoteProxy) {
((RemoteProxy) proxy).setupTimeoutListener();
return (T) proxy;
}
throw new InvalidParameterException("Error: " + proxy.getClass() + " isn't a remote proxy");
} catch (InvocationTargetException e) {
throw new InvalidParameterException("Error: " + e.getTargetException().getMessage());
} catch (Exception e) {
throw new InvalidParameterException("Error: " + e.getMessage());
}
} } | public class class_name {
@SuppressWarnings("unchecked")
public static <T extends RemoteProxy> T getNewInstance(
RegistrationRequest request, GridRegistry registry) {
try {
String proxyClass = request.getConfiguration().proxy;
if (proxyClass == null) {
log.fine("No proxy class. Using default"); // depends on control dependency: [if], data = [none]
proxyClass = BaseRemoteProxy.class.getCanonicalName(); // depends on control dependency: [if], data = [none]
}
Class<?> clazz = Class.forName(proxyClass);
log.fine("Using class " + clazz.getName());
Object[] args = new Object[]{request, registry};
Class<?>[] argsClass = new Class[]{RegistrationRequest.class, GridRegistry.class};
Constructor<?> c = clazz.getConstructor(argsClass);
Object proxy = c.newInstance(args);
if (proxy instanceof RemoteProxy) {
((RemoteProxy) proxy).setupTimeoutListener();
return (T) proxy;
}
throw new InvalidParameterException("Error: " + proxy.getClass() + " isn't a remote proxy");
} catch (InvocationTargetException e) {
throw new InvalidParameterException("Error: " + e.getTargetException().getMessage());
} catch (Exception e) {
throw new InvalidParameterException("Error: " + e.getMessage());
}
} } |
public class class_name {
private ArtifactNotification collectNotificationsForPrefix(String prefix, Set<String> paths) {
Set<String> gatheredPaths = new HashSet<String>();
if ("/".equals(prefix)) {
gatheredPaths.addAll(paths);
} else {
for (String path : paths) {
if (path.startsWith(prefix + "/") || path.equals(prefix))
gatheredPaths.add(path);
}
}
return new DefaultArtifactNotification(root, gatheredPaths);
} } | public class class_name {
private ArtifactNotification collectNotificationsForPrefix(String prefix, Set<String> paths) {
Set<String> gatheredPaths = new HashSet<String>();
if ("/".equals(prefix)) {
gatheredPaths.addAll(paths); // depends on control dependency: [if], data = [none]
} else {
for (String path : paths) {
if (path.startsWith(prefix + "/") || path.equals(prefix))
gatheredPaths.add(path);
}
}
return new DefaultArtifactNotification(root, gatheredPaths);
} } |
public class class_name {
@Override
public boolean configure(final FeatureContext context) {
final Configuration config = context.getConfiguration();
if (!config.isRegistered(EntityFieldsProcessor.class)) {
// register EntityFilteringFeature
if (!config.isRegistered(EntityFilteringFeature.class)) {
context.register(EntityFilteringFeature.class);
}
// Entity Processors.
context.register(EntityFieldsProcessor.class);
// Scope Resolver.
context.register(EntityFieldsScopeResolver.class);
return true;
}
return false;
} } | public class class_name {
@Override
public boolean configure(final FeatureContext context) {
final Configuration config = context.getConfiguration();
if (!config.isRegistered(EntityFieldsProcessor.class)) {
// register EntityFilteringFeature
if (!config.isRegistered(EntityFilteringFeature.class)) {
context.register(EntityFilteringFeature.class); // depends on control dependency: [if], data = [none]
}
// Entity Processors.
context.register(EntityFieldsProcessor.class); // depends on control dependency: [if], data = [none]
// Scope Resolver.
context.register(EntityFieldsScopeResolver.class); // depends on control dependency: [if], data = [none]
return true; // depends on control dependency: [if], data = [none]
}
return false;
} } |
public class class_name {
/**
 * Removes the quota for the group of nodes matching the pattern, then removes the
 * tracked data size of every matching node that has no explicitly set per-node quota.
 *
 * @param repositoryName repository name
 * @param workspaceName  workspace name
 * @param patternPath    path pattern identifying the group of nodes
 */
public void removeGroupOfNodesAndDataSize(String repositoryName, String workspaceName, String patternPath)
{
removeGroupOfNodesQuota(repositoryName, workspaceName, patternPath);
// removes data size for all nodes matched by pattern
// only if only quota was not set explicitly
for (String nodePath : getAllTrackedNodes(repositoryName, workspaceName))
{
if (PathPatternUtils.acceptName(patternPath, nodePath))
{
try
{
// NOTE(review): exception-as-signal — UnknownQuotaLimitException here means
// "no explicit quota set for this node", which is the only case where the
// tracked data size is removed.
getNodeQuota(repositoryName, workspaceName, nodePath);
}
catch (UnknownQuotaLimitException e)
{
removeNodeDataSize(repositoryName, workspaceName, nodePath);
}
}
}
} } | public class class_name {
public void removeGroupOfNodesAndDataSize(String repositoryName, String workspaceName, String patternPath)
{
removeGroupOfNodesQuota(repositoryName, workspaceName, patternPath);
// removes data size for all nodes matched by pattern
// only if only quota was not set explicitly
for (String nodePath : getAllTrackedNodes(repositoryName, workspaceName))
{
if (PathPatternUtils.acceptName(patternPath, nodePath))
{
try
{
getNodeQuota(repositoryName, workspaceName, nodePath); // depends on control dependency: [try], data = [none]
}
catch (UnknownQuotaLimitException e)
{
removeNodeDataSize(repositoryName, workspaceName, nodePath);
} // depends on control dependency: [catch], data = [none]
}
}
} } |
public class class_name {
@SuppressWarnings("deprecation")
private void cancelRequestAndWorkers() {
for (ActorRef worker : workers.values()) {
if (worker != null && !worker.isTerminated()) {
worker.tell(OperationWorkerMsgType.CANCEL, getSelf());
}
}
logger.info("ExecutionManager sending cancelPendingRequest at time: "
+ PcDateUtils.getNowDateTimeStr());
} } | public class class_name {
@SuppressWarnings("deprecation")
private void cancelRequestAndWorkers() {
for (ActorRef worker : workers.values()) {
if (worker != null && !worker.isTerminated()) {
worker.tell(OperationWorkerMsgType.CANCEL, getSelf()); // depends on control dependency: [if], data = [none]
}
}
logger.info("ExecutionManager sending cancelPendingRequest at time: "
+ PcDateUtils.getNowDateTimeStr());
} } |
public class class_name {
private LocalEnvironment joinEnvironments(Context... contexts) {
//
Context head = contexts[0];
GlobalEnvironment global = head.getEnvironment().getParent();
HashSet<WyilFile.Decl.Variable> modified = new HashSet<>();
HashSet<WyilFile.Decl.Variable> deleted = new HashSet<>();
Map<WyilFile.Decl.Variable, WyalFile.VariableDeclaration> headLocals = head.environment.locals;
// Compute the modified and deleted sets
for (int i = 1; i < contexts.length; ++i) {
Context ithContext = contexts[i];
Map<WyilFile.Decl.Variable, WyalFile.VariableDeclaration> ithLocals = ithContext.environment.locals;
// First check env against head
for (Map.Entry<WyilFile.Decl.Variable, WyalFile.VariableDeclaration> e : ithLocals.entrySet()) {
WyilFile.Decl.Variable key = e.getKey();
WyalFile.VariableDeclaration s1 = e.getValue();
WyalFile.VariableDeclaration s2 = headLocals.get(key);
if (s1 == null) {
deleted.add(key);
} else if (!s1.equals(s2)) {
modified.add(key);
}
}
// Second, check head against env
for (Map.Entry<WyilFile.Decl.Variable, WyalFile.VariableDeclaration> e : headLocals.entrySet()) {
WyilFile.Decl.Variable key = e.getKey();
WyalFile.VariableDeclaration s1 = e.getValue();
WyalFile.VariableDeclaration s2 = ithLocals.get(key);
if (s1 == null) {
deleted.add(key);
} else if (!s1.equals(s2)) {
modified.add(key);
}
}
}
// Finally, construct the combined local map
IdentityHashMap<WyilFile.Decl.Variable, WyalFile.VariableDeclaration> combinedLocals = new IdentityHashMap<>();
for (Map.Entry<WyilFile.Decl.Variable, WyalFile.VariableDeclaration> e : headLocals.entrySet()) {
WyilFile.Decl.Variable key = e.getKey();
WyalFile.VariableDeclaration value = e.getValue();
if (deleted.contains(key)) {
// Ignore this entry. This must be checked before we look at
// modified (since variable can be marked both).
continue;
} else if (modified.contains(key)) {
// Update version number
value = global.allocateVersion(key);
}
combinedLocals.put(key, value);
}
// Now, use the modified and deleted sets to build the new environment
return new LocalEnvironment(global, combinedLocals);
} } | public class class_name {
private LocalEnvironment joinEnvironments(Context... contexts) {
//
Context head = contexts[0];
GlobalEnvironment global = head.getEnvironment().getParent();
HashSet<WyilFile.Decl.Variable> modified = new HashSet<>();
HashSet<WyilFile.Decl.Variable> deleted = new HashSet<>();
Map<WyilFile.Decl.Variable, WyalFile.VariableDeclaration> headLocals = head.environment.locals;
// Compute the modified and deleted sets
for (int i = 1; i < contexts.length; ++i) {
Context ithContext = contexts[i];
Map<WyilFile.Decl.Variable, WyalFile.VariableDeclaration> ithLocals = ithContext.environment.locals;
// First check env against head
for (Map.Entry<WyilFile.Decl.Variable, WyalFile.VariableDeclaration> e : ithLocals.entrySet()) {
WyilFile.Decl.Variable key = e.getKey();
WyalFile.VariableDeclaration s1 = e.getValue();
WyalFile.VariableDeclaration s2 = headLocals.get(key);
if (s1 == null) {
deleted.add(key); // depends on control dependency: [if], data = [none]
} else if (!s1.equals(s2)) {
modified.add(key); // depends on control dependency: [if], data = [none]
}
}
// Second, check head against env
for (Map.Entry<WyilFile.Decl.Variable, WyalFile.VariableDeclaration> e : headLocals.entrySet()) {
WyilFile.Decl.Variable key = e.getKey();
WyalFile.VariableDeclaration s1 = e.getValue();
WyalFile.VariableDeclaration s2 = ithLocals.get(key);
if (s1 == null) {
deleted.add(key); // depends on control dependency: [if], data = [none]
} else if (!s1.equals(s2)) {
modified.add(key); // depends on control dependency: [if], data = [none]
}
}
}
// Finally, construct the combined local map
IdentityHashMap<WyilFile.Decl.Variable, WyalFile.VariableDeclaration> combinedLocals = new IdentityHashMap<>();
for (Map.Entry<WyilFile.Decl.Variable, WyalFile.VariableDeclaration> e : headLocals.entrySet()) {
WyilFile.Decl.Variable key = e.getKey();
WyalFile.VariableDeclaration value = e.getValue();
if (deleted.contains(key)) {
// Ignore this entry. This must be checked before we look at
// modified (since variable can be marked both).
continue;
} else if (modified.contains(key)) {
// Update version number
value = global.allocateVersion(key); // depends on control dependency: [if], data = [none]
}
combinedLocals.put(key, value); // depends on control dependency: [for], data = [e]
}
// Now, use the modified and deleted sets to build the new environment
return new LocalEnvironment(global, combinedLocals);
} } |
public class class_name {
public StartRemediationExecutionRequest withResourceKeys(ResourceKey... resourceKeys) {
if (this.resourceKeys == null) {
setResourceKeys(new com.amazonaws.internal.SdkInternalList<ResourceKey>(resourceKeys.length));
}
for (ResourceKey ele : resourceKeys) {
this.resourceKeys.add(ele);
}
return this;
} } | public class class_name {
public StartRemediationExecutionRequest withResourceKeys(ResourceKey... resourceKeys) {
if (this.resourceKeys == null) {
setResourceKeys(new com.amazonaws.internal.SdkInternalList<ResourceKey>(resourceKeys.length)); // depends on control dependency: [if], data = [none]
}
for (ResourceKey ele : resourceKeys) {
this.resourceKeys.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} } |
public class class_name {
public static byte[] toByteArray(String value) {
if (value == null) {
return null;
}
try {
CharsetEncoder encoder = Charset.forName("UTF-8").newEncoder().onMalformedInput(CodingErrorAction.REPORT)
.onUnmappableCharacter(CodingErrorAction.REPORT);
ByteBuffer buf = encoder.encode(CharBuffer.wrap(value));
// don't use ByteBuffer.array(), as it returns internal, and
// possibly larger, byte array
byte[] res = new byte[buf.remaining()];
buf.get(res);
return res;
} catch (CharacterCodingException e) {
throw new RuntimeException("Unexpected exception", e);
}
} } | public class class_name {
public static byte[] toByteArray(String value) {
if (value == null) {
return null; // depends on control dependency: [if], data = [none]
}
try {
CharsetEncoder encoder = Charset.forName("UTF-8").newEncoder().onMalformedInput(CodingErrorAction.REPORT)
.onUnmappableCharacter(CodingErrorAction.REPORT);
ByteBuffer buf = encoder.encode(CharBuffer.wrap(value));
// don't use ByteBuffer.array(), as it returns internal, and
// possibly larger, byte array
byte[] res = new byte[buf.remaining()];
buf.get(res); // depends on control dependency: [try], data = [none]
return res; // depends on control dependency: [try], data = [none]
} catch (CharacterCodingException e) {
throw new RuntimeException("Unexpected exception", e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public void enableOverride(int overrideId, int pathId, String clientUUID) throws Exception {
// get profileId from pathId
int profileId = PathOverrideService.getInstance().getPath(pathId).getProfileId();
int newPriority = 0;
// we want to limit -1, -2 to only be added once since they are the Custom responses/requests
if (overrideId == Constants.PLUGIN_REQUEST_OVERRIDE_CUSTOM) {
if (this.getEnabledEndpoint(pathId, overrideId, null, clientUUID) != null) {
return;
}
}
// need to first determine the highest enabled order value for this path
HashMap<String, Object> priorities = sqlService.getFirstResult(
"SELECT * FROM " + Constants.DB_TABLE_ENABLED_OVERRIDE +
" WHERE " + Constants.REQUEST_RESPONSE_PATH_ID + "=" + pathId +
" AND " + Constants.GENERIC_CLIENT_UUID + "='" + clientUUID +
"' ORDER BY + " + Constants.ENABLED_OVERRIDES_PRIORITY + " DESC"
);
if (priorities != null) {
newPriority = Integer.valueOf(priorities.get(Constants.ENABLED_OVERRIDES_PRIORITY.toUpperCase()).toString()) + 1;
}
PreparedStatement statement = null;
try (Connection sqlConnection = sqlService.getConnection()) {
PreparedStatement query = null;
ResultSet results = null;
SQLService sqlService = SQLService.getInstance();
com.groupon.odo.proxylib.models.Method method = null;
query = sqlConnection.prepareStatement(
"SELECT * FROM " + Constants.DB_TABLE_OVERRIDE +
" WHERE " + Constants.GENERIC_ID + " = ?"
);
query.setString(1, String.valueOf(overrideId));
results = query.executeQuery();
JSONSerializer serializer = new JSONSerializer();
if (results.next()) {
String className = results.getString(Constants.OVERRIDE_CLASS_NAME);
String methodName = results.getString(Constants.OVERRIDE_METHOD_NAME);
method = PluginManager.getInstance().getMethod(className, methodName);
}
statement = sqlConnection.prepareStatement(
"INSERT INTO " + Constants.DB_TABLE_ENABLED_OVERRIDE +
"(" + Constants.GENERIC_PROFILE_ID + "," + Constants.GENERIC_CLIENT_UUID + "," +
Constants.REQUEST_RESPONSE_PATH_ID + "," + Constants.ENABLED_OVERRIDES_OVERRIDE_ID + "," +
Constants.ENABLED_OVERRIDES_PRIORITY + "," + Constants.ENABLED_OVERRIDES_ARGUMENTS + "," +
Constants.ENABLED_OVERRIDES_RESPONSE_CODE + ")" +
" VALUES (?, ?, ?, ?, ?, ?, ?);"
);
statement.setInt(1, profileId);
statement.setString(2, clientUUID);
statement.setInt(3, pathId);
statement.setInt(4, overrideId);
statement.setInt(5, newPriority);
if (method == null) {
statement.setString(6, "");
} else {
ArrayList<String> argDefaults = new ArrayList<String>();
for (int i = 0; i < method.getMethodArguments().length; i++) {
if (i < method.getMethodDefaultArguments().length && method.getMethodDefaultArguments()[i] != null) {
argDefaults.add(String.valueOf(method.getMethodDefaultArguments()[i]));
} else {
argDefaults.add("");
}
}
statement.setString(6, serializer.serialize(argDefaults));
}
statement.setString(7,"200");
statement.executeUpdate();
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
if (statement != null) {
statement.close();
}
} catch (Exception e) {
}
}
} } | public class class_name {
public void enableOverride(int overrideId, int pathId, String clientUUID) throws Exception {
// get profileId from pathId
int profileId = PathOverrideService.getInstance().getPath(pathId).getProfileId();
int newPriority = 0;
// we want to limit -1, -2 to only be added once since they are the Custom responses/requests
if (overrideId == Constants.PLUGIN_REQUEST_OVERRIDE_CUSTOM) {
if (this.getEnabledEndpoint(pathId, overrideId, null, clientUUID) != null) {
return; // depends on control dependency: [if], data = [none]
}
}
// need to first determine the highest enabled order value for this path
HashMap<String, Object> priorities = sqlService.getFirstResult(
"SELECT * FROM " + Constants.DB_TABLE_ENABLED_OVERRIDE +
" WHERE " + Constants.REQUEST_RESPONSE_PATH_ID + "=" + pathId +
" AND " + Constants.GENERIC_CLIENT_UUID + "='" + clientUUID +
"' ORDER BY + " + Constants.ENABLED_OVERRIDES_PRIORITY + " DESC"
);
if (priorities != null) {
newPriority = Integer.valueOf(priorities.get(Constants.ENABLED_OVERRIDES_PRIORITY.toUpperCase()).toString()) + 1;
}
PreparedStatement statement = null;
try (Connection sqlConnection = sqlService.getConnection()) {
PreparedStatement query = null;
ResultSet results = null;
SQLService sqlService = SQLService.getInstance();
com.groupon.odo.proxylib.models.Method method = null;
query = sqlConnection.prepareStatement(
"SELECT * FROM " + Constants.DB_TABLE_OVERRIDE +
" WHERE " + Constants.GENERIC_ID + " = ?"
);
query.setString(1, String.valueOf(overrideId));
results = query.executeQuery();
JSONSerializer serializer = new JSONSerializer();
if (results.next()) {
String className = results.getString(Constants.OVERRIDE_CLASS_NAME);
String methodName = results.getString(Constants.OVERRIDE_METHOD_NAME);
method = PluginManager.getInstance().getMethod(className, methodName);
}
statement = sqlConnection.prepareStatement(
"INSERT INTO " + Constants.DB_TABLE_ENABLED_OVERRIDE +
"(" + Constants.GENERIC_PROFILE_ID + "," + Constants.GENERIC_CLIENT_UUID + "," +
Constants.REQUEST_RESPONSE_PATH_ID + "," + Constants.ENABLED_OVERRIDES_OVERRIDE_ID + "," +
Constants.ENABLED_OVERRIDES_PRIORITY + "," + Constants.ENABLED_OVERRIDES_ARGUMENTS + "," +
Constants.ENABLED_OVERRIDES_RESPONSE_CODE + ")" +
" VALUES (?, ?, ?, ?, ?, ?, ?);"
);
statement.setInt(1, profileId);
statement.setString(2, clientUUID);
statement.setInt(3, pathId);
statement.setInt(4, overrideId);
statement.setInt(5, newPriority);
if (method == null) {
statement.setString(6, "");
} else {
ArrayList<String> argDefaults = new ArrayList<String>();
for (int i = 0; i < method.getMethodArguments().length; i++) {
if (i < method.getMethodDefaultArguments().length && method.getMethodDefaultArguments()[i] != null) {
argDefaults.add(String.valueOf(method.getMethodDefaultArguments()[i])); // depends on control dependency: [if], data = [none]
} else {
argDefaults.add(""); // depends on control dependency: [if], data = [none]
}
}
statement.setString(6, serializer.serialize(argDefaults));
}
statement.setString(7,"200");
statement.executeUpdate();
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
if (statement != null) {
statement.close();
}
} catch (Exception e) {
}
}
} } |
public class class_name {
public Object getProperty(Class aClass, Object object, String property, boolean b, boolean b1) {
if (null == interceptor) {
return super.getProperty(aClass, object, property, b, b1);
}
if (interceptor instanceof PropertyAccessInterceptor) {
PropertyAccessInterceptor pae = (PropertyAccessInterceptor) interceptor;
Object result = pae.beforeGet(object, property);
if (interceptor.doInvoke()) {
result = super.getProperty(aClass, object, property, b, b1);
}
return result;
}
return super.getProperty(aClass, object, property, b, b1);
} } | public class class_name {
public Object getProperty(Class aClass, Object object, String property, boolean b, boolean b1) {
if (null == interceptor) {
return super.getProperty(aClass, object, property, b, b1); // depends on control dependency: [if], data = [none]
}
if (interceptor instanceof PropertyAccessInterceptor) {
PropertyAccessInterceptor pae = (PropertyAccessInterceptor) interceptor;
Object result = pae.beforeGet(object, property);
if (interceptor.doInvoke()) {
result = super.getProperty(aClass, object, property, b, b1); // depends on control dependency: [if], data = [none]
}
return result; // depends on control dependency: [if], data = [none]
}
return super.getProperty(aClass, object, property, b, b1);
} } |
public class class_name {
public HttpCookie getCookie(String name) {
if (null == name || 0 == this.parsedList.size()) {
return null;
}
for (HttpCookie cookie : this.parsedList) {
if (cookie.getName().equals(name)) {
return cookie;
}
}
return null;
} } | public class class_name {
public HttpCookie getCookie(String name) {
if (null == name || 0 == this.parsedList.size()) {
return null; // depends on control dependency: [if], data = [none]
}
for (HttpCookie cookie : this.parsedList) {
if (cookie.getName().equals(name)) {
return cookie; // depends on control dependency: [if], data = [none]
}
}
return null;
} } |
public class class_name {
public boolean iAmSubscribedTo(Jid jid) {
if (jid == null) {
return false;
}
BareJid bareJid = jid.asBareJid();
RosterEntry entry = getEntry(bareJid);
if (entry == null) {
return false;
}
return entry.canSeeHisPresence();
} } | public class class_name {
public boolean iAmSubscribedTo(Jid jid) {
if (jid == null) {
return false; // depends on control dependency: [if], data = [none]
}
BareJid bareJid = jid.asBareJid();
RosterEntry entry = getEntry(bareJid);
if (entry == null) {
return false; // depends on control dependency: [if], data = [none]
}
return entry.canSeeHisPresence();
} } |
public class class_name {
public void writeSectionDefinition(final OutputStream result)
{
try {
RtfHeaderFooterGroup header = convertHeaderFooter(this.header, RtfHeaderFooter.TYPE_HEADER);
RtfHeaderFooterGroup footer = convertHeaderFooter(this.footer, RtfHeaderFooter.TYPE_FOOTER);
if(header.hasTitlePage() || footer.hasTitlePage()) {
result.write(TITLE_PAGE);
header.setHasTitlePage();
footer.setHasTitlePage();
}
if(header.hasFacingPages() || footer.hasFacingPages()) {
result.write(FACING_PAGES);
header.setHasFacingPages();
footer.setHasFacingPages();
}
footer.writeContent(result);
header.writeContent(result);
pageSetting.writeSectionDefinition(result);
} catch(IOException ioe) {
ioe.printStackTrace();
}
} } | public class class_name {
public void writeSectionDefinition(final OutputStream result)
{
try {
RtfHeaderFooterGroup header = convertHeaderFooter(this.header, RtfHeaderFooter.TYPE_HEADER);
RtfHeaderFooterGroup footer = convertHeaderFooter(this.footer, RtfHeaderFooter.TYPE_FOOTER);
if(header.hasTitlePage() || footer.hasTitlePage()) {
result.write(TITLE_PAGE); // depends on control dependency: [if], data = [none]
header.setHasTitlePage(); // depends on control dependency: [if], data = [none]
footer.setHasTitlePage(); // depends on control dependency: [if], data = [none]
}
if(header.hasFacingPages() || footer.hasFacingPages()) {
result.write(FACING_PAGES); // depends on control dependency: [if], data = [none]
header.setHasFacingPages(); // depends on control dependency: [if], data = [none]
footer.setHasFacingPages(); // depends on control dependency: [if], data = [none]
}
footer.writeContent(result); // depends on control dependency: [try], data = [none]
header.writeContent(result); // depends on control dependency: [try], data = [none]
pageSetting.writeSectionDefinition(result); // depends on control dependency: [try], data = [none]
} catch(IOException ioe) {
ioe.printStackTrace();
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public boolean isInState(JComponent c) {
if (c != null && c.getParent() != null) {
SeaGlassScrollBarUI ui = (SeaGlassScrollBarUI) ((JScrollBar) c.getParent()).getUI();
if (ui.getIncreaseButton() == c) {
return true;
}
}
return false;
} } | public class class_name {
public boolean isInState(JComponent c) {
if (c != null && c.getParent() != null) {
SeaGlassScrollBarUI ui = (SeaGlassScrollBarUI) ((JScrollBar) c.getParent()).getUI();
if (ui.getIncreaseButton() == c) {
return true; // depends on control dependency: [if], data = [none]
}
}
return false;
} } |
public class class_name {
@Override
public void onStart(ITestContext context) {
logger.entering(context);
if (ListenerManager.isCurrentMethodSkipped(this)) {
logger.exiting(ListenerManager.THREAD_EXCLUSION_MSG);
return;
}
String testName = context.getCurrentXmlTest().getName();
// initializing the ConfigSummaryData before initializers so that config details can be added.
ConfigSummaryData.initLocalConfigSummary(testName);
// We have to ensure that our configuration is the first thing that gets loaded.
// Our loading mechanism is going to be via listeners. But the problem with TestNG listeners is
// that the order of loading is never guaranteed. Things become complicated if our downstream consumers
// want to piggy back on our configuration and want to have their configurations loaded as well.
// Because of all these issues, we cannot rely on building a config specific listener.
// So we are relying on the ServiceLoaders in Java to do it for us. The moment we are here, we ensure that
// not only the SeLion specific configurations are loaded, but all downstream consumer's configurations
// are loaded as well along with us. So SeLion now becomes the single point of initialization and thus
// does away with all the inherent setbacks that are associated with TestNG listeners orders.
invokeInitializersBasedOnPriority(context);
ConfigManager.printConfiguration(testName);
ISuite suite = context.getSuite();
if (!suite.getParallel().equals("false") && logger.isLoggable(Level.FINE)) {
logger.log(Level.FINE, "Parallel suite execution. Updating SeLion local config for Test, "
+ context.getCurrentXmlTest().getName());
}
String base = suite.getOutputDirectory();
String suiteName = suite.getName();
String rootFolder = filterOutputDirectory(base, suiteName);
SeLionReporter.setTestNGOutputFolder(rootFolder);
SeLionReporter.init();
logger.exiting();
} } | public class class_name {
@Override
public void onStart(ITestContext context) {
logger.entering(context);
if (ListenerManager.isCurrentMethodSkipped(this)) {
logger.exiting(ListenerManager.THREAD_EXCLUSION_MSG); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
String testName = context.getCurrentXmlTest().getName();
// initializing the ConfigSummaryData before initializers so that config details can be added.
ConfigSummaryData.initLocalConfigSummary(testName);
// We have to ensure that our configuration is the first thing that gets loaded.
// Our loading mechanism is going to be via listeners. But the problem with TestNG listeners is
// that the order of loading is never guaranteed. Things become complicated if our downstream consumers
// want to piggy back on our configuration and want to have their configurations loaded as well.
// Because of all these issues, we cannot rely on building a config specific listener.
// So we are relying on the ServiceLoaders in Java to do it for us. The moment we are here, we ensure that
// not only the SeLion specific configurations are loaded, but all downstream consumer's configurations
// are loaded as well along with us. So SeLion now becomes the single point of initialization and thus
// does away with all the inherent setbacks that are associated with TestNG listeners orders.
invokeInitializersBasedOnPriority(context);
ConfigManager.printConfiguration(testName);
ISuite suite = context.getSuite();
if (!suite.getParallel().equals("false") && logger.isLoggable(Level.FINE)) {
logger.log(Level.FINE, "Parallel suite execution. Updating SeLion local config for Test, "
+ context.getCurrentXmlTest().getName()); // depends on control dependency: [if], data = [none]
}
String base = suite.getOutputDirectory();
String suiteName = suite.getName();
String rootFolder = filterOutputDirectory(base, suiteName);
SeLionReporter.setTestNGOutputFolder(rootFolder);
SeLionReporter.init();
logger.exiting();
} } |
public class class_name {
private ScanPlan createPlan(String scanId, ScanOptions options) {
ScanPlan plan = new ScanPlan(scanId, options);
for (String placement : options.getPlacements()) {
String cluster = _dataTools.getPlacementCluster(placement);
ScanRangeSplits scanRangeSplits = _dataTools.getScanRangeSplits(placement, options.getRangeScanSplitSize(), Optional.<ScanRange>absent());
if (!options.isScanByAZ()) {
// Optionally we can reduce load across the ring by limiting scans AZ at a time. However, the caller
// has requested to scan all token ranges as quickly as possible, so collapse all token ranges into a
// single group.
scanRangeSplits = scanRangeSplits.combineGroups();
}
for (ScanRangeSplits.SplitGroup splitGroup : scanRangeSplits.getSplitGroups()) {
// Start a new batch, indicating the subsequent token ranges can be scanned in parallel
plan.startNewBatchForCluster(cluster);
// Add the scan ranges associated with each token range in the split group to the batch
for (ScanRangeSplits.TokenRange tokenRange : splitGroup.getTokenRanges()) {
plan.addTokenRangeToCurrentBatchForCluster(cluster, placement, tokenRange.getScanRanges());
}
}
}
return plan;
} } | public class class_name {
private ScanPlan createPlan(String scanId, ScanOptions options) {
ScanPlan plan = new ScanPlan(scanId, options);
for (String placement : options.getPlacements()) {
String cluster = _dataTools.getPlacementCluster(placement);
ScanRangeSplits scanRangeSplits = _dataTools.getScanRangeSplits(placement, options.getRangeScanSplitSize(), Optional.<ScanRange>absent());
if (!options.isScanByAZ()) {
// Optionally we can reduce load across the ring by limiting scans AZ at a time. However, the caller
// has requested to scan all token ranges as quickly as possible, so collapse all token ranges into a
// single group.
scanRangeSplits = scanRangeSplits.combineGroups(); // depends on control dependency: [if], data = [none]
}
for (ScanRangeSplits.SplitGroup splitGroup : scanRangeSplits.getSplitGroups()) {
// Start a new batch, indicating the subsequent token ranges can be scanned in parallel
plan.startNewBatchForCluster(cluster); // depends on control dependency: [for], data = [none]
// Add the scan ranges associated with each token range in the split group to the batch
for (ScanRangeSplits.TokenRange tokenRange : splitGroup.getTokenRanges()) {
plan.addTokenRangeToCurrentBatchForCluster(cluster, placement, tokenRange.getScanRanges()); // depends on control dependency: [for], data = [tokenRange]
}
}
}
return plan;
} } |
public class class_name {
public TypeMetadata combineAll(TypeMetadata other) {
Assert.checkNonNull(other);
TypeMetadata out = new TypeMetadata();
Set<Entry.Kind> keys = new HashSet<>(contents.keySet());
keys.addAll(other.contents.keySet());
for(Entry.Kind key : keys) {
if (contents.containsKey(key)) {
if (other.contents.containsKey(key)) {
out.add(key, contents.get(key).combine(other.contents.get(key)));
} else {
out.add(key, contents.get(key));
}
} else if (other.contents.containsKey(key)) {
out.add(key, other.contents.get(key));
}
}
return out;
} } | public class class_name {
public TypeMetadata combineAll(TypeMetadata other) {
Assert.checkNonNull(other);
TypeMetadata out = new TypeMetadata();
Set<Entry.Kind> keys = new HashSet<>(contents.keySet());
keys.addAll(other.contents.keySet());
for(Entry.Kind key : keys) {
if (contents.containsKey(key)) {
if (other.contents.containsKey(key)) {
out.add(key, contents.get(key).combine(other.contents.get(key))); // depends on control dependency: [if], data = [none]
} else {
out.add(key, contents.get(key)); // depends on control dependency: [if], data = [none]
}
} else if (other.contents.containsKey(key)) {
out.add(key, other.contents.get(key)); // depends on control dependency: [if], data = [none]
}
}
return out;
} } |
public class class_name {
public void addError(final String message) {
if (config.collectErrors) {
if (errors == null) {
errors = new ArrayList<>();
}
errors.add(message);
}
} } | public class class_name {
public void addError(final String message) {
if (config.collectErrors) {
if (errors == null) {
errors = new ArrayList<>(); // depends on control dependency: [if], data = [none]
}
errors.add(message); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public <CTX> SafeHtml sanitize(
@Nullable String html,
@Nullable HtmlChangeListener<CTX> listener, @Nullable CTX context) {
if (html == null) { return SafeHtml.EMPTY; }
return UncheckedConversions.safeHtmlFromStringKnownToSatisfyTypeContract(
f.sanitize(html, listener, context));
} } | public class class_name {
public <CTX> SafeHtml sanitize(
@Nullable String html,
@Nullable HtmlChangeListener<CTX> listener, @Nullable CTX context) {
if (html == null) { return SafeHtml.EMPTY; } // depends on control dependency: [if], data = [none]
return UncheckedConversions.safeHtmlFromStringKnownToSatisfyTypeContract(
f.sanitize(html, listener, context));
} } |
public class class_name {
@Override
public synchronized void onProgressUpdate(float progress) {
if (mIsFinished) {
return;
}
try {
onProgressUpdateImpl(progress);
} catch (Exception e) {
onUnhandledException(e);
}
} } | public class class_name {
@Override
public synchronized void onProgressUpdate(float progress) {
if (mIsFinished) {
return; // depends on control dependency: [if], data = [none]
}
try {
onProgressUpdateImpl(progress); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
onUnhandledException(e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public synchronized CompilerResults process() {
// Create the list to hold all of the exceptions.
Set<Throwable> exceptions = new TreeSet<Throwable>(new ThrowableComparator());
long start = new Date().getTime();
stats.setFileCount(files.size());
// Trigger the compilation of the templates via the template cache. If
// no building is going on, then the compile() method is used which
// doesn't actually save the templates. This reduces drastically the
// memory requirements.
if (options.formatters.size() > 0) {
for (File f : files) {
ccache.retrieve(f.getAbsolutePath(), false);
}
} else {
// FIXME: Determine if this does the correct thing (nothing) for a syntax check.
for (File f : files) {
if (!f.isAbsolute() && options.annotationBaseDirectory != null) {
f = new File(options.annotationBaseDirectory, f.getPath());
}
ccache.compile(f.getAbsolutePath());
}
}
// Now continually loop through the result queue until we have all
// of the results.
while (remainingTasks.get() > 0) {
try {
Future<? extends TaskResult> future = resultsQueue.take();
try {
stats.incrementFinishedTasks(future.get().type);
} catch (ExecutionException ee) {
exceptions.add(ee.getCause());
}
remainingTasks.decrementAndGet();
stats.updateMemoryInfo();
} catch (InterruptedException consumed) {
}
}
// Shutdown the executors. In certain environments (e.g. eclipse) the
// required "modifyThread" permission may not have been granted. Not
// having this permission may cause a thread leak.
try {
SecurityManager security = System.getSecurityManager();
if (security != null) {
security.checkPermission(new RuntimePermission("modifyThread"));
}
// We've got the correct permission, so tell all of the executors to
// shutdown.
for (TaskResult.ResultType t : TaskResult.ResultType.values()) {
executors.get(t).shutdown();
}
} catch (SecurityException se) {
// Emit a warning about the missing permission.
System.err.println("WARNING: missing modifyThread permission");
}
// Finalize the statistics.
long end = new Date().getTime();
stats.setBuildTime(end - start);
return new CompilerResults(stats, exceptions);
} } | public class class_name {
public synchronized CompilerResults process() {
// Create the list to hold all of the exceptions.
Set<Throwable> exceptions = new TreeSet<Throwable>(new ThrowableComparator());
long start = new Date().getTime();
stats.setFileCount(files.size());
// Trigger the compilation of the templates via the template cache. If
// no building is going on, then the compile() method is used which
// doesn't actually save the templates. This reduces drastically the
// memory requirements.
if (options.formatters.size() > 0) {
for (File f : files) {
ccache.retrieve(f.getAbsolutePath(), false); // depends on control dependency: [for], data = [f]
}
} else {
// FIXME: Determine if this does the correct thing (nothing) for a syntax check.
for (File f : files) {
if (!f.isAbsolute() && options.annotationBaseDirectory != null) {
f = new File(options.annotationBaseDirectory, f.getPath()); // depends on control dependency: [if], data = [none]
}
ccache.compile(f.getAbsolutePath()); // depends on control dependency: [for], data = [f]
}
}
// Now continually loop through the result queue until we have all
// of the results.
while (remainingTasks.get() > 0) {
try {
Future<? extends TaskResult> future = resultsQueue.take(); // depends on control dependency: [try], data = [none]
try {
stats.incrementFinishedTasks(future.get().type); // depends on control dependency: [try], data = [none]
} catch (ExecutionException ee) {
exceptions.add(ee.getCause());
} // depends on control dependency: [catch], data = [none]
remainingTasks.decrementAndGet(); // depends on control dependency: [try], data = [none]
stats.updateMemoryInfo(); // depends on control dependency: [try], data = [none]
} catch (InterruptedException consumed) {
} // depends on control dependency: [catch], data = [none]
}
// Shutdown the executors. In certain environments (e.g. eclipse) the
// required "modifyThread" permission may not have been granted. Not
// having this permission may cause a thread leak.
try {
SecurityManager security = System.getSecurityManager();
if (security != null) {
security.checkPermission(new RuntimePermission("modifyThread")); // depends on control dependency: [if], data = [none]
}
// We've got the correct permission, so tell all of the executors to
// shutdown.
for (TaskResult.ResultType t : TaskResult.ResultType.values()) {
executors.get(t).shutdown(); // depends on control dependency: [for], data = [t]
}
} catch (SecurityException se) {
// Emit a warning about the missing permission.
System.err.println("WARNING: missing modifyThread permission");
} // depends on control dependency: [catch], data = [none]
// Finalize the statistics.
long end = new Date().getTime();
stats.setBuildTime(end - start);
return new CompilerResults(stats, exceptions);
} } |
public class class_name {
private void flush() {
try {
parent.append(future.get(60, TimeUnit.SECONDS).collect(this));
parent.append(buffer);
if (parent instanceof AsyncAppendable) {
((AsyncAppendable) parent).flush();
}
} catch (Exception e) {
throw new MustacheException(
MustacheProblem.RENDER_ASYNC_PROCESSING_ERROR, e);
}
} } | public class class_name {
private void flush() {
try {
parent.append(future.get(60, TimeUnit.SECONDS).collect(this)); // depends on control dependency: [try], data = [none]
parent.append(buffer); // depends on control dependency: [try], data = [none]
if (parent instanceof AsyncAppendable) {
((AsyncAppendable) parent).flush(); // depends on control dependency: [if], data = [none]
}
} catch (Exception e) {
throw new MustacheException(
MustacheProblem.RENDER_ASYNC_PROCESSING_ERROR, e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
@Override
public boolean exists(String resource, String[] paths) throws IOException {
for (ResourceLoader loader : loaders) {
if (loader.exists(resource, paths)) {
return true;
}
}
return false;
} } | public class class_name {
@Override
public boolean exists(String resource, String[] paths) throws IOException {
for (ResourceLoader loader : loaders) {
if (loader.exists(resource, paths)) {
return true; // depends on control dependency: [if], data = [none]
}
}
return false;
} } |
public class class_name {
private void drawHeatMap(Graphics2D chartGraphics, double[][] data) {
// Calculate the available size for the heatmap.
int noYCells = data.length;
int noXCells = data[0].length;
// double dataMin = min(data);
// double dataMax = max(data);
BufferedImage heatMapImage = new BufferedImage(heatMapSize.width, heatMapSize.height,
BufferedImage.TYPE_INT_ARGB);
Graphics2D heatMapGraphics = heatMapImage.createGraphics();
for (int x = 0; x < noXCells; x++) {
for (int y = 0; y < noYCells; y++) {
// Set colour depending on zValues.
heatMapGraphics.setColor(getCellColour(data[y][x], lowValue, highValue));
int cellX = x * cellSize.width;
int cellY = y * cellSize.height;
heatMapGraphics.fillRect(cellX, cellY, cellSize.width, cellSize.height);
}
}
// Draw the heat map onto the chart.
chartGraphics.drawImage(heatMapImage, heatMapTL.x, heatMapTL.y, heatMapSize.width,
heatMapSize.height, null);
} } | public class class_name {
private void drawHeatMap(Graphics2D chartGraphics, double[][] data) {
// Calculate the available size for the heatmap.
int noYCells = data.length;
int noXCells = data[0].length;
// double dataMin = min(data);
// double dataMax = max(data);
BufferedImage heatMapImage = new BufferedImage(heatMapSize.width, heatMapSize.height,
BufferedImage.TYPE_INT_ARGB);
Graphics2D heatMapGraphics = heatMapImage.createGraphics();
for (int x = 0; x < noXCells; x++) {
for (int y = 0; y < noYCells; y++) {
// Set colour depending on zValues.
heatMapGraphics.setColor(getCellColour(data[y][x], lowValue, highValue)); // depends on control dependency: [for], data = [y]
int cellX = x * cellSize.width;
int cellY = y * cellSize.height;
heatMapGraphics.fillRect(cellX, cellY, cellSize.width, cellSize.height); // depends on control dependency: [for], data = [none]
}
}
// Draw the heat map onto the chart.
chartGraphics.drawImage(heatMapImage, heatMapTL.x, heatMapTL.y, heatMapSize.width,
heatMapSize.height, null);
} } |
public class class_name {
public void printEndRecordGridData(PrintWriter out, int iPrintOptions)
{
out.println("</table>");
if ((iPrintOptions & HtmlConstants.DETAIL_SCREEN) == HtmlConstants.DETAIL_SCREEN)
{
out.println("</td>\n</tr>");
}
} } | public class class_name {
public void printEndRecordGridData(PrintWriter out, int iPrintOptions)
{
out.println("</table>");
if ((iPrintOptions & HtmlConstants.DETAIL_SCREEN) == HtmlConstants.DETAIL_SCREEN)
{
out.println("</td>\n</tr>"); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public int remove(Filter filter)
{
int size = trie.size();
for (Map.Entry<String, V> entry : entrySet())
{
if (filter.remove(entry))
{
trie.remove(entry.getKey());
}
}
return size - trie.size();
} } | public class class_name {
public int remove(Filter filter)
{
int size = trie.size();
for (Map.Entry<String, V> entry : entrySet())
{
if (filter.remove(entry))
{
trie.remove(entry.getKey()); // depends on control dependency: [if], data = [none]
}
}
return size - trie.size();
} } |
public class class_name {
public FormValidation doCheckDisplayName(@QueryParameter String displayName,
@QueryParameter String jobName) {
displayName = displayName.trim();
if(LOGGER.isLoggable(Level.FINE)) {
LOGGER.log(Level.FINE, "Current job name is " + jobName);
}
if(!isNameUnique(displayName, jobName)) {
return FormValidation.warning(Messages.Jenkins_CheckDisplayName_NameNotUniqueWarning(displayName));
}
else if(!isDisplayNameUnique(displayName, jobName)){
return FormValidation.warning(Messages.Jenkins_CheckDisplayName_DisplayNameNotUniqueWarning(displayName));
}
else {
return FormValidation.ok();
}
} } | public class class_name {
public FormValidation doCheckDisplayName(@QueryParameter String displayName,
@QueryParameter String jobName) {
displayName = displayName.trim();
if(LOGGER.isLoggable(Level.FINE)) {
LOGGER.log(Level.FINE, "Current job name is " + jobName); // depends on control dependency: [if], data = [none]
}
if(!isNameUnique(displayName, jobName)) {
return FormValidation.warning(Messages.Jenkins_CheckDisplayName_NameNotUniqueWarning(displayName)); // depends on control dependency: [if], data = [none]
}
else if(!isDisplayNameUnique(displayName, jobName)){
return FormValidation.warning(Messages.Jenkins_CheckDisplayName_DisplayNameNotUniqueWarning(displayName)); // depends on control dependency: [if], data = [none]
}
else {
return FormValidation.ok(); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@SuppressWarnings("unchecked")
@Override
public <T> T findOne(String jxQuery, String collectionName) {
CollectionMetaData collectionMeta = cmdMap.get(collectionName);
if((null == collectionMeta) || (!collectionsRef.get().containsKey(collectionName))) {
throw new InvalidJsonDbApiUsageException("Collection by name '" + collectionName + "' not found. Create collection first");
}
collectionMeta.getCollectionLock().readLock().lock();
try {
JXPathContext context = contextsRef.get().get(collectionName);
Iterator<T> resultItr = context.iterate(jxQuery);
while (resultItr.hasNext()) {
T document = resultItr.next();
Object obj = Util.deepCopy(document);
if(encrypted && collectionMeta.hasSecret() && null!= obj){
CryptoUtil.decryptFields(obj, collectionMeta, dbConfig.getCipher());
}
return (T) obj; // Return the first element we find.
}
return null;
} catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
logger.error("Error when decrypting value for a @Secret annotated field for entity: " + collectionName, e);
throw new JsonDBException("Error when decrypting value for a @Secret annotated field for entity: " + collectionName, e);
} finally {
collectionMeta.getCollectionLock().readLock().unlock();
}
} } | public class class_name {
@SuppressWarnings("unchecked")
@Override
public <T> T findOne(String jxQuery, String collectionName) {
CollectionMetaData collectionMeta = cmdMap.get(collectionName);
if((null == collectionMeta) || (!collectionsRef.get().containsKey(collectionName))) {
throw new InvalidJsonDbApiUsageException("Collection by name '" + collectionName + "' not found. Create collection first");
}
collectionMeta.getCollectionLock().readLock().lock();
try {
JXPathContext context = contextsRef.get().get(collectionName);
Iterator<T> resultItr = context.iterate(jxQuery);
while (resultItr.hasNext()) {
T document = resultItr.next();
Object obj = Util.deepCopy(document);
if(encrypted && collectionMeta.hasSecret() && null!= obj){
CryptoUtil.decryptFields(obj, collectionMeta, dbConfig.getCipher()); // depends on control dependency: [if], data = [none]
}
return (T) obj; // Return the first element we find. // depends on control dependency: [while], data = [none]
}
return null; // depends on control dependency: [try], data = [none]
} catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
logger.error("Error when decrypting value for a @Secret annotated field for entity: " + collectionName, e);
throw new JsonDBException("Error when decrypting value for a @Secret annotated field for entity: " + collectionName, e);
} finally { // depends on control dependency: [catch], data = [none]
collectionMeta.getCollectionLock().readLock().unlock();
}
} } |
public class class_name {
public static CommerceCurrency toModel(CommerceCurrencySoap soapModel) {
if (soapModel == null) {
return null;
}
CommerceCurrency model = new CommerceCurrencyImpl();
model.setUuid(soapModel.getUuid());
model.setCommerceCurrencyId(soapModel.getCommerceCurrencyId());
model.setGroupId(soapModel.getGroupId());
model.setCompanyId(soapModel.getCompanyId());
model.setUserId(soapModel.getUserId());
model.setUserName(soapModel.getUserName());
model.setCreateDate(soapModel.getCreateDate());
model.setModifiedDate(soapModel.getModifiedDate());
model.setCode(soapModel.getCode());
model.setName(soapModel.getName());
model.setRate(soapModel.getRate());
model.setFormatPattern(soapModel.getFormatPattern());
model.setMaxFractionDigits(soapModel.getMaxFractionDigits());
model.setMinFractionDigits(soapModel.getMinFractionDigits());
model.setRoundingMode(soapModel.getRoundingMode());
model.setPrimary(soapModel.isPrimary());
model.setPriority(soapModel.getPriority());
model.setActive(soapModel.isActive());
model.setLastPublishDate(soapModel.getLastPublishDate());
return model;
} } | public class class_name {
public static CommerceCurrency toModel(CommerceCurrencySoap soapModel) {
if (soapModel == null) {
return null; // depends on control dependency: [if], data = [none]
}
CommerceCurrency model = new CommerceCurrencyImpl();
model.setUuid(soapModel.getUuid());
model.setCommerceCurrencyId(soapModel.getCommerceCurrencyId());
model.setGroupId(soapModel.getGroupId());
model.setCompanyId(soapModel.getCompanyId());
model.setUserId(soapModel.getUserId());
model.setUserName(soapModel.getUserName());
model.setCreateDate(soapModel.getCreateDate());
model.setModifiedDate(soapModel.getModifiedDate());
model.setCode(soapModel.getCode());
model.setName(soapModel.getName());
model.setRate(soapModel.getRate());
model.setFormatPattern(soapModel.getFormatPattern());
model.setMaxFractionDigits(soapModel.getMaxFractionDigits());
model.setMinFractionDigits(soapModel.getMinFractionDigits());
model.setRoundingMode(soapModel.getRoundingMode());
model.setPrimary(soapModel.isPrimary());
model.setPriority(soapModel.getPriority());
model.setActive(soapModel.isActive());
model.setLastPublishDate(soapModel.getLastPublishDate());
return model;
} } |
public class class_name {
public String getScalarString(StructureMembers.Member m) {
if (m.getDataType() == DataType.STRING) {
Array data = getArray(m);
if (data == null)
data = getArray(m);
return (String) data.getObject(0);
} else {
char[] ba = getJavaArrayChar(m);
int count = 0;
while (count < ba.length) {
if (0 == ba[count]) break;
count++;
}
return new String(ba, 0, count);
}
} } | public class class_name {
public String getScalarString(StructureMembers.Member m) {
if (m.getDataType() == DataType.STRING) {
Array data = getArray(m);
if (data == null)
data = getArray(m);
return (String) data.getObject(0); // depends on control dependency: [if], data = [none]
} else {
char[] ba = getJavaArrayChar(m);
int count = 0;
while (count < ba.length) {
if (0 == ba[count]) break;
count++; // depends on control dependency: [while], data = [none]
}
return new String(ba, 0, count); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public void printHtmlPackageDir(PrintWriter out, String strTag, String strParams, String strData)
{
if (m_recDetail.getField("ClassPackage") != null)
{
String string = m_recDetail.getField("ClassPackage").toString();
string = string.replace('.', '/');
out.print(string);
}
} } | public class class_name {
public void printHtmlPackageDir(PrintWriter out, String strTag, String strParams, String strData)
{
if (m_recDetail.getField("ClassPackage") != null)
{
String string = m_recDetail.getField("ClassPackage").toString();
string = string.replace('.', '/'); // depends on control dependency: [if], data = [none]
out.print(string); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
void _emitRexRM(int w, int opReg, Operand rm) {
if (is64()) {
boolean r = (opReg & 0x8) != 0;
boolean x = false;
boolean b = false;
if (rm.isReg()) {
b = (((BaseReg) rm).code() & 0x8) != 0;
} else if (rm.isMem()) {
x = (((Mem) rm).index() & 0x8) != 0 && ((Mem) rm).index() != NO_REG;
b = (((Mem) rm).base() & 0x8) != 0 && ((Mem) rm).base() != NO_REG;
}
// w Default operand size(0=Default, 1=64 bits).
// r Register field (1=high bit extension of the ModR/M REG field).
// x Index field (1=high bit extension of the SIB Index field).
// b Base field (1=high bit extension of the ModR/M or SIB Base field).
if (w != 0 || r || x || b || (_properties & (1 << PROPERTY_X86_FORCE_REX)) != 0) {
_emitByte(0x40 | (w << 3) | (intValue(r) << 2) | (intValue(x) << 1) | intValue(b));
}
}
} } | public class class_name {
void _emitRexRM(int w, int opReg, Operand rm) {
if (is64()) {
boolean r = (opReg & 0x8) != 0;
boolean x = false;
boolean b = false;
if (rm.isReg()) {
b = (((BaseReg) rm).code() & 0x8) != 0; // depends on control dependency: [if], data = [none]
} else if (rm.isMem()) {
x = (((Mem) rm).index() & 0x8) != 0 && ((Mem) rm).index() != NO_REG; // depends on control dependency: [if], data = [none]
b = (((Mem) rm).base() & 0x8) != 0 && ((Mem) rm).base() != NO_REG; // depends on control dependency: [if], data = [none]
}
// w Default operand size(0=Default, 1=64 bits).
// r Register field (1=high bit extension of the ModR/M REG field).
// x Index field (1=high bit extension of the SIB Index field).
// b Base field (1=high bit extension of the ModR/M or SIB Base field).
if (w != 0 || r || x || b || (_properties & (1 << PROPERTY_X86_FORCE_REX)) != 0) {
_emitByte(0x40 | (w << 3) | (intValue(r) << 2) | (intValue(x) << 1) | intValue(b)); // depends on control dependency: [if], data = [(w]
}
}
} } |
public class class_name {
synchronized void checkForUpdates() {
if (!m_updateSet.isEmpty()) {
Set<CmsUUID> copiedIds = Sets.newHashSet(m_updateSet);
m_updateSet.clear();
if (copiedIds.contains(CmsUUID.getNullUUID())) {
LOG.info("Updating detail name cache: reloading...");
reload();
} else {
LOG.info("Updating detail name cache. Number of changed files: " + copiedIds.size());
CmsManyToOneMap<String, CmsUUID> cacheCopy = new CmsManyToOneMap<String, CmsUUID>(m_detailIdCache);
for (CmsUUID id : copiedIds) {
Set<String> urlNames = getUrlNames(id);
cacheCopy.removeValue(id);
for (String urlName : urlNames) {
cacheCopy.put(urlName, id);
}
}
m_detailIdCache = cacheCopy;
}
}
} } | public class class_name {
synchronized void checkForUpdates() {
if (!m_updateSet.isEmpty()) {
Set<CmsUUID> copiedIds = Sets.newHashSet(m_updateSet);
m_updateSet.clear(); // depends on control dependency: [if], data = [none]
if (copiedIds.contains(CmsUUID.getNullUUID())) {
LOG.info("Updating detail name cache: reloading..."); // depends on control dependency: [if], data = [none]
reload(); // depends on control dependency: [if], data = [none]
} else {
LOG.info("Updating detail name cache. Number of changed files: " + copiedIds.size()); // depends on control dependency: [if], data = [none]
CmsManyToOneMap<String, CmsUUID> cacheCopy = new CmsManyToOneMap<String, CmsUUID>(m_detailIdCache);
for (CmsUUID id : copiedIds) {
Set<String> urlNames = getUrlNames(id);
cacheCopy.removeValue(id); // depends on control dependency: [for], data = [id]
for (String urlName : urlNames) {
cacheCopy.put(urlName, id); // depends on control dependency: [for], data = [urlName]
}
}
m_detailIdCache = cacheCopy; // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
public static Long getLong(Config config, String path, Long def) {
if (config.hasPath(path)) {
return Long.valueOf(config.getLong(path));
}
return def;
} } | public class class_name {
public static Long getLong(Config config, String path, Long def) {
if (config.hasPath(path)) {
return Long.valueOf(config.getLong(path)); // depends on control dependency: [if], data = [none]
}
return def;
} } |
public class class_name {
public static <T, U extends T> int safeWrite(int num, ArrayWritable<U> aw, T[] array) {
final int elementsToCopy = Math.min(num, Math.min(aw.size(), array.length));
if (elementsToCopy <= 0) {
return 0;
}
aw.writeToArray(0, array, 0, elementsToCopy);
return elementsToCopy;
} } | public class class_name {
public static <T, U extends T> int safeWrite(int num, ArrayWritable<U> aw, T[] array) {
final int elementsToCopy = Math.min(num, Math.min(aw.size(), array.length));
if (elementsToCopy <= 0) {
return 0; // depends on control dependency: [if], data = [none]
}
aw.writeToArray(0, array, 0, elementsToCopy);
return elementsToCopy;
} } |
public class class_name {
private static void addEntryData(final DataSeries dataSeries, final SimpleDateFormat simpleDateFormat,
final Entry<String, List<GovernmentBodyAnnualOutcomeSummary>> entry) {
for (final GovernmentBodyAnnualOutcomeSummary data : entry.getValue()) {
final Map<Date, Double> valueMap = data.getValueMap();
for (final Entry<Date, Double> entryData : valueMap.entrySet()) {
if (entryData.getValue() != null && entryData.getValue().intValue() > 0) {
dataSeries.add(simpleDateFormat.format(entryData.getKey()) , entryData.getValue().intValue());
}
}
}
} } | public class class_name {
private static void addEntryData(final DataSeries dataSeries, final SimpleDateFormat simpleDateFormat,
final Entry<String, List<GovernmentBodyAnnualOutcomeSummary>> entry) {
for (final GovernmentBodyAnnualOutcomeSummary data : entry.getValue()) {
final Map<Date, Double> valueMap = data.getValueMap();
for (final Entry<Date, Double> entryData : valueMap.entrySet()) {
if (entryData.getValue() != null && entryData.getValue().intValue() > 0) {
dataSeries.add(simpleDateFormat.format(entryData.getKey()) , entryData.getValue().intValue()); // depends on control dependency: [if], data = [none]
}
}
}
} } |
public class class_name {
@Override
public Stream<Entry<K, V>> apply(Stream<T> streamIn) {
Assert.notNull(streamIn, "'streamIn' must not be null");
if (this.aggregator != null){
return streamIn.collect(Collectors.toMap(this.keyExtractor, this.valueExtractor, this.aggregator)).entrySet().stream();
}
else {
return streamIn.map(val -> KVUtils.kv(this.keyExtractor.apply(val), this.valueExtractor.apply(val)));
}
} } | public class class_name {
@Override
public Stream<Entry<K, V>> apply(Stream<T> streamIn) {
Assert.notNull(streamIn, "'streamIn' must not be null");
if (this.aggregator != null){
return streamIn.collect(Collectors.toMap(this.keyExtractor, this.valueExtractor, this.aggregator)).entrySet().stream(); // depends on control dependency: [if], data = [none]
}
else {
return streamIn.map(val -> KVUtils.kv(this.keyExtractor.apply(val), this.valueExtractor.apply(val))); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public <R> R fold(Function<? super T,? extends R> success, Function<? super Throwable,? extends R> failure){
try {
return success.apply(future.join());
}catch(Throwable t){
return failure.apply(t);
}
} } | public class class_name {
public <R> R fold(Function<? super T,? extends R> success, Function<? super Throwable,? extends R> failure){
try {
return success.apply(future.join()); // depends on control dependency: [try], data = [none]
}catch(Throwable t){
return failure.apply(t);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
@Override
public int doEndTag() throws JspException {
SQLExecutionTag parent = (SQLExecutionTag)
findAncestorWithClass(this, SQLExecutionTag.class);
if (parent == null) {
throw new JspTagException(
Resources.getMessage("SQL_PARAM_OUTSIDE_PARENT"));
}
Object paramValue = null;
if (value != null) {
paramValue = value;
} else if (bodyContent != null) {
paramValue = bodyContent.getString().trim();
if (((String) paramValue).trim().length() == 0) {
paramValue = null;
}
}
parent.addSQLParameter(paramValue);
return EVAL_PAGE;
} } | public class class_name {
@Override
public int doEndTag() throws JspException {
SQLExecutionTag parent = (SQLExecutionTag)
findAncestorWithClass(this, SQLExecutionTag.class);
if (parent == null) {
throw new JspTagException(
Resources.getMessage("SQL_PARAM_OUTSIDE_PARENT"));
}
Object paramValue = null;
if (value != null) {
paramValue = value;
} else if (bodyContent != null) {
paramValue = bodyContent.getString().trim();
if (((String) paramValue).trim().length() == 0) {
paramValue = null; // depends on control dependency: [if], data = [none]
}
}
parent.addSQLParameter(paramValue);
return EVAL_PAGE;
} } |
public class class_name {
public boolean performFinish() {
final IEclipsePreferences prefs = SARLEclipsePlugin.getDefault().getPreferences();
final String login = this.trackerLogin.getText();
if (Strings.isEmpty(login)) {
prefs.remove(PREFERENCE_LOGIN);
} else {
prefs.put(PREFERENCE_LOGIN, login);
}
try {
prefs.sync();
return true;
} catch (BackingStoreException e) {
ErrorDialog.openError(getShell(), e.getLocalizedMessage(), e.getLocalizedMessage(),
SARLEclipsePlugin.getDefault().createStatus(IStatus.ERROR, e));
return false;
}
} } | public class class_name {
public boolean performFinish() {
final IEclipsePreferences prefs = SARLEclipsePlugin.getDefault().getPreferences();
final String login = this.trackerLogin.getText();
if (Strings.isEmpty(login)) {
prefs.remove(PREFERENCE_LOGIN); // depends on control dependency: [if], data = [none]
} else {
prefs.put(PREFERENCE_LOGIN, login); // depends on control dependency: [if], data = [none]
}
try {
prefs.sync(); // depends on control dependency: [try], data = [none]
return true; // depends on control dependency: [try], data = [none]
} catch (BackingStoreException e) {
ErrorDialog.openError(getShell(), e.getLocalizedMessage(), e.getLocalizedMessage(),
SARLEclipsePlugin.getDefault().createStatus(IStatus.ERROR, e));
return false;
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public void parseAllCatalogs()
throws MalformedURLException, IOException {
// Parse all the subordinate catalogs
for (int catPos = 0; catPos < catalogs.size(); catPos++) {
Catalog c = null;
try {
c = (Catalog) catalogs.elementAt(catPos);
} catch (ClassCastException e) {
String catfile = (String) catalogs.elementAt(catPos);
c = newCatalog();
c.parseCatalog(catfile);
catalogs.setElementAt(c, catPos);
c.parseAllCatalogs();
}
}
// Parse all the DELEGATE catalogs
Enumeration enumt = catalogEntries.elements();
while (enumt.hasMoreElements()) {
CatalogEntry e = (CatalogEntry) enumt.nextElement();
if (e.getEntryType() == DELEGATE_PUBLIC
|| e.getEntryType() == DELEGATE_SYSTEM
|| e.getEntryType() == DELEGATE_URI) {
Catalog dcat = newCatalog();
dcat.parseCatalog(e.getEntryArg(1));
}
}
} } | public class class_name {
public void parseAllCatalogs()
throws MalformedURLException, IOException {
// Parse all the subordinate catalogs
for (int catPos = 0; catPos < catalogs.size(); catPos++) {
Catalog c = null;
try {
c = (Catalog) catalogs.elementAt(catPos); // depends on control dependency: [try], data = [none]
} catch (ClassCastException e) {
String catfile = (String) catalogs.elementAt(catPos);
c = newCatalog();
c.parseCatalog(catfile);
catalogs.setElementAt(c, catPos);
c.parseAllCatalogs();
} // depends on control dependency: [catch], data = [none]
}
// Parse all the DELEGATE catalogs
Enumeration enumt = catalogEntries.elements();
while (enumt.hasMoreElements()) {
CatalogEntry e = (CatalogEntry) enumt.nextElement();
if (e.getEntryType() == DELEGATE_PUBLIC
|| e.getEntryType() == DELEGATE_SYSTEM
|| e.getEntryType() == DELEGATE_URI) {
Catalog dcat = newCatalog();
dcat.parseCatalog(e.getEntryArg(1));
}
}
} } |
public class class_name {
@Override
public TimeZoneGenericNames cloneAsThawed() {
TimeZoneGenericNames copy = null;
try {
copy = (TimeZoneGenericNames)super.clone();
copy._frozen = false;
} catch (Throwable t) {
// This should never happen
}
return copy;
} } | public class class_name {
@Override
public TimeZoneGenericNames cloneAsThawed() {
TimeZoneGenericNames copy = null;
try {
copy = (TimeZoneGenericNames)super.clone(); // depends on control dependency: [try], data = [none]
copy._frozen = false; // depends on control dependency: [try], data = [none]
} catch (Throwable t) {
// This should never happen
} // depends on control dependency: [catch], data = [none]
return copy;
} } |
public class class_name {
protected static void gaussianKernel(double xx , double yy , GrayF64 xy , double sigma , GrayF64 output ) {
double sigma2 = sigma*sigma;
double N = xy.width*xy.height;
for( int y = 0; y < xy.height; y++ ) {
int index = xy.startIndex + y*xy.stride;
for( int x = 0; x < xy.width; x++ , index++ ) {
// (xx + yy - 2 * xy) / numel(x)
double value = (xx + yy - 2*xy.data[index])/N;
double v = Math.exp(-Math.max(0, value) / sigma2);
output.data[index] = v;
}
}
} } | public class class_name {
protected static void gaussianKernel(double xx , double yy , GrayF64 xy , double sigma , GrayF64 output ) {
double sigma2 = sigma*sigma;
double N = xy.width*xy.height;
for( int y = 0; y < xy.height; y++ ) {
int index = xy.startIndex + y*xy.stride;
for( int x = 0; x < xy.width; x++ , index++ ) {
// (xx + yy - 2 * xy) / numel(x)
double value = (xx + yy - 2*xy.data[index])/N;
double v = Math.exp(-Math.max(0, value) / sigma2);
output.data[index] = v; // depends on control dependency: [for], data = [none]
}
}
} } |
public class class_name {
private static short[] makeDecodingTable(final short[] encodingTable, final int numByteValues) {
final short[] decodingTable = new short[4096];
for (int byteValue = 0; byteValue < numByteValues; byteValue++) {
final int encodingEntry = encodingTable[byteValue] & 0xFFFF;
final int codeValue = encodingEntry & 0xfff;
final int codeLength = encodingEntry >> 12;
final int decodingEntry = (codeLength << 8) | byteValue;
final int garbageLength = 12 - codeLength;
final int numCopies = 1 << garbageLength;
for (int garbageBits = 0; garbageBits < numCopies; garbageBits++) {
final int extendedCodeValue = codeValue | (garbageBits << codeLength);
decodingTable[extendedCodeValue & 0xfff] = (short) decodingEntry;
}
}
return (decodingTable);
} } | public class class_name {
private static short[] makeDecodingTable(final short[] encodingTable, final int numByteValues) {
final short[] decodingTable = new short[4096];
for (int byteValue = 0; byteValue < numByteValues; byteValue++) {
final int encodingEntry = encodingTable[byteValue] & 0xFFFF;
final int codeValue = encodingEntry & 0xfff;
final int codeLength = encodingEntry >> 12;
final int decodingEntry = (codeLength << 8) | byteValue;
final int garbageLength = 12 - codeLength;
final int numCopies = 1 << garbageLength;
for (int garbageBits = 0; garbageBits < numCopies; garbageBits++) {
final int extendedCodeValue = codeValue | (garbageBits << codeLength);
decodingTable[extendedCodeValue & 0xfff] = (short) decodingEntry; // depends on control dependency: [for], data = [none]
}
}
return (decodingTable);
} } |
public class class_name {
public void setTerm(long term) {
if (term > this.term) {
this.term = term;
this.leader = null;
this.lastVotedFor = null;
meta.storeTerm(this.term);
meta.storeVote(this.lastVotedFor);
log.debug("Set term {}", term);
}
} } | public class class_name {
public void setTerm(long term) {
if (term > this.term) {
this.term = term; // depends on control dependency: [if], data = [none]
this.leader = null; // depends on control dependency: [if], data = [none]
this.lastVotedFor = null; // depends on control dependency: [if], data = [none]
meta.storeTerm(this.term); // depends on control dependency: [if], data = [this.term)]
meta.storeVote(this.lastVotedFor); // depends on control dependency: [if], data = [none]
log.debug("Set term {}", term); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public boolean deleteUser(String userName) {
// load user info from file
if (resourcePath != null) {
lock.writeLock().lock();
try {
File userDir = new File(resourcePath, "users");
if (userDir.isDirectory()) {
// get the file which corresponds to the user
File userFile = new File(userDir.getAbsolutePath(), userName);
return userFile.delete();
}
} finally {
lock.writeLock().unlock();
}
} // end if resourcePath not null
return false;
} } | public class class_name {
public boolean deleteUser(String userName) {
// load user info from file
if (resourcePath != null) {
lock.writeLock().lock(); // depends on control dependency: [if], data = [none]
try {
File userDir = new File(resourcePath, "users");
if (userDir.isDirectory()) {
// get the file which corresponds to the user
File userFile = new File(userDir.getAbsolutePath(), userName);
return userFile.delete(); // depends on control dependency: [if], data = [none]
}
} finally {
lock.writeLock().unlock();
}
} // end if resourcePath not null
return false;
} } |
public class class_name {
public String getProjectId() {
try {
String gcloudProject = gcloud.getConfig().getProject();
if (gcloudProject == null || gcloudProject.trim().isEmpty()) {
throw new RuntimeException("Project was not found in gcloud config");
}
return gcloudProject;
} catch (CloudSdkNotFoundException
| CloudSdkOutOfDateException
| CloudSdkVersionFileException
| IOException
| ProcessHandlerException ex) {
throw new RuntimeException("Failed to read project from gcloud config", ex);
}
} } | public class class_name {
public String getProjectId() {
try {
String gcloudProject = gcloud.getConfig().getProject();
if (gcloudProject == null || gcloudProject.trim().isEmpty()) {
throw new RuntimeException("Project was not found in gcloud config");
}
return gcloudProject; // depends on control dependency: [try], data = [none]
} catch (CloudSdkNotFoundException
| CloudSdkOutOfDateException
| CloudSdkVersionFileException
| IOException
| ProcessHandlerException ex) {
throw new RuntimeException("Failed to read project from gcloud config", ex);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
@Override
public EClass getNewServiceDescriptor() {
if (newServiceDescriptorEClass == null) {
newServiceDescriptorEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(StorePackage.eNS_URI).getEClassifiers().get(112);
}
return newServiceDescriptorEClass;
} } | public class class_name {
@Override
public EClass getNewServiceDescriptor() {
if (newServiceDescriptorEClass == null) {
newServiceDescriptorEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(StorePackage.eNS_URI).getEClassifiers().get(112);
// depends on control dependency: [if], data = [none]
}
return newServiceDescriptorEClass;
} } |
public class class_name {
public LatLonPoint projToLatLon(ProjectionPoint world, LatLonPointImpl result) {
double toLat, toLon;
double fromX = (world.getX() - falseEasting) / totalScale; // assumes cartesion coords in km
double fromY = (world.getY() - falseNorthing) / totalScale;
fromY = rho0 - fromY;
double rho = MapMath.distance(fromX, fromY);
if (rho != 0) {
if (n < 0.0) {
rho = -rho;
fromX = -fromX;
fromY = -fromY;
}
if (isSpherical)
toLat = 2.0 * Math.atan(Math.pow(c / rho, 1.0/n)) - MapMath.HALFPI;
else
toLat = MapMath.phi2(Math.pow(rho / c, 1.0/n), e);
toLon = Math.atan2(fromX, fromY) / n;
//coverity[swapped_arguments]
} else {
toLon = 0.0;
toLat = n > 0.0 ? MapMath.HALFPI : -MapMath.HALFPI;
}
result.setLatitude(Math.toDegrees(toLat));
result.setLongitude(Math.toDegrees(toLon) + lon0deg);
return result;
} } | public class class_name {
public LatLonPoint projToLatLon(ProjectionPoint world, LatLonPointImpl result) {
double toLat, toLon;
double fromX = (world.getX() - falseEasting) / totalScale; // assumes cartesion coords in km
double fromY = (world.getY() - falseNorthing) / totalScale;
fromY = rho0 - fromY;
double rho = MapMath.distance(fromX, fromY);
if (rho != 0) {
if (n < 0.0) {
rho = -rho;
// depends on control dependency: [if], data = [none]
fromX = -fromX;
// depends on control dependency: [if], data = [none]
fromY = -fromY;
// depends on control dependency: [if], data = [none]
}
if (isSpherical)
toLat = 2.0 * Math.atan(Math.pow(c / rho, 1.0/n)) - MapMath.HALFPI;
else
toLat = MapMath.phi2(Math.pow(rho / c, 1.0/n), e);
toLon = Math.atan2(fromX, fromY) / n;
// depends on control dependency: [if], data = [none]
//coverity[swapped_arguments]
} else {
toLon = 0.0;
// depends on control dependency: [if], data = [none]
toLat = n > 0.0 ? MapMath.HALFPI : -MapMath.HALFPI;
// depends on control dependency: [if], data = [none]
}
result.setLatitude(Math.toDegrees(toLat));
result.setLongitude(Math.toDegrees(toLon) + lon0deg);
return result;
} } |
public class class_name {
private ParseTree parseStatementStandard() {
switch (peekType()) {
case OPEN_CURLY:
return parseBlock();
case CONST:
case VAR:
return parseVariableStatement();
case SEMI_COLON:
return parseEmptyStatement();
case IF:
return parseIfStatement();
case DO:
return parseDoWhileStatement();
case WHILE:
return parseWhileStatement();
case FOR:
return parseForStatement();
case CONTINUE:
return parseContinueStatement();
case BREAK:
return parseBreakStatement();
case RETURN:
return parseReturnStatement();
case WITH:
return parseWithStatement();
case SWITCH:
return parseSwitchStatement();
case THROW:
return parseThrowStatement();
case TRY:
return parseTryStatement();
case DEBUGGER:
return parseDebuggerStatement();
default:
if (peekLabelledStatement()) {
return parseLabelledStatement();
}
return parseExpressionStatement();
}
} } | public class class_name {
private ParseTree parseStatementStandard() {
switch (peekType()) {
case OPEN_CURLY:
return parseBlock();
case CONST:
case VAR:
return parseVariableStatement();
case SEMI_COLON:
return parseEmptyStatement();
case IF:
return parseIfStatement();
case DO:
return parseDoWhileStatement();
case WHILE:
return parseWhileStatement();
case FOR:
return parseForStatement();
case CONTINUE:
return parseContinueStatement();
case BREAK:
return parseBreakStatement();
case RETURN:
return parseReturnStatement();
case WITH:
return parseWithStatement();
case SWITCH:
return parseSwitchStatement();
case THROW:
return parseThrowStatement();
case TRY:
return parseTryStatement();
case DEBUGGER:
return parseDebuggerStatement();
default:
if (peekLabelledStatement()) {
return parseLabelledStatement(); // depends on control dependency: [if], data = [none]
}
return parseExpressionStatement();
}
} } |
public class class_name {
protected synchronized int convertArrayToBaseOid(int oid) {
Integer i = pgArrayToPgType.get(oid);
if (i == null) {
return oid;
}
return i;
} } | public class class_name {
protected synchronized int convertArrayToBaseOid(int oid) {
Integer i = pgArrayToPgType.get(oid);
if (i == null) {
return oid; // depends on control dependency: [if], data = [none]
}
return i;
} } |
public class class_name {
@Override
public void applyConfig(final ConfigSettings config)
{
Object o = config
.getConfigParameter(ConfigurationKeys.PATH_PROGRAM_7ZIP);
if (o != null) {
controller.setEnable7Zip(true);
sevenZipPathField.setText((String) o);
}
else {
controller.setEnable7Zip(false);
sevenZipPathField.setText("");
}
} } | public class class_name {
@Override
public void applyConfig(final ConfigSettings config)
{
Object o = config
.getConfigParameter(ConfigurationKeys.PATH_PROGRAM_7ZIP);
if (o != null) {
controller.setEnable7Zip(true); // depends on control dependency: [if], data = [none]
sevenZipPathField.setText((String) o); // depends on control dependency: [if], data = [none]
}
else {
controller.setEnable7Zip(false); // depends on control dependency: [if], data = [none]
sevenZipPathField.setText(""); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static <T> void randomSample(Collection<T> source, Collection<T> dest, int samples, Random rand)
{
if(samples > source.size())
throw new IllegalArgumentException("Can not obtain a number of samples larger than the source population");
else if(samples <= 0)
throw new IllegalArgumentException("Sample size must be positive");
//Use samples to keep track of how many more samples we need
int remainingPopulation = source.size();
for(T member : source)
{
if(rand.nextInt(remainingPopulation) < samples)
{
dest.add(member);
samples--;
}
remainingPopulation--;
}
} } | public class class_name {
public static <T> void randomSample(Collection<T> source, Collection<T> dest, int samples, Random rand)
{
if(samples > source.size())
throw new IllegalArgumentException("Can not obtain a number of samples larger than the source population");
else if(samples <= 0)
throw new IllegalArgumentException("Sample size must be positive");
//Use samples to keep track of how many more samples we need
int remainingPopulation = source.size();
for(T member : source)
{
if(rand.nextInt(remainingPopulation) < samples)
{
dest.add(member); // depends on control dependency: [if], data = [none]
samples--; // depends on control dependency: [if], data = [none]
}
remainingPopulation--; // depends on control dependency: [for], data = [none]
}
} } |
public class class_name {
public static String buildTimeDifference(final Calendar pStartCalendar, final Calendar pEndCalendar) {
if (pStartCalendar == null) {
return CALENDAR_IS_NULL_ERROR_MESSAGE;
}
if (pEndCalendar == null) {
return CALENDAR_IS_NULL_ERROR_MESSAGE;
}
if (pEndCalendar.before(pStartCalendar)) {
return CALENDAR_CAUSATION_ERROR_MESSAGE;
}
int dateDiff = pEndCalendar.get(Calendar.DATE) - pStartCalendar.get(Calendar.DATE);
int hourDiff = pEndCalendar.get(Calendar.HOUR_OF_DAY) - pStartCalendar.get(Calendar.HOUR_OF_DAY);
int minuteDiff = pEndCalendar.get(Calendar.MINUTE) - pStartCalendar.get(Calendar.MINUTE);
int secondDiff = pEndCalendar.get(Calendar.SECOND) - pStartCalendar.get(Calendar.SECOND);
int milliSecondDiff = pEndCalendar.get(Calendar.MILLISECOND) - pStartCalendar.get(Calendar.MILLISECOND);
if (milliSecondDiff < 0) {
secondDiff--;
milliSecondDiff += 1000;
}
if (secondDiff < 0) {
minuteDiff--;
secondDiff += 60;
}
if (minuteDiff < 0) {
hourDiff--;
minuteDiff += 60;
}
while (dateDiff > 0) {
dateDiff--;
hourDiff += 24;
}
// Time difference presentation format
StringBuilder buffer = new StringBuilder();
if ((hourDiff == 0) && (minuteDiff == 0) && (secondDiff == 0)) {
buffer.append(milliSecondDiff);
buffer.append("ms");
} else if ((hourDiff == 0) && (minuteDiff == 0)) {
buffer.append(secondDiff);
buffer.append("s ");
buffer.append(milliSecondDiff);
buffer.append("ms");
} else if (hourDiff == 0) {
buffer.append(minuteDiff);
buffer.append("m ");
buffer.append(secondDiff);
buffer.append(",");
buffer.append(milliSecondDiff);
buffer.append("s");
} else {
buffer.append(hourDiff);
buffer.append("h ");
buffer.append(minuteDiff);
buffer.append("m ");
buffer.append(secondDiff);
buffer.append(",");
buffer.append(milliSecondDiff);
buffer.append("s");
}
return buffer.toString();
} } | public class class_name {
public static String buildTimeDifference(final Calendar pStartCalendar, final Calendar pEndCalendar) {
if (pStartCalendar == null) {
return CALENDAR_IS_NULL_ERROR_MESSAGE;
// depends on control dependency: [if], data = [none]
}
if (pEndCalendar == null) {
return CALENDAR_IS_NULL_ERROR_MESSAGE;
// depends on control dependency: [if], data = [none]
}
if (pEndCalendar.before(pStartCalendar)) {
return CALENDAR_CAUSATION_ERROR_MESSAGE;
// depends on control dependency: [if], data = [none]
}
int dateDiff = pEndCalendar.get(Calendar.DATE) - pStartCalendar.get(Calendar.DATE);
int hourDiff = pEndCalendar.get(Calendar.HOUR_OF_DAY) - pStartCalendar.get(Calendar.HOUR_OF_DAY);
int minuteDiff = pEndCalendar.get(Calendar.MINUTE) - pStartCalendar.get(Calendar.MINUTE);
int secondDiff = pEndCalendar.get(Calendar.SECOND) - pStartCalendar.get(Calendar.SECOND);
int milliSecondDiff = pEndCalendar.get(Calendar.MILLISECOND) - pStartCalendar.get(Calendar.MILLISECOND);
if (milliSecondDiff < 0) {
secondDiff--;
// depends on control dependency: [if], data = [none]
milliSecondDiff += 1000;
// depends on control dependency: [if], data = [none]
}
if (secondDiff < 0) {
minuteDiff--;
// depends on control dependency: [if], data = [none]
secondDiff += 60;
// depends on control dependency: [if], data = [none]
}
if (minuteDiff < 0) {
hourDiff--;
// depends on control dependency: [if], data = [none]
minuteDiff += 60;
// depends on control dependency: [if], data = [none]
}
while (dateDiff > 0) {
dateDiff--;
// depends on control dependency: [while], data = [none]
hourDiff += 24;
// depends on control dependency: [while], data = [none]
}
// Time difference presentation format
StringBuilder buffer = new StringBuilder();
if ((hourDiff == 0) && (minuteDiff == 0) && (secondDiff == 0)) {
buffer.append(milliSecondDiff);
// depends on control dependency: [if], data = [none]
buffer.append("ms");
// depends on control dependency: [if], data = [none]
} else if ((hourDiff == 0) && (minuteDiff == 0)) {
buffer.append(secondDiff);
// depends on control dependency: [if], data = [none]
buffer.append("s ");
// depends on control dependency: [if], data = [none]
buffer.append(milliSecondDiff);
// depends on control dependency: [if], data = [none]
buffer.append("ms");
// depends on control dependency: [if], data = [none]
} else if (hourDiff == 0) {
buffer.append(minuteDiff);
// depends on control dependency: [if], data = [none]
buffer.append("m ");
// depends on control dependency: [if], data = [none]
buffer.append(secondDiff);
// depends on control dependency: [if], data = [none]
buffer.append(",");
// depends on control dependency: [if], data = [none]
buffer.append(milliSecondDiff);
// depends on control dependency: [if], data = [none]
buffer.append("s");
// depends on control dependency: [if], data = [none]
} else {
buffer.append(hourDiff);
// depends on control dependency: [if], data = [(hourDiff]
buffer.append("h ");
// depends on control dependency: [if], data = [none]
buffer.append(minuteDiff);
// depends on control dependency: [if], data = [none]
buffer.append("m ");
// depends on control dependency: [if], data = [none]
buffer.append(secondDiff);
// depends on control dependency: [if], data = [none]
buffer.append(",");
// depends on control dependency: [if], data = [none]
buffer.append(milliSecondDiff);
// depends on control dependency: [if], data = [none]
buffer.append("s");
// depends on control dependency: [if], data = [none]
}
return buffer.toString();
} } |
public class class_name {
private boolean isSerializableType(Class<?> type) {
Boolean serializable = cache.get(type);
if (serializable != null) {
return serializable;
}
if (type == Object.class) {
return true;
}
if (primitiveTypes.contains(type)) {
cache.put(type, true);
return true;
}
if (JsonSerializable.class.isAssignableFrom(type)) {
cache.put(type, true);
return true;
}
for (Class<?> clazz : serializableTypes) {
if (clazz.isAssignableFrom(type)) {
cache.put(type, true);
return true;
}
}
cache.put(type, false);
return false;
} } | public class class_name {
private boolean isSerializableType(Class<?> type) {
Boolean serializable = cache.get(type);
if (serializable != null) {
return serializable; // depends on control dependency: [if], data = [none]
}
if (type == Object.class) {
return true; // depends on control dependency: [if], data = [none]
}
if (primitiveTypes.contains(type)) {
cache.put(type, true); // depends on control dependency: [if], data = [none]
return true; // depends on control dependency: [if], data = [none]
}
if (JsonSerializable.class.isAssignableFrom(type)) {
cache.put(type, true); // depends on control dependency: [if], data = [none]
return true; // depends on control dependency: [if], data = [none]
}
for (Class<?> clazz : serializableTypes) {
if (clazz.isAssignableFrom(type)) {
cache.put(type, true); // depends on control dependency: [if], data = [none]
return true; // depends on control dependency: [if], data = [none]
}
}
cache.put(type, false);
return false;
} } |
public class class_name {
public static String getParameter(ServletRequest request, String name) {
String s = request.getParameter(name);
if (s == null) {
return null;
}
s = s.trim();
return s.length() == 0? null: s;
} } | public class class_name {
public static String getParameter(ServletRequest request, String name) {
String s = request.getParameter(name);
if (s == null) {
return null; // depends on control dependency: [if], data = [none]
}
s = s.trim();
return s.length() == 0? null: s;
} } |
public class class_name {
@Override
public final TableTreeNode getNodeAtLine(final int row) {
TableTreeNode node = root.next(); // the root node is never used
for (int index = 0; node != null && index < row; index++) {
node = node.next();
}
return node;
} } | public class class_name {
@Override
public final TableTreeNode getNodeAtLine(final int row) {
TableTreeNode node = root.next(); // the root node is never used
for (int index = 0; node != null && index < row; index++) {
node = node.next(); // depends on control dependency: [for], data = [none]
}
return node;
} } |
public class class_name {
public int decode(IBitStream ld, int cnt) throws AACException {
int result = 0;
int num_align_bits = 0;
long num_sbr_bits1 = ld.getPosition();
int num_sbr_bits2;
int saved_start_freq, saved_samplerate_mode;
int saved_stop_freq, saved_freq_scale;
int saved_xover_band;
boolean saved_alter_scale;
int bs_extension_type = ld.readBits(4);
if(bs_extension_type==EXT_SBR_DATA_CRC) {
this.bs_sbr_crc_bits = ld.readBits(10);
}
/* save old header values, in case the new ones are corrupted */
saved_start_freq = this.bs_start_freq;
saved_samplerate_mode = this.bs_samplerate_mode;
saved_stop_freq = this.bs_stop_freq;
saved_freq_scale = this.bs_freq_scale;
saved_alter_scale = this.bs_alter_scale;
saved_xover_band = this.bs_xover_band;
this.bs_header_flag = ld.readBool();
if(this.bs_header_flag)
sbr_header(ld);
/* Reset? */
sbr_reset();
/* first frame should have a header */
//if (!(sbr.frame == 0 && sbr.bs_header_flag == 0))
if(this.header_count!=0) {
if(this.Reset||(this.bs_header_flag&&this.just_seeked)) {
int rt = calc_sbr_tables(this.bs_start_freq, this.bs_stop_freq,
this.bs_samplerate_mode, this.bs_freq_scale,
this.bs_alter_scale, this.bs_xover_band);
/* if an error occured with the new header values revert to the old ones */
if(rt>0) {
calc_sbr_tables(saved_start_freq, saved_stop_freq,
saved_samplerate_mode, saved_freq_scale,
saved_alter_scale, saved_xover_band);
}
}
if(result==0) {
result = sbr_data(ld);
/* sbr_data() returning an error means that there was an error in
envelope_time_border_vector().
In this case the old time border vector is saved and all the previous
data normally read after sbr_grid() is saved.
*/
/* to be on the safe side, calculate old sbr tables in case of error */
if((result>0)
&&(this.Reset||(this.bs_header_flag&&this.just_seeked))) {
calc_sbr_tables(saved_start_freq, saved_stop_freq,
saved_samplerate_mode, saved_freq_scale,
saved_alter_scale, saved_xover_band);
}
/* we should be able to safely set result to 0 now, */
/* but practise indicates this doesn't work well */
}
}
else {
result = 1;
}
num_sbr_bits2 = (int) (ld.getPosition()-num_sbr_bits1);
/* check if we read more bits then were available for sbr */
if(8*cnt<num_sbr_bits2) {
throw new AACException("frame overread");
//faad_resetbits(ld, num_sbr_bits1+8*cnt);
//num_sbr_bits2 = 8*cnt;
/* turn off PS for the unfortunate case that we randomly read some
* PS data that looks correct */
//this.ps_used = 0;
/* Make sure it doesn't decode SBR in this frame, or we'll get glitches */
//return 1;
}
{
/* -4 does not apply, bs_extension_type is re-read in this function */
num_align_bits = 8*cnt /*- 4*/-num_sbr_bits2;
while(num_align_bits>7) {
ld.readBits(8);
num_align_bits -= 8;
}
ld.readBits(num_align_bits);
}
return result;
} } | public class class_name {
public int decode(IBitStream ld, int cnt) throws AACException {
int result = 0;
int num_align_bits = 0;
long num_sbr_bits1 = ld.getPosition();
int num_sbr_bits2;
int saved_start_freq, saved_samplerate_mode;
int saved_stop_freq, saved_freq_scale;
int saved_xover_band;
boolean saved_alter_scale;
int bs_extension_type = ld.readBits(4);
if(bs_extension_type==EXT_SBR_DATA_CRC) {
this.bs_sbr_crc_bits = ld.readBits(10);
}
/* save old header values, in case the new ones are corrupted */
saved_start_freq = this.bs_start_freq;
saved_samplerate_mode = this.bs_samplerate_mode;
saved_stop_freq = this.bs_stop_freq;
saved_freq_scale = this.bs_freq_scale;
saved_alter_scale = this.bs_alter_scale;
saved_xover_band = this.bs_xover_band;
this.bs_header_flag = ld.readBool();
if(this.bs_header_flag)
sbr_header(ld);
/* Reset? */
sbr_reset();
/* first frame should have a header */
//if (!(sbr.frame == 0 && sbr.bs_header_flag == 0))
if(this.header_count!=0) {
if(this.Reset||(this.bs_header_flag&&this.just_seeked)) {
int rt = calc_sbr_tables(this.bs_start_freq, this.bs_stop_freq,
this.bs_samplerate_mode, this.bs_freq_scale,
this.bs_alter_scale, this.bs_xover_band);
/* if an error occured with the new header values revert to the old ones */
if(rt>0) {
calc_sbr_tables(saved_start_freq, saved_stop_freq,
saved_samplerate_mode, saved_freq_scale,
saved_alter_scale, saved_xover_band); // depends on control dependency: [if], data = [none]
}
}
if(result==0) {
result = sbr_data(ld);
/* sbr_data() returning an error means that there was an error in
envelope_time_border_vector().
In this case the old time border vector is saved and all the previous
data normally read after sbr_grid() is saved.
*/
/* to be on the safe side, calculate old sbr tables in case of error */
if((result>0)
&&(this.Reset||(this.bs_header_flag&&this.just_seeked))) {
calc_sbr_tables(saved_start_freq, saved_stop_freq,
saved_samplerate_mode, saved_freq_scale,
saved_alter_scale, saved_xover_band);
}
/* we should be able to safely set result to 0 now, */
/* but practise indicates this doesn't work well */
}
}
else {
result = 1;
}
num_sbr_bits2 = (int) (ld.getPosition()-num_sbr_bits1);
/* check if we read more bits then were available for sbr */
if(8*cnt<num_sbr_bits2) {
throw new AACException("frame overread");
//faad_resetbits(ld, num_sbr_bits1+8*cnt);
//num_sbr_bits2 = 8*cnt;
/* turn off PS for the unfortunate case that we randomly read some
* PS data that looks correct */
//this.ps_used = 0;
/* Make sure it doesn't decode SBR in this frame, or we'll get glitches */
//return 1;
}
{
/* -4 does not apply, bs_extension_type is re-read in this function */
num_align_bits = 8*cnt /*- 4*/-num_sbr_bits2;
while(num_align_bits>7) {
ld.readBits(8);
num_align_bits -= 8;
}
ld.readBits(num_align_bits);
}
return result;
} } |
public class class_name {
public void marshall(DescribeAutomationStepExecutionsRequest describeAutomationStepExecutionsRequest, ProtocolMarshaller protocolMarshaller) {
if (describeAutomationStepExecutionsRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(describeAutomationStepExecutionsRequest.getAutomationExecutionId(), AUTOMATIONEXECUTIONID_BINDING);
protocolMarshaller.marshall(describeAutomationStepExecutionsRequest.getFilters(), FILTERS_BINDING);
protocolMarshaller.marshall(describeAutomationStepExecutionsRequest.getNextToken(), NEXTTOKEN_BINDING);
protocolMarshaller.marshall(describeAutomationStepExecutionsRequest.getMaxResults(), MAXRESULTS_BINDING);
protocolMarshaller.marshall(describeAutomationStepExecutionsRequest.getReverseOrder(), REVERSEORDER_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(DescribeAutomationStepExecutionsRequest describeAutomationStepExecutionsRequest, ProtocolMarshaller protocolMarshaller) {
if (describeAutomationStepExecutionsRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(describeAutomationStepExecutionsRequest.getAutomationExecutionId(), AUTOMATIONEXECUTIONID_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(describeAutomationStepExecutionsRequest.getFilters(), FILTERS_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(describeAutomationStepExecutionsRequest.getNextToken(), NEXTTOKEN_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(describeAutomationStepExecutionsRequest.getMaxResults(), MAXRESULTS_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(describeAutomationStepExecutionsRequest.getReverseOrder(), REVERSEORDER_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public double[][] latLonToProj(double[][] from, double[][] to,
int latIndex, int lonIndex) {
int cnt = from[0].length;
double[] fromLatA = from[latIndex];
double[] fromLonA = from[lonIndex];
double[] resultXA = to[INDEX_X];
double[] resultYA = to[INDEX_Y];
double toX, toY;
for (int i = 0; i < cnt; i++) {
double fromLat = fromLatA[i];
double fromLon = fromLonA[i];
fromLat = Math.toRadians(fromLat);
fromLon = Math.toRadians(fromLon);
double rho = computeRho(fromLat);
double theta = computeTheta(fromLon);
toX = rho * Math.sin(theta);
toY = rho0 - rho * Math.cos(theta);
resultXA[i] = toX + falseEasting;
resultYA[i] = toY + falseNorthing;
}
return to;
} } | public class class_name {
public double[][] latLonToProj(double[][] from, double[][] to,
int latIndex, int lonIndex) {
int cnt = from[0].length;
double[] fromLatA = from[latIndex];
double[] fromLonA = from[lonIndex];
double[] resultXA = to[INDEX_X];
double[] resultYA = to[INDEX_Y];
double toX, toY;
for (int i = 0; i < cnt; i++) {
double fromLat = fromLatA[i];
double fromLon = fromLonA[i];
fromLat = Math.toRadians(fromLat);
// depends on control dependency: [for], data = [none]
fromLon = Math.toRadians(fromLon);
// depends on control dependency: [for], data = [none]
double rho = computeRho(fromLat);
double theta = computeTheta(fromLon);
toX = rho * Math.sin(theta);
// depends on control dependency: [for], data = [none]
toY = rho0 - rho * Math.cos(theta);
// depends on control dependency: [for], data = [none]
resultXA[i] = toX + falseEasting;
// depends on control dependency: [for], data = [i]
resultYA[i] = toY + falseNorthing;
// depends on control dependency: [for], data = [i]
}
return to;
} } |
public class class_name {
public static Exception checkGrpcUrl(String url) {
try {
parseGrpcUrl(url);
return null;
} catch (Exception e) {
return e;
}
} } | public class class_name {
public static Exception checkGrpcUrl(String url) {
try {
parseGrpcUrl(url); // depends on control dependency: [try], data = [none]
return null; // depends on control dependency: [try], data = [none]
} catch (Exception e) {
return e;
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
private static synchronized ExecutorService initAsyncExecutorService() {
if (ASYNC_EXECUTOR_SERVICE == null) {
ASYNC_EXECUTOR_SERVICE = java.util.concurrent.Executors.newSingleThreadExecutor(
new ThreadFactory() {
public Thread newThread(Runnable r) {
Thread thread = new Thread(r, "javasimon-async");
thread.setDaemon(true);
return thread;
}
});
}
return ASYNC_EXECUTOR_SERVICE;
} } | public class class_name {
private static synchronized ExecutorService initAsyncExecutorService() {
if (ASYNC_EXECUTOR_SERVICE == null) {
ASYNC_EXECUTOR_SERVICE = java.util.concurrent.Executors.newSingleThreadExecutor(
new ThreadFactory() {
public Thread newThread(Runnable r) {
Thread thread = new Thread(r, "javasimon-async");
thread.setDaemon(true);
return thread;
}
});
// depends on control dependency: [if], data = [none]
}
return ASYNC_EXECUTOR_SERVICE;
} } |
public class class_name {
@Override
public void exportXml(final StringBuilder out)
{
if (isParsed())
{
out.append("<");
if (keychar != null)
out.append(keychar.charValue());
else if (keyword != null)
out.append(keyword);
out.append("/>");
}
} } | public class class_name {
@Override
public void exportXml(final StringBuilder out)
{
if (isParsed())
{
out.append("<"); // depends on control dependency: [if], data = [none]
if (keychar != null)
out.append(keychar.charValue());
else if (keyword != null)
out.append(keyword);
out.append("/>"); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
private static String getAbsolutePath(Class<?> someClass, String nameOrSuffix, boolean append) {
if (append) {
return someClass.getName().replace('.', '/') + nameOrSuffix;
} else {
return someClass.getPackage().getName().replace('.', '/') + '/' + nameOrSuffix;
}
} } | public class class_name {
private static String getAbsolutePath(Class<?> someClass, String nameOrSuffix, boolean append) {
if (append) {
return someClass.getName().replace('.', '/') + nameOrSuffix;
// depends on control dependency: [if], data = [none]
} else {
return someClass.getPackage().getName().replace('.', '/') + '/' + nameOrSuffix;
// depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public void propertyChange(PropertyChangeEvent evt) {
String prop = evt.getPropertyName();
if ( ReteGraph.PROP_CHILD_ADDED.equals( prop ) || ReteGraph.PROP_CHILD_REMOVED.equals( prop ) ) {
refreshChildren();
}
} } | public class class_name {
public void propertyChange(PropertyChangeEvent evt) {
String prop = evt.getPropertyName();
if ( ReteGraph.PROP_CHILD_ADDED.equals( prop ) || ReteGraph.PROP_CHILD_REMOVED.equals( prop ) ) {
refreshChildren(); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@Override public void prefill()
{
if (credential.equals(pool.getPrefillCredential()) &&
pool.getConfiguration().isPrefill() &&
pool.getConfiguration().getMinSize() > 0 &&
listeners.size() < pool.getConfiguration().getMinSize())
{
PoolFiller.fillPool(new FillRequest(this, pool.getConfiguration().getMinSize()));
}
} } | public class class_name {
@Override public void prefill()
{
if (credential.equals(pool.getPrefillCredential()) &&
pool.getConfiguration().isPrefill() &&
pool.getConfiguration().getMinSize() > 0 &&
listeners.size() < pool.getConfiguration().getMinSize())
{
PoolFiller.fillPool(new FillRequest(this, pool.getConfiguration().getMinSize())); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public void setAssociations(java.util.Collection<InstanceAssociation> associations) {
if (associations == null) {
this.associations = null;
return;
}
this.associations = new com.amazonaws.internal.SdkInternalList<InstanceAssociation>(associations);
} } | public class class_name {
public void setAssociations(java.util.Collection<InstanceAssociation> associations) {
if (associations == null) {
this.associations = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.associations = new com.amazonaws.internal.SdkInternalList<InstanceAssociation>(associations);
} } |
public class class_name {
public static boolean deletePathRecursive(Tree<ResourceMeta> tree, Path path){
if(tree.hasResource(path)) {
//delete just this resource
return tree.deleteResource(path);
}else if (tree.hasDirectory(path)) {
//list resources and delete
Set<Resource<ResourceMeta>> resources = tree.listDirectory(path);
boolean failed=false;
for (Resource<ResourceMeta> resource : resources) {
if(resource.isDirectory()){
if(!deletePathRecursive(tree,resource.getPath())){
failed=true;
}
}else {
if(!tree.deleteResource(resource.getPath())){
failed=true;
}
}
}
return !failed;
}else{
return true;
}
} } | public class class_name {
public static boolean deletePathRecursive(Tree<ResourceMeta> tree, Path path){
if(tree.hasResource(path)) {
//delete just this resource
return tree.deleteResource(path); // depends on control dependency: [if], data = [none]
}else if (tree.hasDirectory(path)) {
//list resources and delete
Set<Resource<ResourceMeta>> resources = tree.listDirectory(path);
boolean failed=false;
for (Resource<ResourceMeta> resource : resources) {
if(resource.isDirectory()){
if(!deletePathRecursive(tree,resource.getPath())){
failed=true; // depends on control dependency: [if], data = [none]
}
}else {
if(!tree.deleteResource(resource.getPath())){
failed=true; // depends on control dependency: [if], data = [none]
}
}
}
return !failed; // depends on control dependency: [if], data = [none]
}else{
return true; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public synchronized static Engine remove(String engineName) {
Engine removed = engineMap.remove(engineName);
if (removed != null && MAIN_ENGINE_NAME.equals(removed.name)) {
Engine.MAIN_ENGINE = null;
}
return removed;
} } | public class class_name {
public synchronized static Engine remove(String engineName) {
Engine removed = engineMap.remove(engineName);
if (removed != null && MAIN_ENGINE_NAME.equals(removed.name)) {
Engine.MAIN_ENGINE = null;
// depends on control dependency: [if], data = [none]
}
return removed;
} } |
public class class_name {
static ColumnIndexStore create(ParquetFileReader reader, BlockMetaData block, Set<ColumnPath> paths) {
try {
return new ColumnIndexStoreImpl(reader, block, paths);
} catch (MissingOffsetIndexException e) {
return EMPTY;
}
} } | public class class_name {
static ColumnIndexStore create(ParquetFileReader reader, BlockMetaData block, Set<ColumnPath> paths) {
try {
return new ColumnIndexStoreImpl(reader, block, paths); // depends on control dependency: [try], data = [none]
} catch (MissingOffsetIndexException e) {
return EMPTY;
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
@Override
public DoubleMatrix1D[] getMatrixScalingFactors(DoubleMatrix2D A){
DoubleFactory1D F1 = DoubleFactory1D.dense;
Algebra ALG = Algebra.DEFAULT;
int r = A.rows();
int c = A.columns();
DoubleMatrix1D D1 = F1.make(r, 1);
DoubleMatrix1D D2 = F1.make(c, 1);
DoubleMatrix2D AK = A.copy();
DoubleMatrix1D DR = F1.make(r, 1);
DoubleMatrix1D DC = F1.make(c, 1);
DoubleMatrix1D DRInv = F1.make(r);
DoubleMatrix1D DCInv = F1.make(c);
log.debug("eps : " + eps);
int maxIteration = 50;
for(int k=0; k<=maxIteration; k++){
double normR = -Double.MAX_VALUE;
double normC = -Double.MAX_VALUE;
for(int i=0; i<r; i++){
double dri = ALG.normInfinity(AK.viewRow(i));
DR.setQuick(i, Math.sqrt(dri));
DRInv.setQuick(i, 1./Math.sqrt(dri));
normR = Math.max(normR, Math.abs(1-dri));
}
for(int j=0; j<c; j++){
double dci = ALG.normInfinity(AK.viewColumn(j));
DC.setQuick(j, Math.sqrt(dci));
DCInv.setQuick(j, 1./Math.sqrt(dci));
normC = Math.max(normC, Math.abs(1-dci));
}
log.debug("normR: " + normR);
log.debug("normC: " + normC);
if(normR < eps && normC < eps){
break;
}
//D1 = ALG.mult(D1, DRInv);
for(int i=0; i<r; i++){
double prevD1I = D1.getQuick(i);
double newD1I = prevD1I * DRInv.getQuick(i);
D1.setQuick(i, newD1I);
}
//D2 = ALG.mult(D2, DCInv);
for(int j=0; j<c; j++){
double prevD2J = D2.getQuick(j);
double newD2J = prevD2J * DCInv.getQuick(j);
D2.setQuick(j, newD2J);
}
//log.debug("D1: " + ArrayUtils.toString(D1.toArray()));
//log.debug("D2: " + ArrayUtils.toString(D2.toArray()));
if(k==maxIteration){
log.warn("max iteration reached");
}
//AK = ALG.mult(DRInv, ALG.mult(AK, DCInv));
AK = ColtUtils.diagonalMatrixMult(DRInv, AK, DCInv);
}
return new DoubleMatrix1D[]{D1, D2};
} } | public class class_name {
@Override
public DoubleMatrix1D[] getMatrixScalingFactors(DoubleMatrix2D A){
DoubleFactory1D F1 = DoubleFactory1D.dense;
Algebra ALG = Algebra.DEFAULT;
int r = A.rows();
int c = A.columns();
DoubleMatrix1D D1 = F1.make(r, 1);
DoubleMatrix1D D2 = F1.make(c, 1);
DoubleMatrix2D AK = A.copy();
DoubleMatrix1D DR = F1.make(r, 1);
DoubleMatrix1D DC = F1.make(c, 1);
DoubleMatrix1D DRInv = F1.make(r);
DoubleMatrix1D DCInv = F1.make(c);
log.debug("eps : " + eps);
int maxIteration = 50;
for(int k=0; k<=maxIteration; k++){
double normR = -Double.MAX_VALUE;
double normC = -Double.MAX_VALUE;
for(int i=0; i<r; i++){
double dri = ALG.normInfinity(AK.viewRow(i));
DR.setQuick(i, Math.sqrt(dri));
// depends on control dependency: [for], data = [i]
DRInv.setQuick(i, 1./Math.sqrt(dri));
// depends on control dependency: [for], data = [i]
normR = Math.max(normR, Math.abs(1-dri));
// depends on control dependency: [for], data = [none]
}
for(int j=0; j<c; j++){
double dci = ALG.normInfinity(AK.viewColumn(j));
DC.setQuick(j, Math.sqrt(dci));
// depends on control dependency: [for], data = [j]
DCInv.setQuick(j, 1./Math.sqrt(dci));
// depends on control dependency: [for], data = [j]
normC = Math.max(normC, Math.abs(1-dci));
// depends on control dependency: [for], data = [none]
}
log.debug("normR: " + normR);
// depends on control dependency: [for], data = [none]
log.debug("normC: " + normC);
// depends on control dependency: [for], data = [none]
if(normR < eps && normC < eps){
break;
}
//D1 = ALG.mult(D1, DRInv);
for(int i=0; i<r; i++){
double prevD1I = D1.getQuick(i);
double newD1I = prevD1I * DRInv.getQuick(i);
D1.setQuick(i, newD1I);
// depends on control dependency: [for], data = [i]
}
//D2 = ALG.mult(D2, DCInv);
for(int j=0; j<c; j++){
double prevD2J = D2.getQuick(j);
double newD2J = prevD2J * DCInv.getQuick(j);
D2.setQuick(j, newD2J);
// depends on control dependency: [for], data = [j]
}
//log.debug("D1: " + ArrayUtils.toString(D1.toArray()));
//log.debug("D2: " + ArrayUtils.toString(D2.toArray()));
if(k==maxIteration){
log.warn("max iteration reached");
// depends on control dependency: [if], data = [none]
}
//AK = ALG.mult(DRInv, ALG.mult(AK, DCInv));
AK = ColtUtils.diagonalMatrixMult(DRInv, AK, DCInv);
// depends on control dependency: [for], data = [none]
}
return new DoubleMatrix1D[]{D1, D2};
} } |
public class class_name {
public void onJdbcTaskExecutionThrowableEvent(
JdbcTaskExecutionThrowableEvent event, ActivityContextInterface aci) {
if (tracer.isWarningEnabled()) {
tracer.warning(
"Received a JdbcTaskExecutionThrowableEvent, as result of executed task "
+ event.getTask(), event.getThrowable());
}
// end jdbc activity
final JdbcActivity activity = (JdbcActivity) aci.getActivity();
activity.endActivity();
// call back parent
final DataSourceParentSbbLocalInterface parent = (DataSourceParentSbbLocalInterface) sbbContextExt
.getSbbLocalObject().getParent();
final DataSourceJdbcTask jdbcTask = (DataSourceJdbcTask) event
.getTask();
jdbcTask.callBackParentOnException(parent);
} } | public class class_name {
public void onJdbcTaskExecutionThrowableEvent(
JdbcTaskExecutionThrowableEvent event, ActivityContextInterface aci) {
if (tracer.isWarningEnabled()) {
tracer.warning(
"Received a JdbcTaskExecutionThrowableEvent, as result of executed task "
+ event.getTask(), event.getThrowable()); // depends on control dependency: [if], data = [none]
}
// end jdbc activity
final JdbcActivity activity = (JdbcActivity) aci.getActivity();
activity.endActivity();
// call back parent
final DataSourceParentSbbLocalInterface parent = (DataSourceParentSbbLocalInterface) sbbContextExt
.getSbbLocalObject().getParent();
final DataSourceJdbcTask jdbcTask = (DataSourceJdbcTask) event
.getTask();
jdbcTask.callBackParentOnException(parent);
} } |
public class class_name {
public void marshall(DescribeChannelRequest describeChannelRequest, ProtocolMarshaller protocolMarshaller) {
if (describeChannelRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(describeChannelRequest.getChannelName(), CHANNELNAME_BINDING);
protocolMarshaller.marshall(describeChannelRequest.getIncludeStatistics(), INCLUDESTATISTICS_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(DescribeChannelRequest describeChannelRequest, ProtocolMarshaller protocolMarshaller) {
if (describeChannelRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(describeChannelRequest.getChannelName(), CHANNELNAME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(describeChannelRequest.getIncludeStatistics(), INCLUDESTATISTICS_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public synchronized PathMatcher addPrefixPath(final String path, final T handler) {
if (path.isEmpty()) {
throw UndertowMessages.MESSAGES.pathMustBeSpecified();
}
final String normalizedPath = URLUtils.normalizeSlashes(path);
if (PathMatcher.STRING_PATH_SEPARATOR.equals(normalizedPath)) {
this.defaultHandler = handler;
return this;
}
paths.put(normalizedPath, handler);
buildLengths();
return this;
} } | public class class_name {
public synchronized PathMatcher addPrefixPath(final String path, final T handler) {
if (path.isEmpty()) {
throw UndertowMessages.MESSAGES.pathMustBeSpecified();
}
final String normalizedPath = URLUtils.normalizeSlashes(path);
if (PathMatcher.STRING_PATH_SEPARATOR.equals(normalizedPath)) {
this.defaultHandler = handler; // depends on control dependency: [if], data = [none]
return this; // depends on control dependency: [if], data = [none]
}
paths.put(normalizedPath, handler);
buildLengths();
return this;
} } |
public class class_name {
public List<DateTimePeriod> toDays() {
ArrayList<DateTimePeriod> list = new ArrayList<DateTimePeriod>();
// default "current" day to start datetime
DateTime currentStart = getStart();
// calculate "next" day
DateTime nextStart = currentStart.plusDays(1);
// continue adding until we've reached the end
while (nextStart.isBefore(getEnd()) || nextStart.isEqual(getEnd())) {
// its okay to add the current
list.add(new DateTimeDay(currentStart, nextStart));
// increment both
currentStart = nextStart;
nextStart = currentStart.plusDays(1);
}
return list;
} } | public class class_name {
public List<DateTimePeriod> toDays() {
ArrayList<DateTimePeriod> list = new ArrayList<DateTimePeriod>();
// default "current" day to start datetime
DateTime currentStart = getStart();
// calculate "next" day
DateTime nextStart = currentStart.plusDays(1);
// continue adding until we've reached the end
while (nextStart.isBefore(getEnd()) || nextStart.isEqual(getEnd())) {
// its okay to add the current
list.add(new DateTimeDay(currentStart, nextStart)); // depends on control dependency: [while], data = [none]
// increment both
currentStart = nextStart; // depends on control dependency: [while], data = [none]
nextStart = currentStart.plusDays(1); // depends on control dependency: [while], data = [none]
}
return list;
} } |
public class class_name {
public void setTrainingJobSummaries(java.util.Collection<TrainingJobSummary> trainingJobSummaries) {
if (trainingJobSummaries == null) {
this.trainingJobSummaries = null;
return;
}
this.trainingJobSummaries = new java.util.ArrayList<TrainingJobSummary>(trainingJobSummaries);
} } | public class class_name {
public void setTrainingJobSummaries(java.util.Collection<TrainingJobSummary> trainingJobSummaries) {
if (trainingJobSummaries == null) {
this.trainingJobSummaries = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.trainingJobSummaries = new java.util.ArrayList<TrainingJobSummary>(trainingJobSummaries);
} } |
public class class_name {
public Poi getPoi(final String NAME) {
final Poi POINT_OF_INTEREST;
if (pois.keySet().contains(NAME)) {
POINT_OF_INTEREST = pois.get(NAME);
} else {
POINT_OF_INTEREST = null;
}
return POINT_OF_INTEREST;
} } | public class class_name {
public Poi getPoi(final String NAME) {
final Poi POINT_OF_INTEREST;
if (pois.keySet().contains(NAME)) {
POINT_OF_INTEREST = pois.get(NAME); // depends on control dependency: [if], data = [none]
} else {
POINT_OF_INTEREST = null; // depends on control dependency: [if], data = [none]
}
return POINT_OF_INTEREST;
} } |
public class class_name {
@Override
protected Optional<TermType> reduceInferredTypes(ImmutableList<Optional<TermType>> argumentTypes) {
if (argumentTypes.stream().allMatch(Optional::isPresent)) {
return argumentTypes.stream()
.map(Optional::get)
.reduce(TermType::getCommonDenominator);
}
return Optional.empty();
} } | public class class_name {
@Override
protected Optional<TermType> reduceInferredTypes(ImmutableList<Optional<TermType>> argumentTypes) {
if (argumentTypes.stream().allMatch(Optional::isPresent)) {
return argumentTypes.stream()
.map(Optional::get)
.reduce(TermType::getCommonDenominator); // depends on control dependency: [if], data = [none]
}
return Optional.empty();
} } |
public class class_name {
public BulkOperation createBulkOperation(final Collection<?> objects, final boolean allOrNothing) {
try {
final PipedOutputStream out = new PipedOutputStream();
PipedInputStream in = new PipedInputStream(out);
Future<?> writeTask = executorService.submit(new Runnable() {
public void run() {
try {
bulkDocWriter.write(objects, allOrNothing, out);
} catch (Exception e) {
LOG.error("Caught exception while writing bulk document:", e);
}
}
});
return new BulkOperation(writeTask, in);
} catch (IOException e) {
throw Exceptions.propagate(e);
}
} } | public class class_name {
public BulkOperation createBulkOperation(final Collection<?> objects, final boolean allOrNothing) {
try {
final PipedOutputStream out = new PipedOutputStream();
PipedInputStream in = new PipedInputStream(out);
Future<?> writeTask = executorService.submit(new Runnable() {
public void run() {
try {
bulkDocWriter.write(objects, allOrNothing, out); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
LOG.error("Caught exception while writing bulk document:", e);
} // depends on control dependency: [catch], data = [none]
}
});
return new BulkOperation(writeTask, in); // depends on control dependency: [try], data = [none]
} catch (IOException e) {
throw Exceptions.propagate(e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public static boolean isXls(InputStream in) {
final PushbackInputStream pin = IoUtil.toPushbackStream(in, 8);
try {
return FileMagic.valueOf(pin) == FileMagic.OLE2;
} catch (IOException e) {
throw new IORuntimeException(e);
}
} } | public class class_name {
public static boolean isXls(InputStream in) {
final PushbackInputStream pin = IoUtil.toPushbackStream(in, 8);
try {
return FileMagic.valueOf(pin) == FileMagic.OLE2;
// depends on control dependency: [try], data = [none]
} catch (IOException e) {
throw new IORuntimeException(e);
}
// depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public Type skipTypeVars(Type site, boolean capture) {
while (site.hasTag(TYPEVAR)) {
site = site.getUpperBound();
}
return capture ? capture(site) : site;
} } | public class class_name {
public Type skipTypeVars(Type site, boolean capture) {
while (site.hasTag(TYPEVAR)) {
site = site.getUpperBound(); // depends on control dependency: [while], data = [none]
}
return capture ? capture(site) : site;
} } |
public class class_name {
@Override
public By buildBy() {
assertValidAnnotations();
By ans = null;
for (Annotation annotation : field.getDeclaredAnnotations()) {
AbstractFindByBuilder builder = null;
if (annotation.annotationType().isAnnotationPresent(PageFactoryFinder.class)) {
try {
builder = annotation.annotationType()
.getAnnotation(PageFactoryFinder.class).value()
.newInstance();
} catch (ReflectiveOperationException e) {
// Fall through.
}
}
if (builder != null) {
ans = builder.buildIt(annotation, field);
break;
}
}
if (ans == null) {
ans = buildByFromDefault();
}
if (ans == null) {
throw new IllegalArgumentException("Cannot determine how to locate element " + field);
}
return ans;
} } | public class class_name {
@Override
public By buildBy() {
assertValidAnnotations();
By ans = null;
for (Annotation annotation : field.getDeclaredAnnotations()) {
AbstractFindByBuilder builder = null;
if (annotation.annotationType().isAnnotationPresent(PageFactoryFinder.class)) {
try {
builder = annotation.annotationType()
.getAnnotation(PageFactoryFinder.class).value()
.newInstance(); // depends on control dependency: [try], data = [none]
} catch (ReflectiveOperationException e) {
// Fall through.
} // depends on control dependency: [catch], data = [none]
}
if (builder != null) {
ans = builder.buildIt(annotation, field); // depends on control dependency: [if], data = [none]
break;
}
}
if (ans == null) {
ans = buildByFromDefault(); // depends on control dependency: [if], data = [none]
}
if (ans == null) {
throw new IllegalArgumentException("Cannot determine how to locate element " + field);
}
return ans;
} } |
public class class_name {
protected void updateCache(@Nonnull Job<?,?> job, @Nullable Run<?,?> b) {
final int n = b==null ? RESOLVES_TO_NONE : b.getNumber();
File cache = getPermalinkFile(job);
cache.getParentFile().mkdirs();
try {
String target = String.valueOf(n);
if (b != null && !new File(job.getBuildDir(), target).exists()) {
// (re)create the build Number->Id symlink
Util.createSymlink(job.getBuildDir(),b.getId(),target,TaskListener.NULL);
}
writeSymlink(cache, target);
} catch (IOException | InterruptedException e) {
LOGGER.log(Level.WARNING, "Failed to update "+job+" "+getId()+" permalink for " + b, e);
cache.delete();
}
} } | public class class_name {
protected void updateCache(@Nonnull Job<?,?> job, @Nullable Run<?,?> b) {
final int n = b==null ? RESOLVES_TO_NONE : b.getNumber();
File cache = getPermalinkFile(job);
cache.getParentFile().mkdirs();
try {
String target = String.valueOf(n);
if (b != null && !new File(job.getBuildDir(), target).exists()) {
// (re)create the build Number->Id symlink
Util.createSymlink(job.getBuildDir(),b.getId(),target,TaskListener.NULL); // depends on control dependency: [if], data = [none]
}
writeSymlink(cache, target); // depends on control dependency: [try], data = [none]
} catch (IOException | InterruptedException e) {
LOGGER.log(Level.WARNING, "Failed to update "+job+" "+getId()+" permalink for " + b, e);
cache.delete();
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
@Override
public Optional<Buffer> getUser() {
// TODO: this is not 100% correct since it may
// actually contain a password as well.
if (this.userInfo != null) {
return Optional.of(this.userInfo.slice());
}
return Optional.empty();
} } | public class class_name {
@Override
public Optional<Buffer> getUser() {
// TODO: this is not 100% correct since it may
// actually contain a password as well.
if (this.userInfo != null) {
return Optional.of(this.userInfo.slice()); // depends on control dependency: [if], data = [(this.userInfo]
}
return Optional.empty();
} } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.