code stringlengths 130 281k | code_dependency stringlengths 182 306k |
|---|---|
public class class_name {
private String getRootLoggerDirectory() {
String rootLoggerDirectory = null;
org.apache.log4j.Logger rootLogger = org.apache.log4j.Logger.getRootLogger();
Enumeration allAppenders = rootLogger.getAllAppenders();
if (allAppenders != null) {
while (allAppenders.hasMoreElements()) {
Appender appender = (Appender) allAppenders.nextElement();
if (appender instanceof FileAppender) {
FileAppender fileAppender = (FileAppender) appender;
String rootLoggerFile = fileAppender.getFile();
rootLoggerDirectory = new File(rootLoggerFile).getParent();
break;
}
}
}
return rootLoggerDirectory;
} } | public class class_name {
private String getRootLoggerDirectory() {
String rootLoggerDirectory = null;
org.apache.log4j.Logger rootLogger = org.apache.log4j.Logger.getRootLogger();
Enumeration allAppenders = rootLogger.getAllAppenders();
if (allAppenders != null) {
while (allAppenders.hasMoreElements()) {
Appender appender = (Appender) allAppenders.nextElement();
if (appender instanceof FileAppender) {
FileAppender fileAppender = (FileAppender) appender;
String rootLoggerFile = fileAppender.getFile();
rootLoggerDirectory = new File(rootLoggerFile).getParent(); // depends on control dependency: [if], data = [none]
break;
}
}
}
return rootLoggerDirectory;
} } |
public class class_name {
private void collectEjb10Properties(String jndiName, Object injectionObject)
throws InjectionException
{
// For EJB 1.0 compatibility only
// According to spec remove the first element
// of the name (ejb10-properties)
final String prefix = "ejb10-properties/";
if (jndiName.startsWith(prefix)) // d710771.1
{
Properties envProperties = ivNameSpaceConfig.getEnvProperties();
if (envProperties != null)
{
envProperties.put(jndiName.substring(prefix.length()), injectionObject);
}
}
} } | public class class_name {
private void collectEjb10Properties(String jndiName, Object injectionObject)
throws InjectionException
{
// For EJB 1.0 compatibility only
// According to spec remove the first element
// of the name (ejb10-properties)
final String prefix = "ejb10-properties/";
if (jndiName.startsWith(prefix)) // d710771.1
{
Properties envProperties = ivNameSpaceConfig.getEnvProperties();
if (envProperties != null)
{
envProperties.put(jndiName.substring(prefix.length()), injectionObject); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
public void ensureSurrounds(CmsPositionBean child, int padding) {
// increase the size of the outer rectangle
if ((getLeft() + padding) > child.getLeft()) {
int diff = getLeft() - child.getLeft();
// ensure padding
diff += padding;
setLeft(getLeft() - diff);
setWidth(getWidth() + diff);
}
if ((getTop() + padding) > child.getTop()) {
int diff = getTop() - child.getTop();
diff += padding;
setTop(getTop() - diff);
setHeight(getHeight() + diff);
}
if ((getLeft() + getWidth()) < (child.getLeft() + child.getWidth() + padding)) {
int diff = (child.getLeft() + child.getWidth()) - (getLeft() + getWidth());
diff += padding;
setWidth(getWidth() + diff);
}
if ((getTop() + getHeight()) < (child.getTop() + child.getHeight() + padding)) {
int diff = (child.getTop() + child.getHeight()) - (getTop() + getHeight());
diff += padding;
setHeight(getHeight() + diff);
}
} } | public class class_name {
public void ensureSurrounds(CmsPositionBean child, int padding) {
// increase the size of the outer rectangle
if ((getLeft() + padding) > child.getLeft()) {
int diff = getLeft() - child.getLeft();
// ensure padding
diff += padding; // depends on control dependency: [if], data = [none]
setLeft(getLeft() - diff); // depends on control dependency: [if], data = [none]
setWidth(getWidth() + diff); // depends on control dependency: [if], data = [none]
}
if ((getTop() + padding) > child.getTop()) {
int diff = getTop() - child.getTop();
diff += padding; // depends on control dependency: [if], data = [none]
setTop(getTop() - diff); // depends on control dependency: [if], data = [none]
setHeight(getHeight() + diff); // depends on control dependency: [if], data = [none]
}
if ((getLeft() + getWidth()) < (child.getLeft() + child.getWidth() + padding)) {
int diff = (child.getLeft() + child.getWidth()) - (getLeft() + getWidth());
diff += padding; // depends on control dependency: [if], data = [none]
setWidth(getWidth() + diff); // depends on control dependency: [if], data = [none]
}
if ((getTop() + getHeight()) < (child.getTop() + child.getHeight() + padding)) {
int diff = (child.getTop() + child.getHeight()) - (getTop() + getHeight());
diff += padding; // depends on control dependency: [if], data = [none]
setHeight(getHeight() + diff); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
private static boolean traverseTree(PointerTargetTree syn, PointerTargetNodeList ptnl, Synset source) {
java.util.List MGListsList = syn.toList();
for (Object aMGListsList : MGListsList) {
PointerTargetNodeList MGList = (PointerTargetNodeList) aMGListsList;
for (Object aMGList : MGList) {
Synset toAdd = ((PointerTargetNode) aMGList).getSynset();
if (toAdd.equals(source)) {
return true;
}
}
}
for (Object aPtnl : ptnl) {
Synset toAdd = ((PointerTargetNode) aPtnl).getSynset();
if (toAdd.equals(source)) {
return true;
}
}
return false;
} } | public class class_name {
private static boolean traverseTree(PointerTargetTree syn, PointerTargetNodeList ptnl, Synset source) {
java.util.List MGListsList = syn.toList();
for (Object aMGListsList : MGListsList) {
PointerTargetNodeList MGList = (PointerTargetNodeList) aMGListsList;
for (Object aMGList : MGList) {
Synset toAdd = ((PointerTargetNode) aMGList).getSynset();
if (toAdd.equals(source)) {
return true;
// depends on control dependency: [if], data = [none]
}
}
}
for (Object aPtnl : ptnl) {
Synset toAdd = ((PointerTargetNode) aPtnl).getSynset();
if (toAdd.equals(source)) {
return true;
// depends on control dependency: [if], data = [none]
}
}
return false;
} } |
public class class_name {
private void internStyles(Document doc, List<ExternalCss> cssContents) {
Elements els = doc.select(CSS_LINKS_SELECTOR);
for (Element e : els) {
if (!TRUE_VALUE.equals(e.attr(SKIP_INLINE))) {
String path = e.attr(HREF_ATTR);
Element style = new Element(Tag.valueOf(STYLE_TAG), "");
style.appendChild(new DataNode(getCss(cssContents, path), ""));
e.replaceWith(style);
}
}
} } | public class class_name {
private void internStyles(Document doc, List<ExternalCss> cssContents) {
Elements els = doc.select(CSS_LINKS_SELECTOR);
for (Element e : els) {
if (!TRUE_VALUE.equals(e.attr(SKIP_INLINE))) {
String path = e.attr(HREF_ATTR);
Element style = new Element(Tag.valueOf(STYLE_TAG), "");
style.appendChild(new DataNode(getCss(cssContents, path), "")); // depends on control dependency: [if], data = [none]
e.replaceWith(style); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
public Location<T> calculateDriftOffset (Location<T> centerOfMass, Array<SlotAssignment<T>> slotAssignments,
FormationPattern<T> pattern) {
// Clear the center of mass
centerOfMass.getPosition().setZero();
float centerOfMassOrientation = 0;
// Make sure tempLocation is instantiated
if (tempLocation == null) tempLocation = centerOfMass.newLocation();
T centerOfMassPos = centerOfMass.getPosition();
T tempLocationPos = tempLocation.getPosition();
// Go through each assignment and add its contribution to the center
float numberOfAssignments = slotAssignments.size;
for (int i = 0; i < numberOfAssignments; i++) {
pattern.calculateSlotLocation(tempLocation, slotAssignments.get(i).slotNumber);
centerOfMassPos.add(tempLocationPos);
centerOfMassOrientation += tempLocation.getOrientation();
}
// Divide through to get the drift offset.
centerOfMassPos.scl(1f / numberOfAssignments);
centerOfMassOrientation /= numberOfAssignments;
centerOfMass.setOrientation(centerOfMassOrientation);
return centerOfMass;
} } | public class class_name {
public Location<T> calculateDriftOffset (Location<T> centerOfMass, Array<SlotAssignment<T>> slotAssignments,
FormationPattern<T> pattern) {
// Clear the center of mass
centerOfMass.getPosition().setZero();
float centerOfMassOrientation = 0;
// Make sure tempLocation is instantiated
if (tempLocation == null) tempLocation = centerOfMass.newLocation();
T centerOfMassPos = centerOfMass.getPosition();
T tempLocationPos = tempLocation.getPosition();
// Go through each assignment and add its contribution to the center
float numberOfAssignments = slotAssignments.size;
for (int i = 0; i < numberOfAssignments; i++) {
pattern.calculateSlotLocation(tempLocation, slotAssignments.get(i).slotNumber); // depends on control dependency: [for], data = [i]
centerOfMassPos.add(tempLocationPos); // depends on control dependency: [for], data = [none]
centerOfMassOrientation += tempLocation.getOrientation(); // depends on control dependency: [for], data = [none]
}
// Divide through to get the drift offset.
centerOfMassPos.scl(1f / numberOfAssignments);
centerOfMassOrientation /= numberOfAssignments;
centerOfMass.setOrientation(centerOfMassOrientation);
return centerOfMass;
} } |
public class class_name {
public boolean parse(Path ttmlFilePath) {
boolean ret;
try {
final DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
dbf.setNamespaceAware(true);
final DocumentBuilder db = dbf.newDocumentBuilder();
doc = db.parse(ttmlFilePath.toFile());
//Check that we have TTML v1.0 file as we have tested only them...
final NodeList metaData = doc.getElementsByTagName("ebuttm:documentEbuttVersion");
if (metaData != null) {
final Node versionNode = metaData.item(0);
if (versionNode == null || !versionNode.getTextContent().equalsIgnoreCase("v1.0")) {
throw new Exception("Unknown TTML file version");
}
} else {
throw new Exception("Unknown File Format");
}
buildColorMap();
buildFilmList();
ret = true;
} catch (Exception ex) {
//Log.errorLog(912036478, ex, new String[]{ex.getLocalizedMessage(), "File: " + ttmlFilePath});
Log.errorLog(912036478, new String[]{ex.getLocalizedMessage(), "File: " + ttmlFilePath});
ret = false;
}
return ret;
} } | public class class_name {
public boolean parse(Path ttmlFilePath) {
boolean ret;
try {
final DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
dbf.setNamespaceAware(true); // depends on control dependency: [try], data = [none]
final DocumentBuilder db = dbf.newDocumentBuilder();
doc = db.parse(ttmlFilePath.toFile()); // depends on control dependency: [try], data = [none]
//Check that we have TTML v1.0 file as we have tested only them...
final NodeList metaData = doc.getElementsByTagName("ebuttm:documentEbuttVersion");
if (metaData != null) {
final Node versionNode = metaData.item(0);
if (versionNode == null || !versionNode.getTextContent().equalsIgnoreCase("v1.0")) {
throw new Exception("Unknown TTML file version");
}
} else {
throw new Exception("Unknown File Format");
}
buildColorMap(); // depends on control dependency: [try], data = [none]
buildFilmList(); // depends on control dependency: [try], data = [none]
ret = true; // depends on control dependency: [try], data = [none]
} catch (Exception ex) {
//Log.errorLog(912036478, ex, new String[]{ex.getLocalizedMessage(), "File: " + ttmlFilePath});
Log.errorLog(912036478, new String[]{ex.getLocalizedMessage(), "File: " + ttmlFilePath});
ret = false;
} // depends on control dependency: [catch], data = [none]
return ret;
} } |
public class class_name {
public java.util.List<String> getExcludedMembers() {
if (excludedMembers == null) {
excludedMembers = new com.amazonaws.internal.SdkInternalList<String>();
}
return excludedMembers;
} } | public class class_name {
public java.util.List<String> getExcludedMembers() {
if (excludedMembers == null) {
excludedMembers = new com.amazonaws.internal.SdkInternalList<String>(); // depends on control dependency: [if], data = [none]
}
return excludedMembers;
} } |
public class class_name {
public static void addJ2CIdentityService(J2CIdentityService j2cIdentityService) {
if (j2cIdentityService != null) {
j2cIdentityServices.add(j2cIdentityService);
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "A J2CIdentityService implementation was added.", j2cIdentityService.getClass().getName());
}
}
} } | public class class_name {
public static void addJ2CIdentityService(J2CIdentityService j2cIdentityService) {
if (j2cIdentityService != null) {
j2cIdentityServices.add(j2cIdentityService); // depends on control dependency: [if], data = [(j2cIdentityService]
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "A J2CIdentityService implementation was added.", j2cIdentityService.getClass().getName()); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
public boolean skipLine() throws IOException {
checkClosed();
// clear public column values for current line
columnsCount = 0;
boolean skippedLine = false;
if (hasMoreData) {
boolean foundEol = false;
do {
if (dataBuffer.Position == dataBuffer.Count) {
checkDataLength();
} else {
skippedLine = true;
// grab the current letter as a char
char currentLetter = dataBuffer.Buffer[dataBuffer.Position];
if (currentLetter == Letters.CR
|| currentLetter == Letters.LF) {
foundEol = true;
}
// keep track of the last letter because we need
// it for several key decisions
lastLetter = currentLetter;
if (!foundEol) {
dataBuffer.Position++;
}
} // end else
} while (hasMoreData && !foundEol);
columnBuffer.Position = 0;
dataBuffer.LineStart = dataBuffer.Position + 1;
}
rawBuffer.Position = 0;
rawRecord = "";
return skippedLine;
} } | public class class_name {
public boolean skipLine() throws IOException {
checkClosed();
// clear public column values for current line
columnsCount = 0;
boolean skippedLine = false;
if (hasMoreData) {
boolean foundEol = false;
do {
if (dataBuffer.Position == dataBuffer.Count) {
checkDataLength(); // depends on control dependency: [if], data = [none]
} else {
skippedLine = true; // depends on control dependency: [if], data = [none]
// grab the current letter as a char
char currentLetter = dataBuffer.Buffer[dataBuffer.Position];
if (currentLetter == Letters.CR
|| currentLetter == Letters.LF) {
foundEol = true; // depends on control dependency: [if], data = [none]
}
// keep track of the last letter because we need
// it for several key decisions
lastLetter = currentLetter; // depends on control dependency: [if], data = [none]
if (!foundEol) {
dataBuffer.Position++; // depends on control dependency: [if], data = [none]
}
} // end else
} while (hasMoreData && !foundEol);
columnBuffer.Position = 0;
dataBuffer.LineStart = dataBuffer.Position + 1;
}
rawBuffer.Position = 0;
rawRecord = "";
return skippedLine;
} } |
public class class_name {
public void setLevel2CacheLocation(String locationExpr) {
if (null != locationExpr) {
// property name handling in location
String location = propertyReplace(locationExpr);
// create caching directory
File dir = new File(location);
if (!dir.isDirectory()) {
if (dir.exists()) {
log.error("Location {} for 2nd level cache should be a directory.", location);
throw new RuntimeException("Invalid location for setLevel2CacheLocation, " + location + //NOPMD
" has to be a directory."); //NOPMD
} else {
if (!dir.mkdirs()) {
log.warn("Directory {} for 2nd level cache could not be created.", location);
}
}
}
this.location = location;
}
} } | public class class_name {
public void setLevel2CacheLocation(String locationExpr) {
if (null != locationExpr) {
// property name handling in location
String location = propertyReplace(locationExpr);
// create caching directory
File dir = new File(location);
if (!dir.isDirectory()) {
if (dir.exists()) {
log.error("Location {} for 2nd level cache should be a directory.", location); // depends on control dependency: [if], data = [none]
throw new RuntimeException("Invalid location for setLevel2CacheLocation, " + location + //NOPMD
" has to be a directory."); //NOPMD
} else {
if (!dir.mkdirs()) {
log.warn("Directory {} for 2nd level cache could not be created.", location); // depends on control dependency: [if], data = [none]
}
}
}
this.location = location; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public List<String> getFieldNames() {
List<String> ret = new ArrayList<String>();
DOType typ = this;
while(typ != null) {
ret.addAll(typ.getDeclaredFieldNames());
typ = typ.getSuperType();
}
return ret;
} } | public class class_name {
public List<String> getFieldNames() {
List<String> ret = new ArrayList<String>();
DOType typ = this;
while(typ != null) {
ret.addAll(typ.getDeclaredFieldNames()); // depends on control dependency: [while], data = [(typ]
typ = typ.getSuperType(); // depends on control dependency: [while], data = [none]
}
return ret;
} } |
public class class_name {
private Filter getFilter(String layerFilter, String[] featureIds) throws GeomajasException {
Filter filter = null;
if (null != layerFilter) {
filter = filterService.parseFilter(layerFilter);
}
if (null != featureIds) {
Filter fidFilter = filterService.createFidFilter(featureIds);
if (null == filter) {
filter = fidFilter;
} else {
filter = filterService.createAndFilter(filter, fidFilter);
}
}
return filter;
} } | public class class_name {
private Filter getFilter(String layerFilter, String[] featureIds) throws GeomajasException {
Filter filter = null;
if (null != layerFilter) {
filter = filterService.parseFilter(layerFilter);
}
if (null != featureIds) {
Filter fidFilter = filterService.createFidFilter(featureIds);
if (null == filter) {
filter = fidFilter; // depends on control dependency: [if], data = [none]
} else {
filter = filterService.createAndFilter(filter, fidFilter); // depends on control dependency: [if], data = [none]
}
}
return filter;
} } |
public class class_name {
public boolean processCharWithTextParser(final char ch) {
textParser = textParser.processChar(ch, textParserContext);
if (textParser == TextParserState.DELIMITER_REACHED) {
parsedText = textParserContext.getText();
textParser = TextParserState.LEADING_WHITESPACE;
textParserContext.reset();
return true;
}
return false;
} } | public class class_name {
public boolean processCharWithTextParser(final char ch) {
textParser = textParser.processChar(ch, textParserContext);
if (textParser == TextParserState.DELIMITER_REACHED) {
parsedText = textParserContext.getText(); // depends on control dependency: [if], data = [none]
textParser = TextParserState.LEADING_WHITESPACE; // depends on control dependency: [if], data = [none]
textParserContext.reset(); // depends on control dependency: [if], data = [none]
return true; // depends on control dependency: [if], data = [none]
}
return false;
} } |
public class class_name {
@Override
@OverridingMethodsMustInvokeSuper
protected void onDestroy (@Nonnull final IScope aScopeInDestruction) throws Exception
{
// Destroy factory
if (m_aFactory != null)
{
if (m_aFactory.isOpen ())
{
// Clear cache
try
{
m_aFactory.getCache ().evictAll ();
}
catch (final PersistenceException ex)
{
// May happen if now database connection is available
}
// Close
m_aFactory.close ();
}
m_aFactory = null;
}
LOGGER.info ("Closed EntityManagerFactory for persistence unit '" + m_sPersistenceUnitName + "'");
} } | public class class_name {
@Override
@OverridingMethodsMustInvokeSuper
protected void onDestroy (@Nonnull final IScope aScopeInDestruction) throws Exception
{
// Destroy factory
if (m_aFactory != null)
{
if (m_aFactory.isOpen ())
{
// Clear cache
try
{
m_aFactory.getCache ().evictAll (); // depends on control dependency: [try], data = [none]
}
catch (final PersistenceException ex)
{
// May happen if now database connection is available
} // depends on control dependency: [catch], data = [none]
// Close
m_aFactory.close ();
}
m_aFactory = null;
}
LOGGER.info ("Closed EntityManagerFactory for persistence unit '" + m_sPersistenceUnitName + "'");
} } |
public class class_name {
public static ConcatVectorTable readFromProto(ConcatVectorTableProto.ConcatVectorTable proto) {
int[] neighborSizes = new int[proto.getDimensionSizeCount()];
for (int i = 0; i < neighborSizes.length; i++) {
neighborSizes[i] = proto.getDimensionSize(i);
}
ConcatVectorTable factor = new ConcatVectorTable(neighborSizes);
int i = 0;
for (int[] assignment : factor) {
final ConcatVector vector = ConcatVector.readFromProto(proto.getFactorTable(i));
factor.setAssignmentValue(assignment, () -> vector);
i++;
}
return factor;
} } | public class class_name {
public static ConcatVectorTable readFromProto(ConcatVectorTableProto.ConcatVectorTable proto) {
int[] neighborSizes = new int[proto.getDimensionSizeCount()];
for (int i = 0; i < neighborSizes.length; i++) {
neighborSizes[i] = proto.getDimensionSize(i); // depends on control dependency: [for], data = [i]
}
ConcatVectorTable factor = new ConcatVectorTable(neighborSizes);
int i = 0;
for (int[] assignment : factor) {
final ConcatVector vector = ConcatVector.readFromProto(proto.getFactorTable(i));
factor.setAssignmentValue(assignment, () -> vector); // depends on control dependency: [for], data = [assignment]
i++; // depends on control dependency: [for], data = [none]
}
return factor;
} } |
public class class_name {
private void validate() {
if (Strings.emptyToNull(random) == null) {
random = Strings.emptyToNull(getProject().getProperty(SYSPROP_RANDOM_SEED()));
}
if (random == null) {
throw new BuildException("Required attribute 'seed' must not be empty. Look at <junit4:pickseed>.");
}
long[] seeds = SeedUtils.parseSeedChain(random);
if (seeds.length < 1) {
throw new BuildException("Random seed is required.");
}
if (values.isEmpty() && !allowUndefined) {
throw new BuildException("No values to pick from and allowUndefined=false.");
}
} } | public class class_name {
private void validate() {
if (Strings.emptyToNull(random) == null) {
random = Strings.emptyToNull(getProject().getProperty(SYSPROP_RANDOM_SEED())); // depends on control dependency: [if], data = [none]
}
if (random == null) {
throw new BuildException("Required attribute 'seed' must not be empty. Look at <junit4:pickseed>.");
}
long[] seeds = SeedUtils.parseSeedChain(random);
if (seeds.length < 1) {
throw new BuildException("Random seed is required.");
}
if (values.isEmpty() && !allowUndefined) {
throw new BuildException("No values to pick from and allowUndefined=false.");
}
} } |
public class class_name {
public static <T> void forceUpdateAllProperties(BeanDescriptor<T> beanDescriptor, T model) {
EntityBeanIntercept intercept = ((EntityBean) model)._ebean_getIntercept();
intercept.setLoaded();
int idIndex = beanDescriptor.getIdProperty().getPropertyIndex();
for (int i = 0; i < intercept.getPropertyLength(); i++) {
if (i != idIndex) {
intercept.markPropertyAsChanged(i);
intercept.setLoadedProperty(i);
}
}
} } | public class class_name {
public static <T> void forceUpdateAllProperties(BeanDescriptor<T> beanDescriptor, T model) {
EntityBeanIntercept intercept = ((EntityBean) model)._ebean_getIntercept();
intercept.setLoaded();
int idIndex = beanDescriptor.getIdProperty().getPropertyIndex();
for (int i = 0; i < intercept.getPropertyLength(); i++) {
if (i != idIndex) {
intercept.markPropertyAsChanged(i); // depends on control dependency: [if], data = [(i]
intercept.setLoadedProperty(i); // depends on control dependency: [if], data = [(i]
}
}
} } |
public class class_name {
public void info(Throwable t) {
if (isEnabled() && isInfoEnabled()) {
dispatchLogException(new LogEvent(this, LogEvent.INFO_TYPE, t));
}
} } | public class class_name {
public void info(Throwable t) {
if (isEnabled() && isInfoEnabled()) {
dispatchLogException(new LogEvent(this, LogEvent.INFO_TYPE, t)); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public URI addAPIKey(String uri) {
try {
final URIBuilder builder = new URIBuilder(uri);
if (apiAccessKey != null) {
builder.setParameter("key", apiAccessKey);
}
return builder.build();
} catch (URISyntaxException e) {
throw new RedmineInternalError(e);
}
} } | public class class_name {
public URI addAPIKey(String uri) {
try {
final URIBuilder builder = new URIBuilder(uri);
if (apiAccessKey != null) {
builder.setParameter("key", apiAccessKey); // depends on control dependency: [if], data = [none]
}
return builder.build(); // depends on control dependency: [try], data = [none]
} catch (URISyntaxException e) {
throw new RedmineInternalError(e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public synchronized void shutdownTaskScheduler(){
if (scheduler != null && !scheduler.isShutdown()) {
scheduler.shutdown();
logger.info("shutdowned the task scheduler. No longer accepting new tasks");
scheduler = null;
}
} } | public class class_name {
public synchronized void shutdownTaskScheduler(){
if (scheduler != null && !scheduler.isShutdown()) {
scheduler.shutdown(); // depends on control dependency: [if], data = [none]
logger.info("shutdowned the task scheduler. No longer accepting new tasks"); // depends on control dependency: [if], data = [none]
scheduler = null; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public void setEntitlements(java.util.Collection<GrantEntitlementRequest> entitlements) {
if (entitlements == null) {
this.entitlements = null;
return;
}
this.entitlements = new java.util.ArrayList<GrantEntitlementRequest>(entitlements);
} } | public class class_name {
public void setEntitlements(java.util.Collection<GrantEntitlementRequest> entitlements) {
if (entitlements == null) {
this.entitlements = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.entitlements = new java.util.ArrayList<GrantEntitlementRequest>(entitlements);
} } |
public class class_name {
public void marshall(StandardsSubscription standardsSubscription, ProtocolMarshaller protocolMarshaller) {
if (standardsSubscription == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(standardsSubscription.getStandardsSubscriptionArn(), STANDARDSSUBSCRIPTIONARN_BINDING);
protocolMarshaller.marshall(standardsSubscription.getStandardsArn(), STANDARDSARN_BINDING);
protocolMarshaller.marshall(standardsSubscription.getStandardsInput(), STANDARDSINPUT_BINDING);
protocolMarshaller.marshall(standardsSubscription.getStandardsStatus(), STANDARDSSTATUS_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(StandardsSubscription standardsSubscription, ProtocolMarshaller protocolMarshaller) {
if (standardsSubscription == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(standardsSubscription.getStandardsSubscriptionArn(), STANDARDSSUBSCRIPTIONARN_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(standardsSubscription.getStandardsArn(), STANDARDSARN_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(standardsSubscription.getStandardsInput(), STANDARDSINPUT_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(standardsSubscription.getStandardsStatus(), STANDARDSSTATUS_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public <T> PageData<T> page(PageRequest pageRequest, final String sql, final Map params, final String[] sortColumns, final Class[] target) {
if (pageRequest == null) {
pageRequest = getDefaultPageRequest();
}
// check sort
String sortColumName = null;
boolean ascending = true;
int sort = pageRequest.getSort();
if (sort != 0) {
ascending = sort > 0;
if (!ascending) {
sort = -sort;
}
int index = sort - 1;
if (index >= sortColumns.length) {
index = 1;
}
sortColumName = sortColumns[index];
}
// page
int page = pageRequest.getPage();
int pageSize = pageRequest.getSize();
PageData<T> pageData = page(sql, params, page, pageSize, sortColumName, ascending, target);
// fix the out-of-bounds
if (pageData.getItems().isEmpty() && pageData.currentPage != 0) {
if (pageData.currentPage != page) {
// out of bounds
int newPage = pageData.getCurrentPage();
pageData = page(sql, params, newPage, pageSize, sortColumName, ascending, target);
}
}
return pageData;
} } | public class class_name {
public <T> PageData<T> page(PageRequest pageRequest, final String sql, final Map params, final String[] sortColumns, final Class[] target) {
if (pageRequest == null) {
pageRequest = getDefaultPageRequest(); // depends on control dependency: [if], data = [none]
}
// check sort
String sortColumName = null;
boolean ascending = true;
int sort = pageRequest.getSort();
if (sort != 0) {
ascending = sort > 0; // depends on control dependency: [if], data = [none]
if (!ascending) {
sort = -sort; // depends on control dependency: [if], data = [none]
}
int index = sort - 1;
if (index >= sortColumns.length) {
index = 1; // depends on control dependency: [if], data = [none]
}
sortColumName = sortColumns[index]; // depends on control dependency: [if], data = [none]
}
// page
int page = pageRequest.getPage();
int pageSize = pageRequest.getSize();
PageData<T> pageData = page(sql, params, page, pageSize, sortColumName, ascending, target);
// fix the out-of-bounds
if (pageData.getItems().isEmpty() && pageData.currentPage != 0) {
if (pageData.currentPage != page) {
// out of bounds
int newPage = pageData.getCurrentPage();
pageData = page(sql, params, newPage, pageSize, sortColumName, ascending, target); // depends on control dependency: [if], data = [none]
}
}
return pageData;
} } |
public class class_name {
protected void setBeanDeploymentArchivesAccessibility() {
for (WeldBeanDeploymentArchive beanDeploymentArchive : beanDeploymentArchives) {
Set<WeldBeanDeploymentArchive> accessibleArchives = new HashSet<>();
for (WeldBeanDeploymentArchive candidate : beanDeploymentArchives) {
if (candidate.equals(beanDeploymentArchive)) {
continue;
}
accessibleArchives.add(candidate);
}
beanDeploymentArchive.setAccessibleBeanDeploymentArchives(accessibleArchives);
}
} } | public class class_name {
protected void setBeanDeploymentArchivesAccessibility() {
for (WeldBeanDeploymentArchive beanDeploymentArchive : beanDeploymentArchives) {
Set<WeldBeanDeploymentArchive> accessibleArchives = new HashSet<>();
for (WeldBeanDeploymentArchive candidate : beanDeploymentArchives) {
if (candidate.equals(beanDeploymentArchive)) {
continue;
}
accessibleArchives.add(candidate); // depends on control dependency: [for], data = [candidate]
}
beanDeploymentArchive.setAccessibleBeanDeploymentArchives(accessibleArchives); // depends on control dependency: [for], data = [beanDeploymentArchive]
}
} } |
public class class_name {
private void removeNode(ItemState item, int indexItem)
{
items.remove(indexItem);
index.remove(item.getData().getIdentifier());
index.remove(item.getData().getQPath());
index.remove(new ParentIDQPathBasedKey(item));
index.remove(new IDStateBasedKey(item.getData().getIdentifier(), item.getState()));
childNodesInfo.remove(item.getData().getIdentifier());
lastChildNodeStates.remove(item.getData().getIdentifier());
childNodeStates.remove(item.getData().getIdentifier());
if (allPathsChanged != null && item.isPathChanged())
{
allPathsChanged.remove(item);
if (allPathsChanged.isEmpty())
allPathsChanged = null;
}
if (item.isPersisted())
{
int childInfo[] = childNodesInfo.get(item.getData().getParentIdentifier());
if (childInfo != null)
{
if (item.isDeleted())
{
++childInfo[CHILD_NODES_COUNT];
}
else if (item.isAdded())
{
--childInfo[CHILD_NODES_COUNT];
}
childNodesInfo.put(item.getData().getParentIdentifier(), childInfo);
}
}
Map<String, ItemState> children = lastChildNodeStates.get(item.getData().getParentIdentifier());
if (children != null)
{
children.remove(item.getData().getIdentifier());
if (children.isEmpty())
{
lastChildNodeStates.remove(item.getData().getParentIdentifier());
}
}
List<ItemState> listItemStates = childNodeStates.get(item.getData().getParentIdentifier());
if (listItemStates != null)
{
listItemStates.remove(item);
if (listItemStates.isEmpty())
{
childNodeStates.remove(item.getData().getParentIdentifier());
}
}
if ((children == null || children.isEmpty()) && (listItemStates == null || listItemStates.isEmpty()))
{
childNodesInfo.remove(item.getData().getParentIdentifier());
}
} } | public class class_name {
private void removeNode(ItemState item, int indexItem)
{
items.remove(indexItem);
index.remove(item.getData().getIdentifier());
index.remove(item.getData().getQPath());
index.remove(new ParentIDQPathBasedKey(item));
index.remove(new IDStateBasedKey(item.getData().getIdentifier(), item.getState()));
childNodesInfo.remove(item.getData().getIdentifier());
lastChildNodeStates.remove(item.getData().getIdentifier());
childNodeStates.remove(item.getData().getIdentifier());
if (allPathsChanged != null && item.isPathChanged())
{
allPathsChanged.remove(item); // depends on control dependency: [if], data = [none]
if (allPathsChanged.isEmpty())
allPathsChanged = null;
}
if (item.isPersisted())
{
int childInfo[] = childNodesInfo.get(item.getData().getParentIdentifier());
if (childInfo != null)
{
if (item.isDeleted())
{
++childInfo[CHILD_NODES_COUNT]; // depends on control dependency: [if], data = [none]
}
else if (item.isAdded())
{
--childInfo[CHILD_NODES_COUNT]; // depends on control dependency: [if], data = [none]
}
childNodesInfo.put(item.getData().getParentIdentifier(), childInfo); // depends on control dependency: [if], data = [none]
}
}
Map<String, ItemState> children = lastChildNodeStates.get(item.getData().getParentIdentifier());
if (children != null)
{
children.remove(item.getData().getIdentifier()); // depends on control dependency: [if], data = [none]
if (children.isEmpty())
{
lastChildNodeStates.remove(item.getData().getParentIdentifier()); // depends on control dependency: [if], data = [none]
}
}
List<ItemState> listItemStates = childNodeStates.get(item.getData().getParentIdentifier());
if (listItemStates != null)
{
listItemStates.remove(item); // depends on control dependency: [if], data = [none]
if (listItemStates.isEmpty())
{
childNodeStates.remove(item.getData().getParentIdentifier()); // depends on control dependency: [if], data = [none]
}
}
if ((children == null || children.isEmpty()) && (listItemStates == null || listItemStates.isEmpty()))
{
childNodesInfo.remove(item.getData().getParentIdentifier()); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public Map<Integer, List<Integer>> getUserRevisionIds(int userid)
throws WikiApiException
{
Map<Integer, List<Integer>> revIds = new HashMap<Integer, List<Integer>>();
try {
if (userid < 1) {
throw new IllegalArgumentException();
}
if (!indexExists("revisions", "userids")) {
System.err.println("You should create and index for the field ContributorID: create index userids ON revisions(ContributorId(15));");
}
PreparedStatement statement = null;
ResultSet result = null;
try {
statement = connection.prepareStatement("SELECT ArticleID, RevisionID "
+ "FROM revisions WHERE ContributorId=?");
statement.setInt(1, userid);
result = statement.executeQuery();
// Make the query
if (result == null) {
throw new WikiPageNotFoundException("No revisions for user " + userid);
}
while (result.next()) {
int artId = result.getInt(1);
int revId = result.getInt(2);
if (revIds.containsKey(artId)) {
revIds.get(artId).add(revId);
}
else {
List<Integer> revList = new ArrayList<Integer>();
revList.add(revId);
revIds.put(artId, revList);
}
}
}
finally {
if (statement != null) {
statement.close();
}
if (result != null) {
result.close();
}
}
return revIds;
}
catch (WikiApiException e) {
throw e;
}
catch (Exception e) {
throw new WikiApiException(e);
}
} } | public class class_name {
public Map<Integer, List<Integer>> getUserRevisionIds(int userid)
throws WikiApiException
{
Map<Integer, List<Integer>> revIds = new HashMap<Integer, List<Integer>>();
try {
if (userid < 1) {
throw new IllegalArgumentException();
}
if (!indexExists("revisions", "userids")) {
System.err.println("You should create and index for the field ContributorID: create index userids ON revisions(ContributorId(15));"); // depends on control dependency: [if], data = [none]
}
PreparedStatement statement = null;
ResultSet result = null;
try {
statement = connection.prepareStatement("SELECT ArticleID, RevisionID "
+ "FROM revisions WHERE ContributorId=?"); // depends on control dependency: [try], data = [none]
statement.setInt(1, userid); // depends on control dependency: [try], data = [none]
result = statement.executeQuery(); // depends on control dependency: [try], data = [none]
// Make the query
if (result == null) {
throw new WikiPageNotFoundException("No revisions for user " + userid);
}
while (result.next()) {
int artId = result.getInt(1);
int revId = result.getInt(2);
if (revIds.containsKey(artId)) {
revIds.get(artId).add(revId); // depends on control dependency: [if], data = [none]
}
else {
List<Integer> revList = new ArrayList<Integer>();
revList.add(revId); // depends on control dependency: [if], data = [none]
revIds.put(artId, revList); // depends on control dependency: [if], data = [none]
}
}
}
finally {
if (statement != null) {
statement.close(); // depends on control dependency: [if], data = [none]
}
if (result != null) {
result.close(); // depends on control dependency: [if], data = [none]
}
}
return revIds;
}
catch (WikiApiException e) {
throw e;
}
catch (Exception e) {
throw new WikiApiException(e);
}
} } |
public class class_name {
public Impl embedded(CharSequence ref, List<Resource> resourceList) {
if (StringUtils.isNotEmpty(ref) && resourceList != null) {
List<Resource> resources = this.embeddedMap.computeIfAbsent(ref, charSequence -> new ArrayList<>());
resources.addAll(resourceList);
}
return (Impl) this;
} } | public class class_name {
public Impl embedded(CharSequence ref, List<Resource> resourceList) {
if (StringUtils.isNotEmpty(ref) && resourceList != null) {
List<Resource> resources = this.embeddedMap.computeIfAbsent(ref, charSequence -> new ArrayList<>());
resources.addAll(resourceList); // depends on control dependency: [if], data = [none]
}
return (Impl) this;
} } |
public class class_name {
@Override
public String getOperationParameterDescription(String operationName, String paramName, Locale locale, ResourceBundle bundle) {
if (reuseAttributesForAdd && ADD.equals(operationName)) {
return bundle.getString(getBundleKey(paramName));
}
return bundle.getString(getBundleKey(operationName, paramName));
} } | public class class_name {
@Override
public String getOperationParameterDescription(String operationName, String paramName, Locale locale, ResourceBundle bundle) {
if (reuseAttributesForAdd && ADD.equals(operationName)) {
return bundle.getString(getBundleKey(paramName)); // depends on control dependency: [if], data = [none]
}
return bundle.getString(getBundleKey(operationName, paramName));
} } |
public class class_name {
public final void multiplyValues(double multiplier) {
for(Map.Entry<Object, Object> entry : internalData.entrySet()) {
Double previousValue = TypeInference.toDouble(entry.getValue());
if(previousValue==null) {
continue;
}
internalData.put(entry.getKey(), previousValue*multiplier);
}
} } | public class class_name {
public final void multiplyValues(double multiplier) {
for(Map.Entry<Object, Object> entry : internalData.entrySet()) {
Double previousValue = TypeInference.toDouble(entry.getValue());
if(previousValue==null) {
continue;
}
internalData.put(entry.getKey(), previousValue*multiplier); // depends on control dependency: [for], data = [entry]
}
} } |
public class class_name {
public void emit(final StringBuilder out, final Block root)
{
root.removeSurroundingEmptyLines();
switch (root.type)
{
case RULER:
this.config.decorator.horizontalRuler(out);
return;
case NONE:
case XML:
break;
case HEADLINE:
this.config.decorator.openHeadline(out, root.hlDepth);
if (this.useExtensions && root.id != null)
{
out.append(" id=\"");
Utils.appendCode(out, root.id, 0, root.id.length());
out.append('"');
}
out.append('>');
break;
case PARAGRAPH:
this.config.decorator.openParagraph(out);
break;
case CODE:
case FENCED_CODE:
if (this.config.codeBlockEmitter == null)
{
this.config.decorator.openCodeBlock(out);
}
break;
case BLOCKQUOTE:
this.config.decorator.openBlockquote(out);
break;
case UNORDERED_LIST:
this.config.decorator.openUnorderedList(out);
break;
case ORDERED_LIST:
this.config.decorator.openOrderedList(out);
break;
case LIST_ITEM:
this.config.decorator.openListItem(out);
if (this.useExtensions && root.id != null)
{
out.append(" id=\"");
Utils.appendCode(out, root.id, 0, root.id.length());
out.append('"');
}
out.append('>');
break;
}
if (root.hasLines())
{
this.emitLines(out, root);
}
else
{
Block block = root.blocks;
while (block != null)
{
this.emit(out, block);
block = block.next;
}
}
switch (root.type)
{
case RULER:
case NONE:
case XML:
break;
case HEADLINE:
this.config.decorator.closeHeadline(out, root.hlDepth);
break;
case PARAGRAPH:
this.config.decorator.closeParagraph(out);
break;
case CODE:
case FENCED_CODE:
if (this.config.codeBlockEmitter == null)
{
this.config.decorator.closeCodeBlock(out);
}
break;
case BLOCKQUOTE:
this.config.decorator.closeBlockquote(out);
break;
case UNORDERED_LIST:
this.config.decorator.closeUnorderedList(out);
break;
case ORDERED_LIST:
this.config.decorator.closeOrderedList(out);
break;
case LIST_ITEM:
this.config.decorator.closeListItem(out);
break;
}
} } | public class class_name {
public void emit(final StringBuilder out, final Block root)
{
root.removeSurroundingEmptyLines();
switch (root.type)
{
case RULER:
this.config.decorator.horizontalRuler(out);
return;
case NONE:
case XML:
break;
case HEADLINE:
this.config.decorator.openHeadline(out, root.hlDepth);
if (this.useExtensions && root.id != null)
{
out.append(" id=\""); // depends on control dependency: [if], data = [none]
Utils.appendCode(out, root.id, 0, root.id.length()); // depends on control dependency: [if], data = [none]
out.append('"'); // depends on control dependency: [if], data = [none]
}
out.append('>');
break;
case PARAGRAPH:
this.config.decorator.openParagraph(out);
break;
case CODE:
case FENCED_CODE:
if (this.config.codeBlockEmitter == null)
{
this.config.decorator.openCodeBlock(out); // depends on control dependency: [if], data = [none]
}
break;
case BLOCKQUOTE:
this.config.decorator.openBlockquote(out);
break;
case UNORDERED_LIST:
this.config.decorator.openUnorderedList(out);
break;
case ORDERED_LIST:
this.config.decorator.openOrderedList(out);
break;
case LIST_ITEM:
this.config.decorator.openListItem(out);
if (this.useExtensions && root.id != null)
{
out.append(" id=\""); // depends on control dependency: [if], data = [none]
Utils.appendCode(out, root.id, 0, root.id.length()); // depends on control dependency: [if], data = [none]
out.append('"'); // depends on control dependency: [if], data = [none]
}
out.append('>');
break;
}
if (root.hasLines())
{
this.emitLines(out, root); // depends on control dependency: [if], data = [none]
}
else
{
Block block = root.blocks;
while (block != null)
{
this.emit(out, block); // depends on control dependency: [while], data = [none]
block = block.next; // depends on control dependency: [while], data = [none]
}
}
switch (root.type)
{
case RULER:
case NONE:
case XML:
break;
case HEADLINE:
this.config.decorator.closeHeadline(out, root.hlDepth);
break;
case PARAGRAPH:
this.config.decorator.closeParagraph(out);
break;
case CODE:
case FENCED_CODE:
if (this.config.codeBlockEmitter == null)
{
this.config.decorator.closeCodeBlock(out); // depends on control dependency: [if], data = [none]
}
break;
case BLOCKQUOTE:
this.config.decorator.closeBlockquote(out);
break;
case UNORDERED_LIST:
this.config.decorator.closeUnorderedList(out);
break;
case ORDERED_LIST:
this.config.decorator.closeOrderedList(out);
break;
case LIST_ITEM:
this.config.decorator.closeListItem(out);
break;
}
} } |
public class class_name {
public KeyArea setupKey(int iKeyArea)
{
KeyArea keyArea = null;
if (iKeyArea == 0)
{
keyArea = this.makeIndex(DBConstants.UNIQUE, ID_KEY);
keyArea.addKeyField(ID, DBConstants.ASCENDING);
}
if (iKeyArea == 1)
{
keyArea = this.makeIndex(DBConstants.SECONDARY_KEY, USER_NAME_KEY);
keyArea.addKeyField(USER_NAME, DBConstants.ASCENDING);
}
if (keyArea == null)
keyArea = super.setupKey(iKeyArea);
return keyArea;
} } | public class class_name {
public KeyArea setupKey(int iKeyArea)
{
KeyArea keyArea = null;
if (iKeyArea == 0)
{
keyArea = this.makeIndex(DBConstants.UNIQUE, ID_KEY); // depends on control dependency: [if], data = [none]
keyArea.addKeyField(ID, DBConstants.ASCENDING); // depends on control dependency: [if], data = [none]
}
if (iKeyArea == 1)
{
keyArea = this.makeIndex(DBConstants.SECONDARY_KEY, USER_NAME_KEY); // depends on control dependency: [if], data = [none]
keyArea.addKeyField(USER_NAME, DBConstants.ASCENDING); // depends on control dependency: [if], data = [none]
}
if (keyArea == null)
keyArea = super.setupKey(iKeyArea);
return keyArea;
} } |
public class class_name {
public void marshall(UpdateLayerRequest updateLayerRequest, ProtocolMarshaller protocolMarshaller) {
if (updateLayerRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(updateLayerRequest.getLayerId(), LAYERID_BINDING);
protocolMarshaller.marshall(updateLayerRequest.getName(), NAME_BINDING);
protocolMarshaller.marshall(updateLayerRequest.getShortname(), SHORTNAME_BINDING);
protocolMarshaller.marshall(updateLayerRequest.getAttributes(), ATTRIBUTES_BINDING);
protocolMarshaller.marshall(updateLayerRequest.getCloudWatchLogsConfiguration(), CLOUDWATCHLOGSCONFIGURATION_BINDING);
protocolMarshaller.marshall(updateLayerRequest.getCustomInstanceProfileArn(), CUSTOMINSTANCEPROFILEARN_BINDING);
protocolMarshaller.marshall(updateLayerRequest.getCustomJson(), CUSTOMJSON_BINDING);
protocolMarshaller.marshall(updateLayerRequest.getCustomSecurityGroupIds(), CUSTOMSECURITYGROUPIDS_BINDING);
protocolMarshaller.marshall(updateLayerRequest.getPackages(), PACKAGES_BINDING);
protocolMarshaller.marshall(updateLayerRequest.getVolumeConfigurations(), VOLUMECONFIGURATIONS_BINDING);
protocolMarshaller.marshall(updateLayerRequest.getEnableAutoHealing(), ENABLEAUTOHEALING_BINDING);
protocolMarshaller.marshall(updateLayerRequest.getAutoAssignElasticIps(), AUTOASSIGNELASTICIPS_BINDING);
protocolMarshaller.marshall(updateLayerRequest.getAutoAssignPublicIps(), AUTOASSIGNPUBLICIPS_BINDING);
protocolMarshaller.marshall(updateLayerRequest.getCustomRecipes(), CUSTOMRECIPES_BINDING);
protocolMarshaller.marshall(updateLayerRequest.getInstallUpdatesOnBoot(), INSTALLUPDATESONBOOT_BINDING);
protocolMarshaller.marshall(updateLayerRequest.getUseEbsOptimizedInstances(), USEEBSOPTIMIZEDINSTANCES_BINDING);
protocolMarshaller.marshall(updateLayerRequest.getLifecycleEventConfiguration(), LIFECYCLEEVENTCONFIGURATION_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(UpdateLayerRequest updateLayerRequest, ProtocolMarshaller protocolMarshaller) {
if (updateLayerRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(updateLayerRequest.getLayerId(), LAYERID_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateLayerRequest.getName(), NAME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateLayerRequest.getShortname(), SHORTNAME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateLayerRequest.getAttributes(), ATTRIBUTES_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateLayerRequest.getCloudWatchLogsConfiguration(), CLOUDWATCHLOGSCONFIGURATION_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateLayerRequest.getCustomInstanceProfileArn(), CUSTOMINSTANCEPROFILEARN_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateLayerRequest.getCustomJson(), CUSTOMJSON_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateLayerRequest.getCustomSecurityGroupIds(), CUSTOMSECURITYGROUPIDS_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateLayerRequest.getPackages(), PACKAGES_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateLayerRequest.getVolumeConfigurations(), VOLUMECONFIGURATIONS_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateLayerRequest.getEnableAutoHealing(), ENABLEAUTOHEALING_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateLayerRequest.getAutoAssignElasticIps(), AUTOASSIGNELASTICIPS_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateLayerRequest.getAutoAssignPublicIps(), AUTOASSIGNPUBLICIPS_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateLayerRequest.getCustomRecipes(), CUSTOMRECIPES_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateLayerRequest.getInstallUpdatesOnBoot(), INSTALLUPDATESONBOOT_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateLayerRequest.getUseEbsOptimizedInstances(), USEEBSOPTIMIZEDINSTANCES_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateLayerRequest.getLifecycleEventConfiguration(), LIFECYCLEEVENTCONFIGURATION_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public static <V> LinkedList<ResultTerm<V>> segment(final char[] charArray, AhoCorasickDoubleArrayTrie<V> trie)
{
LinkedList<ResultTerm<V>> termList = new LinkedList<ResultTerm<V>>();
final ResultTerm<V>[] wordNet = new ResultTerm[charArray.length];
trie.parseText(charArray, new AhoCorasickDoubleArrayTrie.IHit<V>()
{
@Override
public void hit(int begin, int end, V value)
{
if (wordNet[begin] == null || wordNet[begin].word.length() < end - begin)
{
wordNet[begin] = new ResultTerm<V>(new String(charArray, begin, end - begin), value, begin);
}
}
});
for (int i = 0; i < charArray.length;)
{
if (wordNet[i] == null)
{
StringBuilder sbTerm = new StringBuilder();
int offset = i;
while (i < charArray.length && wordNet[i] == null)
{
sbTerm.append(charArray[i]);
++i;
}
termList.add(new ResultTerm<V>(sbTerm.toString(), null, offset));
}
else
{
termList.add(wordNet[i]);
i += wordNet[i].word.length();
}
}
return termList;
} } | public class class_name {
public static <V> LinkedList<ResultTerm<V>> segment(final char[] charArray, AhoCorasickDoubleArrayTrie<V> trie)
{
LinkedList<ResultTerm<V>> termList = new LinkedList<ResultTerm<V>>();
final ResultTerm<V>[] wordNet = new ResultTerm[charArray.length];
trie.parseText(charArray, new AhoCorasickDoubleArrayTrie.IHit<V>()
{
@Override
public void hit(int begin, int end, V value)
{
if (wordNet[begin] == null || wordNet[begin].word.length() < end - begin)
{
wordNet[begin] = new ResultTerm<V>(new String(charArray, begin, end - begin), value, begin); // depends on control dependency: [if], data = [none]
}
}
});
for (int i = 0; i < charArray.length;)
{
if (wordNet[i] == null)
{
StringBuilder sbTerm = new StringBuilder();
int offset = i;
while (i < charArray.length && wordNet[i] == null)
{
sbTerm.append(charArray[i]); // depends on control dependency: [while], data = [none]
++i; // depends on control dependency: [while], data = [none]
}
termList.add(new ResultTerm<V>(sbTerm.toString(), null, offset)); // depends on control dependency: [if], data = [none]
}
else
{
termList.add(wordNet[i]); // depends on control dependency: [if], data = [(wordNet[i]]
i += wordNet[i].word.length(); // depends on control dependency: [if], data = [none]
}
}
return termList;
} } |
public class class_name {
public String format(final MessageResolver messageResolver, final MessageInterpolator messageInterpolator) {
for(String code : getCodes()) {
Optional<String> message = messageResolver.getMessage(code);
if(message.isPresent()) {
return messageInterpolator.interpolate(message.get(), getVariables(), true, messageResolver);
}
}
// デフォルトメッセージはBeanValidationのとき変数を追加している場合があるため、再度フォーマットする。
return messageInterpolator.interpolate(getDefaultMessage(), getVariables(), true, messageResolver);
} } | public class class_name {
public String format(final MessageResolver messageResolver, final MessageInterpolator messageInterpolator) {
for(String code : getCodes()) {
Optional<String> message = messageResolver.getMessage(code);
if(message.isPresent()) {
return messageInterpolator.interpolate(message.get(), getVariables(), true, messageResolver);
// depends on control dependency: [if], data = [none]
}
}
// デフォルトメッセージはBeanValidationのとき変数を追加している場合があるため、再度フォーマットする。
return messageInterpolator.interpolate(getDefaultMessage(), getVariables(), true, messageResolver);
} } |
public class class_name {
public List<List<RESOURCE>> partitionMediumResources(List<RESOURCE> resources, Map<Long, Integer> days) {
if (resources.isEmpty()) {
return Collections.<List<RESOURCE>> emptyList();
}
RESOURCE firstRes = resources.get(0);
int lastDay = days.get(new Long(getDateLastModified(firstRes))).intValue();
List<List<RESOURCE>> result = new ArrayList<List<RESOURCE>>();
List<RESOURCE> currentGroup = new ArrayList<RESOURCE>();
result.add(currentGroup);
for (RESOURCE res : resources) {
LOG.debug("Processing medium-aged resource " + getRootPath(res));
int day = days.get(new Long(getDateLastModified(res))).intValue();
if (day != lastDay) {
LOG.debug("=== new group ===");
currentGroup = new ArrayList<RESOURCE>();
result.add(currentGroup);
}
lastDay = day;
currentGroup.add(res);
}
return result;
} } | public class class_name {
public List<List<RESOURCE>> partitionMediumResources(List<RESOURCE> resources, Map<Long, Integer> days) {
if (resources.isEmpty()) {
return Collections.<List<RESOURCE>> emptyList(); // depends on control dependency: [if], data = [none]
}
RESOURCE firstRes = resources.get(0);
int lastDay = days.get(new Long(getDateLastModified(firstRes))).intValue();
List<List<RESOURCE>> result = new ArrayList<List<RESOURCE>>();
List<RESOURCE> currentGroup = new ArrayList<RESOURCE>();
result.add(currentGroup);
for (RESOURCE res : resources) {
LOG.debug("Processing medium-aged resource " + getRootPath(res)); // depends on control dependency: [for], data = [res]
int day = days.get(new Long(getDateLastModified(res))).intValue();
if (day != lastDay) {
LOG.debug("=== new group ==="); // depends on control dependency: [if], data = [none]
currentGroup = new ArrayList<RESOURCE>(); // depends on control dependency: [if], data = [none]
result.add(currentGroup); // depends on control dependency: [if], data = [none]
}
lastDay = day; // depends on control dependency: [for], data = [none]
currentGroup.add(res); // depends on control dependency: [for], data = [res]
}
return result;
} } |
public class class_name {
public boolean addHandler(PacketHandler handler) {
synchronized (this.handlers){
if (!handlers.contains(handler)) {
handlers.add(handler);
this.count.incrementAndGet();
Collections.sort(this.handlers, REVERSE_COMPARATOR);
return true;
}
return false;
}
} } | public class class_name {
public boolean addHandler(PacketHandler handler) {
synchronized (this.handlers){
if (!handlers.contains(handler)) {
handlers.add(handler); // depends on control dependency: [if], data = [none]
this.count.incrementAndGet(); // depends on control dependency: [if], data = [none]
Collections.sort(this.handlers, REVERSE_COMPARATOR); // depends on control dependency: [if], data = [none]
return true; // depends on control dependency: [if], data = [none]
}
return false;
}
} } |
public class class_name {
public void setUrl(String url) {
if (url != null && url.trim().length() > 0) {
if (Url.isUrl(url)) {
this.url = url;
} else {
throw new IllegalArgumentException(url + " is not a valid URL.");
}
}
} } | public class class_name {
public void setUrl(String url) {
if (url != null && url.trim().length() > 0) {
if (Url.isUrl(url)) {
this.url = url; // depends on control dependency: [if], data = [none]
} else {
throw new IllegalArgumentException(url + " is not a valid URL.");
}
}
} } |
public class class_name {
@Override
public Character doDeserialize( JsonReader reader, JsonDeserializationContext ctx, JsonDeserializerParameters params ) {
if ( JsonToken.NUMBER.equals( reader.peek() ) ) {
return (char) reader.nextInt();
} else {
String value = reader.nextString();
if ( value.isEmpty() ) {
return null;
}
return value.charAt( 0 );
}
} } | public class class_name {
@Override
public Character doDeserialize( JsonReader reader, JsonDeserializationContext ctx, JsonDeserializerParameters params ) {
if ( JsonToken.NUMBER.equals( reader.peek() ) ) {
return (char) reader.nextInt(); // depends on control dependency: [if], data = [none]
} else {
String value = reader.nextString();
if ( value.isEmpty() ) {
return null; // depends on control dependency: [if], data = [none]
}
return value.charAt( 0 ); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@Override
protected void preparePaintComponent(final Request request) {
if (!isInitialised()) {
MyDataBean myBean = new MyDataBean();
myBean.setName("My Bean");
myBean.addBean(new SomeDataBean("blah", "more blah"));
myBean.addBean(new SomeDataBean());
repeaterFields.setData(myBean);
setInitialised(true);
}
} } | public class class_name {
@Override
protected void preparePaintComponent(final Request request) {
if (!isInitialised()) {
MyDataBean myBean = new MyDataBean();
myBean.setName("My Bean"); // depends on control dependency: [if], data = [none]
myBean.addBean(new SomeDataBean("blah", "more blah")); // depends on control dependency: [if], data = [none]
myBean.addBean(new SomeDataBean()); // depends on control dependency: [if], data = [none]
repeaterFields.setData(myBean); // depends on control dependency: [if], data = [none]
setInitialised(true); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public boolean skip(ParserToken token) {
if (token == null) {
return false;
}
if (isPrefix(token)) {
if (prefixHandlers.containsKey(token.getType())) {
return prefixHandlers.get(token.getType()) instanceof PrefixSkipHandler;
} else {
return prefixSkipHandler != null;
}
}
return false;
} } | public class class_name {
public boolean skip(ParserToken token) {
if (token == null) {
return false; // depends on control dependency: [if], data = [none]
}
if (isPrefix(token)) {
if (prefixHandlers.containsKey(token.getType())) {
return prefixHandlers.get(token.getType()) instanceof PrefixSkipHandler; // depends on control dependency: [if], data = [none]
} else {
return prefixSkipHandler != null; // depends on control dependency: [if], data = [none]
}
}
return false;
} } |
public class class_name {
@Synchronized
final void start(Optional<String> name) {
if (!this.isStarted) {
this.startImpl(name);
this.isStarted = true;
}
} } | public class class_name {
@Synchronized
final void start(Optional<String> name) {
if (!this.isStarted) {
this.startImpl(name); // depends on control dependency: [if], data = [none]
this.isStarted = true; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static String toLowerFirstChar(final String s)
{
if (Character.isLowerCase(s.charAt(0)))
{
return s;
}
return Character.toLowerCase(s.charAt(0)) + s.substring(1);
} } | public class class_name {
public static String toLowerFirstChar(final String s)
{
if (Character.isLowerCase(s.charAt(0)))
{
return s; // depends on control dependency: [if], data = [none]
}
return Character.toLowerCase(s.charAt(0)) + s.substring(1);
} } |
public class class_name {
private double algorithm1(String[] tTokens, String[] uTokens,
double[] tIdfArray, double[] uIdfArray) {
ArrayList candidateList = obtainCandidateList(tTokens, uTokens, tIdfArray,
uIdfArray);
sortCandidateList(candidateList);
double scoreValue = 0.0;
HashMap tMap = new HashMap(),
uMap = new HashMap();
Iterator it = candidateList.iterator();
while (it.hasNext()) {
Candidates actualCandidates = (Candidates) it.next();
Integer tPos = new Integer(actualCandidates.getTPos());
Integer uPos = new Integer(actualCandidates.getUPos());
if ( (!tMap.containsKey(tPos)) &&
(!uMap.containsKey(uPos))) {
double actualScore = actualCandidates.getScore();
scoreValue += actualScore;
tMap.put(tPos, null);
uMap.put(uPos, null);
}
}
return scoreValue;
} } | public class class_name {
private double algorithm1(String[] tTokens, String[] uTokens,
double[] tIdfArray, double[] uIdfArray) {
ArrayList candidateList = obtainCandidateList(tTokens, uTokens, tIdfArray,
uIdfArray);
sortCandidateList(candidateList);
double scoreValue = 0.0;
HashMap tMap = new HashMap(),
uMap = new HashMap();
Iterator it = candidateList.iterator();
while (it.hasNext()) {
Candidates actualCandidates = (Candidates) it.next();
Integer tPos = new Integer(actualCandidates.getTPos());
Integer uPos = new Integer(actualCandidates.getUPos());
if ( (!tMap.containsKey(tPos)) &&
(!uMap.containsKey(uPos))) {
double actualScore = actualCandidates.getScore();
scoreValue += actualScore;
// depends on control dependency: [if], data = [none]
tMap.put(tPos, null);
// depends on control dependency: [if], data = [none]
uMap.put(uPos, null);
// depends on control dependency: [if], data = [none]
}
}
return scoreValue;
} } |
public class class_name {
void sortVocab()
{
Arrays.sort(vocab, 0, vocabSize);
// re-build vocabIndexMap
final int size = vocabSize;
trainWords = 0;
table = new int[size];
for (int i = 0; i < size; i++)
{
VocabWord word = vocab[i];
// Words occuring less than min_count times will be discarded from the vocab
if (word.cn < config.getMinCount())
{
table[vocabIndexMap.get(word.word)] = -4;
vocabSize--;
}
else
{
// Hash will be re-computed, as after the sorting it is not actual
table[vocabIndexMap.get(word.word)] = i;
setVocabIndexMap(word, i);
}
}
// lose weight
vocabIndexMap = null;
VocabWord[] nvocab = new VocabWord[vocabSize];
System.arraycopy(vocab, 0, nvocab, 0, vocabSize);
} } | public class class_name {
void sortVocab()
{
Arrays.sort(vocab, 0, vocabSize);
// re-build vocabIndexMap
final int size = vocabSize;
trainWords = 0;
table = new int[size];
for (int i = 0; i < size; i++)
{
VocabWord word = vocab[i];
// Words occuring less than min_count times will be discarded from the vocab
if (word.cn < config.getMinCount())
{
table[vocabIndexMap.get(word.word)] = -4; // depends on control dependency: [if], data = [none]
vocabSize--; // depends on control dependency: [if], data = [none]
}
else
{
// Hash will be re-computed, as after the sorting it is not actual
table[vocabIndexMap.get(word.word)] = i; // depends on control dependency: [if], data = [none]
setVocabIndexMap(word, i); // depends on control dependency: [if], data = [none]
}
}
// lose weight
vocabIndexMap = null;
VocabWord[] nvocab = new VocabWord[vocabSize];
System.arraycopy(vocab, 0, nvocab, 0, vocabSize);
} } |
public class class_name {
public CreateClusterRequest withStep(StepConfig step) {
if (this.steps == null) {
this.steps = new ArrayList<StepConfig>();
}
this.steps.add(step);
return this;
} } | public class class_name {
public CreateClusterRequest withStep(StepConfig step) {
if (this.steps == null) {
this.steps = new ArrayList<StepConfig>(); // depends on control dependency: [if], data = [none]
}
this.steps.add(step);
return this;
} } |
public class class_name {
public HashMultimap<WordNetRelation, Sense> getRelatedSenses(@NonNull Sense sense) {
HashMultimap<WordNetRelation, Sense> map = HashMultimap.create();
for (Map.Entry<Sense, WordNetRelation> entry : db
.getRelations(sense)
.entrySet()) {
map.put(entry.getValue(), entry.getKey());
}
return map;
} } | public class class_name {
public HashMultimap<WordNetRelation, Sense> getRelatedSenses(@NonNull Sense sense) {
HashMultimap<WordNetRelation, Sense> map = HashMultimap.create();
for (Map.Entry<Sense, WordNetRelation> entry : db
.getRelations(sense)
.entrySet()) {
map.put(entry.getValue(), entry.getKey()); // depends on control dependency: [for], data = [entry]
}
return map;
} } |
public class class_name {
@SuppressWarnings("unchecked")
public ManagerResponse buildResponse(Class<? extends ManagerResponse> responseClass, Map<String, Object> attributes)
{
final ManagerResponse response;
final String responseType = (String) attributes.get(RESPONSE_KEY);
if (RESPONSE_TYPE_ERROR.equalsIgnoreCase(responseType))
{
response = new ManagerError();
}
else if (responseClass == null)
{
response = new ManagerResponse();
}
else
{
try
{
response = responseClass.newInstance();
}
catch (Exception ex)
{
logger.error("Unable to create new instance of " + responseClass.getName(), ex);
return null;
}
}
setAttributes(response, attributes, ignoredAttributes);
if (response instanceof CommandResponse)
{
final CommandResponse commandResponse = (CommandResponse) response;
final List<String> result = new ArrayList<>();
//For Asterisk 14
if(attributes.get(OUTPUT_RESPONSE_KEY) != null){
if(attributes.get(OUTPUT_RESPONSE_KEY) instanceof List){
for(String tmp : (List<String>)attributes.get(OUTPUT_RESPONSE_KEY)){
if(tmp != null && tmp.length() != 0){
result.add(tmp.trim());
}
}
}else{
result.add((String)attributes.get(OUTPUT_RESPONSE_KEY));
}
}else{
for (String resultLine : ((String) attributes.get(ManagerReader.COMMAND_RESULT_RESPONSE_KEY)).split("\n"))
{
// on error there is a leading space
if (!resultLine.equals("--END COMMAND--") && !resultLine.equals(" --END COMMAND--"))
{
result.add(resultLine);
}
}
}
commandResponse.setResult(result);
}
if (response.getResponse() != null && attributes.get(PROXY_RESPONSE_KEY) != null)
{
response.setResponse((String) attributes.get(PROXY_RESPONSE_KEY));
}
// make the map of all attributes available to the response
// but clone it as it is reused by the ManagerReader
response.setAttributes(new HashMap<>(attributes));
return response;
} } | public class class_name {
@SuppressWarnings("unchecked")
public ManagerResponse buildResponse(Class<? extends ManagerResponse> responseClass, Map<String, Object> attributes)
{
final ManagerResponse response;
final String responseType = (String) attributes.get(RESPONSE_KEY);
if (RESPONSE_TYPE_ERROR.equalsIgnoreCase(responseType))
{
response = new ManagerError(); // depends on control dependency: [if], data = [none]
}
else if (responseClass == null)
{
response = new ManagerResponse(); // depends on control dependency: [if], data = [none]
}
else
{
try
{
response = responseClass.newInstance(); // depends on control dependency: [try], data = [none]
}
catch (Exception ex)
{
logger.error("Unable to create new instance of " + responseClass.getName(), ex);
return null;
} // depends on control dependency: [catch], data = [none]
}
setAttributes(response, attributes, ignoredAttributes);
if (response instanceof CommandResponse)
{
final CommandResponse commandResponse = (CommandResponse) response;
final List<String> result = new ArrayList<>();
//For Asterisk 14
if(attributes.get(OUTPUT_RESPONSE_KEY) != null){
if(attributes.get(OUTPUT_RESPONSE_KEY) instanceof List){
for(String tmp : (List<String>)attributes.get(OUTPUT_RESPONSE_KEY)){
if(tmp != null && tmp.length() != 0){
result.add(tmp.trim()); // depends on control dependency: [if], data = [(tmp]
}
}
}else{
result.add((String)attributes.get(OUTPUT_RESPONSE_KEY)); // depends on control dependency: [if], data = [none]
}
}else{
for (String resultLine : ((String) attributes.get(ManagerReader.COMMAND_RESULT_RESPONSE_KEY)).split("\n"))
{
// on error there is a leading space
if (!resultLine.equals("--END COMMAND--") && !resultLine.equals(" --END COMMAND--"))
{
result.add(resultLine); // depends on control dependency: [if], data = [none]
}
}
}
commandResponse.setResult(result); // depends on control dependency: [if], data = [none]
}
if (response.getResponse() != null && attributes.get(PROXY_RESPONSE_KEY) != null)
{
response.setResponse((String) attributes.get(PROXY_RESPONSE_KEY)); // depends on control dependency: [if], data = [none]
}
// make the map of all attributes available to the response
// but clone it as it is reused by the ManagerReader
response.setAttributes(new HashMap<>(attributes));
return response;
} } |
public class class_name {
@SuppressWarnings({"unused", "WeakerAccess"})
protected static CleverTapInstanceConfig createInstance(@NonNull String jsonString){
try {
return new CleverTapInstanceConfig(jsonString);
} catch (Throwable t) {
return null;
}
} } | public class class_name {
@SuppressWarnings({"unused", "WeakerAccess"})
protected static CleverTapInstanceConfig createInstance(@NonNull String jsonString){
try {
return new CleverTapInstanceConfig(jsonString); // depends on control dependency: [try], data = [none]
} catch (Throwable t) {
return null;
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
@Override
public void commitStatements() {
if (log.isDebugEnabled()) {
log.debug(String.format("Received transaction commit: '%s':%n",
endpointConfiguration.getServerConfiguration().getDatabaseName()));
}
if(!endpointConfiguration.isAutoTransactionHandling()){
handleMessageAndCheckResponse(JdbcMessage.commitTransaction());
}
} } | public class class_name {
@Override
public void commitStatements() {
if (log.isDebugEnabled()) {
log.debug(String.format("Received transaction commit: '%s':%n",
endpointConfiguration.getServerConfiguration().getDatabaseName())); // depends on control dependency: [if], data = [none]
}
if(!endpointConfiguration.isAutoTransactionHandling()){
handleMessageAndCheckResponse(JdbcMessage.commitTransaction()); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public java.util.List<ScheduledInstancesIpv6Address> getIpv6Addresses() {
if (ipv6Addresses == null) {
ipv6Addresses = new com.amazonaws.internal.SdkInternalList<ScheduledInstancesIpv6Address>();
}
return ipv6Addresses;
} } | public class class_name {
public java.util.List<ScheduledInstancesIpv6Address> getIpv6Addresses() {
if (ipv6Addresses == null) {
ipv6Addresses = new com.amazonaws.internal.SdkInternalList<ScheduledInstancesIpv6Address>(); // depends on control dependency: [if], data = [none]
}
return ipv6Addresses;
} } |
public class class_name {
public void marshall(TtmlDestinationSettings ttmlDestinationSettings, ProtocolMarshaller protocolMarshaller) {
if (ttmlDestinationSettings == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(ttmlDestinationSettings.getStyleControl(), STYLECONTROL_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(TtmlDestinationSettings ttmlDestinationSettings, ProtocolMarshaller protocolMarshaller) {
if (ttmlDestinationSettings == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(ttmlDestinationSettings.getStyleControl(), STYLECONTROL_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
private void initTopicTree() {
try {
Object data = view.getViewData();
if (!(data instanceof TopicTree)) {
return;
}
TopicTree topicTree = (TopicTree) data;
HelpTopicNode baseNode;
if (helpViewType == HelpViewType.TOC) {
HelpTopic topic = new HelpTopic(null, hs.getName(), hs.getName());
baseNode = new HelpTopicNode(topic);
rootNode.addChild(baseNode);
} else {
baseNode = rootNode;
}
initTopicTree(baseNode, topicTree.getRoot());
} catch (IOException e) {
return;
}
} } | public class class_name {
private void initTopicTree() {
try {
Object data = view.getViewData();
if (!(data instanceof TopicTree)) {
return; // depends on control dependency: [if], data = [none]
}
TopicTree topicTree = (TopicTree) data;
HelpTopicNode baseNode;
if (helpViewType == HelpViewType.TOC) {
HelpTopic topic = new HelpTopic(null, hs.getName(), hs.getName());
baseNode = new HelpTopicNode(topic); // depends on control dependency: [if], data = [none]
rootNode.addChild(baseNode); // depends on control dependency: [if], data = [none]
} else {
baseNode = rootNode; // depends on control dependency: [if], data = [none]
}
initTopicTree(baseNode, topicTree.getRoot()); // depends on control dependency: [try], data = [none]
} catch (IOException e) {
return;
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public void subtract (Area area) {
if (area == null || isEmpty() || area.isEmpty()) {
return;
}
if (isPolygonal() && area.isPolygonal()) {
subtractPolygon(area);
} else {
subtractCurvePolygon(area);
}
if (areaBoundsSquare() < GeometryUtil.EPSILON) {
reset();
}
} } | public class class_name {
public void subtract (Area area) {
if (area == null || isEmpty() || area.isEmpty()) {
return; // depends on control dependency: [if], data = [none]
}
if (isPolygonal() && area.isPolygonal()) {
subtractPolygon(area); // depends on control dependency: [if], data = [none]
} else {
subtractCurvePolygon(area); // depends on control dependency: [if], data = [none]
}
if (areaBoundsSquare() < GeometryUtil.EPSILON) {
reset(); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public String findSymbol(char[] buffer, int start, int len, int hash)
{
// Sanity check:
if (len < 1) {
return EMPTY_STRING;
}
hash &= mIndexMask;
String sym = mSymbols[hash];
// Optimal case; checking existing primary symbol for hash index:
if (sym != null) {
// Let's inline primary String equality checking:
if (sym.length() == len) {
int i = 0;
do {
if (sym.charAt(i) != buffer[start+i]) {
break;
}
} while (++i < len);
// Optimal case; primary match found
if (i == len) {
return sym;
}
}
// How about collision bucket?
Bucket b = mBuckets[hash >> 1];
if (b != null) {
sym = b.find(buffer, start, len);
if (sym != null) {
return sym;
}
}
}
// Need to expand?
if (mSize >= mSizeThreshold) {
rehash();
/* Need to recalc hash; rare occurence (index mask has been
* recalculated as part of rehash)
*/
hash = calcHash(buffer, start, len) & mIndexMask;
} else if (!mDirty) {
// Or perhaps we need to do copy-on-write?
copyArrays();
mDirty = true;
}
++mSize;
String newSymbol = new String(buffer, start, len);
if (mInternStrings) {
newSymbol = newSymbol.intern();
}
// Ok; do we need to add primary entry, or a bucket?
if (mSymbols[hash] == null) {
mSymbols[hash] = newSymbol;
} else {
int bix = hash >> 1;
mBuckets[bix] = new Bucket(newSymbol, mBuckets[bix]);
}
return newSymbol;
} } | public class class_name {
public String findSymbol(char[] buffer, int start, int len, int hash)
{
// Sanity check:
if (len < 1) {
return EMPTY_STRING; // depends on control dependency: [if], data = [none]
}
hash &= mIndexMask;
String sym = mSymbols[hash];
// Optimal case; checking existing primary symbol for hash index:
if (sym != null) {
// Let's inline primary String equality checking:
if (sym.length() == len) {
int i = 0;
do {
if (sym.charAt(i) != buffer[start+i]) {
break;
}
} while (++i < len);
// Optimal case; primary match found
if (i == len) {
return sym; // depends on control dependency: [if], data = [none]
}
}
// How about collision bucket?
Bucket b = mBuckets[hash >> 1];
if (b != null) {
sym = b.find(buffer, start, len); // depends on control dependency: [if], data = [(b]
if (sym != null) {
return sym; // depends on control dependency: [if], data = [none]
}
}
}
// Need to expand?
if (mSize >= mSizeThreshold) {
rehash(); // depends on control dependency: [if], data = [none]
/* Need to recalc hash; rare occurence (index mask has been
* recalculated as part of rehash)
*/
hash = calcHash(buffer, start, len) & mIndexMask; // depends on control dependency: [if], data = [none]
} else if (!mDirty) {
// Or perhaps we need to do copy-on-write?
copyArrays(); // depends on control dependency: [if], data = [none]
mDirty = true; // depends on control dependency: [if], data = [none]
}
++mSize;
String newSymbol = new String(buffer, start, len);
if (mInternStrings) {
newSymbol = newSymbol.intern(); // depends on control dependency: [if], data = [none]
}
// Ok; do we need to add primary entry, or a bucket?
if (mSymbols[hash] == null) {
mSymbols[hash] = newSymbol; // depends on control dependency: [if], data = [none]
} else {
int bix = hash >> 1;
mBuckets[bix] = new Bucket(newSymbol, mBuckets[bix]); // depends on control dependency: [if], data = [none]
}
return newSymbol;
} } |
public class class_name {
private Range<String> transformClosed(Range<String> range) {
if (range.hasLowerBound()) {
String lower = range.lowerEndpoint();
// the special case, (a, _] and apply(a) == a is handled by skipping a
String afterLower = domain.next(apply(lower));
if (afterLower != null) {
if (range.hasUpperBound()) {
String upper = range.upperEndpoint();
String upperImage = apply(upper);
// meaning: at the endpoint
if (upper.equals(upperImage) && range.isUpperBoundClosed()) {
// include upper
return Ranges.closed(afterLower, upperImage);
} else {
String beforeUpper = domain.previous(upperImage);
if (afterLower.compareTo(beforeUpper) <= 0) {
return Ranges.closed(afterLower, beforeUpper);
}
}
} else {
return Ranges.atLeast(afterLower);
}
}
} else if (range.hasUpperBound()) {
String upper = range.upperEndpoint();
String upperImage = apply(upper);
if (upper.equals(upperImage) && range.isUpperBoundClosed()) {
// include upper
return Ranges.atMost(upperImage);
} else {
String beforeUpper = domain.previous(upperImage);
if (beforeUpper != null) {
return Ranges.atMost(beforeUpper);
}
}
}
return null;
} } | public class class_name {
private Range<String> transformClosed(Range<String> range) {
if (range.hasLowerBound()) {
String lower = range.lowerEndpoint();
// the special case, (a, _] and apply(a) == a is handled by skipping a
String afterLower = domain.next(apply(lower));
if (afterLower != null) {
if (range.hasUpperBound()) {
String upper = range.upperEndpoint();
String upperImage = apply(upper);
// meaning: at the endpoint
if (upper.equals(upperImage) && range.isUpperBoundClosed()) {
// include upper
return Ranges.closed(afterLower, upperImage); // depends on control dependency: [if], data = [none]
} else {
String beforeUpper = domain.previous(upperImage);
if (afterLower.compareTo(beforeUpper) <= 0) {
return Ranges.closed(afterLower, beforeUpper); // depends on control dependency: [if], data = [none]
}
}
} else {
return Ranges.atLeast(afterLower); // depends on control dependency: [if], data = [none]
}
}
} else if (range.hasUpperBound()) {
String upper = range.upperEndpoint();
String upperImage = apply(upper);
if (upper.equals(upperImage) && range.isUpperBoundClosed()) {
// include upper
return Ranges.atMost(upperImage); // depends on control dependency: [if], data = [none]
} else {
String beforeUpper = domain.previous(upperImage);
if (beforeUpper != null) {
return Ranges.atMost(beforeUpper); // depends on control dependency: [if], data = [(beforeUpper]
}
}
}
return null;
} } |
public class class_name {
@PublicEvolving
public SplitDataProperties<OUT> getSplitDataProperties() {
if (this.splitDataProperties == null) {
this.splitDataProperties = new SplitDataProperties<OUT>(this);
}
return this.splitDataProperties;
} } | public class class_name {
@PublicEvolving
public SplitDataProperties<OUT> getSplitDataProperties() {
if (this.splitDataProperties == null) {
this.splitDataProperties = new SplitDataProperties<OUT>(this); // depends on control dependency: [if], data = [none]
}
return this.splitDataProperties;
} } |
public class class_name {
protected static boolean matchesOne(String str, Pattern... patterns) {
for (Pattern pattern : patterns) {
if (pattern.matcher(str).matches()) {
return true;
}
}
return false;
} } | public class class_name {
protected static boolean matchesOne(String str, Pattern... patterns) {
for (Pattern pattern : patterns) {
if (pattern.matcher(str).matches()) {
return true;
// depends on control dependency: [if], data = [none]
}
}
return false;
} } |
public class class_name {
public boolean contains(Object value) {
if (value instanceof Integer) {
return (Integer) value >= getFrom() && (Integer) value <= getTo();
}
if (value instanceof BigInteger) {
BigInteger bigint = (BigInteger) value;
return bigint.compareTo(BigInteger.valueOf(getFrom())) >= 0 &&
bigint.compareTo(BigInteger.valueOf(getTo())) <= 0;
}
return false;
} } | public class class_name {
public boolean contains(Object value) {
if (value instanceof Integer) {
return (Integer) value >= getFrom() && (Integer) value <= getTo(); // depends on control dependency: [if], data = [none]
}
if (value instanceof BigInteger) {
BigInteger bigint = (BigInteger) value;
return bigint.compareTo(BigInteger.valueOf(getFrom())) >= 0 &&
bigint.compareTo(BigInteger.valueOf(getTo())) <= 0; // depends on control dependency: [if], data = [none]
}
return false;
} } |
public class class_name {
@Override
public void shutdown() {
if (!active.compareAndSet(true, false) ) {
throw new IllegalArgumentException("shutdown() called for inactive pool: " + getName());
}
log.info("Shutdown triggered on {}", getName());
Set<HClient> clients = new HashSet<HClient>();
availableClientQueue.drainTo(clients);
if ( clients.size() > 0 ) {
for (HClient hClient : clients) {
hClient.close();
}
}
log.info("Shutdown complete on {}", getName());
} } | public class class_name {
@Override
public void shutdown() {
if (!active.compareAndSet(true, false) ) {
throw new IllegalArgumentException("shutdown() called for inactive pool: " + getName());
}
log.info("Shutdown triggered on {}", getName());
Set<HClient> clients = new HashSet<HClient>();
availableClientQueue.drainTo(clients);
if ( clients.size() > 0 ) {
for (HClient hClient : clients) {
hClient.close(); // depends on control dependency: [for], data = [hClient]
}
}
log.info("Shutdown complete on {}", getName());
} } |
public class class_name {
public static Config propertiesToConfig(Properties properties, Optional<String> prefix) {
Set<String> blacklistedKeys = new HashSet<>();
if (properties.containsKey(GOBBLIN_CONFIG_BLACKLIST_KEYS)) {
blacklistedKeys = new HashSet<>(Splitter.on(',').omitEmptyStrings().trimResults()
.splitToList(properties.getProperty(GOBBLIN_CONFIG_BLACKLIST_KEYS)));
}
Set<String> fullPrefixKeys = findFullPrefixKeys(properties, prefix);
ImmutableMap.Builder<String, Object> immutableMapBuilder = ImmutableMap.builder();
for (Map.Entry<Object, Object> entry : properties.entrySet()) {
String entryKey = entry.getKey().toString();
if (StringUtils.startsWith(entryKey, prefix.or(StringUtils.EMPTY)) &&
!blacklistedKeys.contains(entryKey)) {
if (fullPrefixKeys.contains(entryKey)) {
entryKey = sanitizeFullPrefixKey(entryKey);
} else if (entryKey.endsWith(STRIP_SUFFIX)) {
throw new RuntimeException("Properties are not allowed to end in " + STRIP_SUFFIX);
}
immutableMapBuilder.put(entryKey, entry.getValue());
}
}
return ConfigFactory.parseMap(immutableMapBuilder.build());
} } | public class class_name {
public static Config propertiesToConfig(Properties properties, Optional<String> prefix) {
Set<String> blacklistedKeys = new HashSet<>();
if (properties.containsKey(GOBBLIN_CONFIG_BLACKLIST_KEYS)) {
blacklistedKeys = new HashSet<>(Splitter.on(',').omitEmptyStrings().trimResults()
.splitToList(properties.getProperty(GOBBLIN_CONFIG_BLACKLIST_KEYS))); // depends on control dependency: [if], data = [none]
}
Set<String> fullPrefixKeys = findFullPrefixKeys(properties, prefix);
ImmutableMap.Builder<String, Object> immutableMapBuilder = ImmutableMap.builder();
for (Map.Entry<Object, Object> entry : properties.entrySet()) {
String entryKey = entry.getKey().toString();
if (StringUtils.startsWith(entryKey, prefix.or(StringUtils.EMPTY)) &&
!blacklistedKeys.contains(entryKey)) {
if (fullPrefixKeys.contains(entryKey)) {
entryKey = sanitizeFullPrefixKey(entryKey); // depends on control dependency: [if], data = [none]
} else if (entryKey.endsWith(STRIP_SUFFIX)) {
throw new RuntimeException("Properties are not allowed to end in " + STRIP_SUFFIX);
}
immutableMapBuilder.put(entryKey, entry.getValue()); // depends on control dependency: [if], data = [none]
}
}
return ConfigFactory.parseMap(immutableMapBuilder.build());
} } |
public class class_name {
private void ensureConsistency(final ADTNode<ADTState<I, O>, I, O> leaf) {
final ADTState<I, O> state = leaf.getHypothesisState();
final Word<I> as = state.getAccessSequence();
final Word<O> asOut = this.hypothesis.computeOutput(as);
ADTNode<ADTState<I, O>, I, O> iter = leaf;
while (iter != null) {
final Pair<Word<I>, Word<O>> trace = ADTUtil.buildTraceForNode(iter);
final Word<I> input = trace.getFirst();
final Word<O> output = trace.getSecond();
final Word<O> hypOut = this.hypothesis.computeStateOutput(state, input);
if (!hypOut.equals(output)) {
this.openCounterExamples.add(new DefaultQuery<>(as.concat(input), asOut.concat(output)));
}
iter = ADTUtil.getStartOfADS(iter).getParent();
}
} } | public class class_name {
private void ensureConsistency(final ADTNode<ADTState<I, O>, I, O> leaf) {
final ADTState<I, O> state = leaf.getHypothesisState();
final Word<I> as = state.getAccessSequence();
final Word<O> asOut = this.hypothesis.computeOutput(as);
ADTNode<ADTState<I, O>, I, O> iter = leaf;
while (iter != null) {
final Pair<Word<I>, Word<O>> trace = ADTUtil.buildTraceForNode(iter);
final Word<I> input = trace.getFirst();
final Word<O> output = trace.getSecond();
final Word<O> hypOut = this.hypothesis.computeStateOutput(state, input);
if (!hypOut.equals(output)) {
this.openCounterExamples.add(new DefaultQuery<>(as.concat(input), asOut.concat(output))); // depends on control dependency: [if], data = [none]
}
iter = ADTUtil.getStartOfADS(iter).getParent(); // depends on control dependency: [while], data = [(iter]
}
} } |
public class class_name {
public static double volume(SpatialComparable box) {
final int dim = box.getDimensionality();
double vol = 1.;
for(int i = 0; i < dim; i++) {
double delta = box.getMax(i) - box.getMin(i);
if(delta == 0.) {
return 0.;
}
vol *= delta;
}
return vol;
} } | public class class_name {
public static double volume(SpatialComparable box) {
final int dim = box.getDimensionality();
double vol = 1.;
for(int i = 0; i < dim; i++) {
double delta = box.getMax(i) - box.getMin(i);
if(delta == 0.) {
return 0.; // depends on control dependency: [if], data = [none]
}
vol *= delta; // depends on control dependency: [for], data = [none]
}
return vol;
} } |
public class class_name {
protected int getNextNode()
{
if (m_foundLast)
return DTM.NULL;
if (m_isFresh)
{
m_currentNode = m_traverser.first(m_root);
m_isFresh = false;
}
// I shouldn't have to do this the check for current node, I think.
// numbering\numbering24.xsl fails if I don't do this. I think
// it occurs as the walkers are backing up. -sb
else if(DTM.NULL != m_currentNode)
{
m_currentNode = m_traverser.next(m_root, m_currentNode);
}
if (DTM.NULL == m_currentNode)
this.m_foundLast = true;
return m_currentNode;
} } | public class class_name {
protected int getNextNode()
{
if (m_foundLast)
return DTM.NULL;
if (m_isFresh)
{
m_currentNode = m_traverser.first(m_root); // depends on control dependency: [if], data = [none]
m_isFresh = false; // depends on control dependency: [if], data = [none]
}
// I shouldn't have to do this the check for current node, I think.
// numbering\numbering24.xsl fails if I don't do this. I think
// it occurs as the walkers are backing up. -sb
else if(DTM.NULL != m_currentNode)
{
m_currentNode = m_traverser.next(m_root, m_currentNode); // depends on control dependency: [if], data = [m_currentNode)]
}
if (DTM.NULL == m_currentNode)
this.m_foundLast = true;
return m_currentNode;
} } |
public class class_name {
private void setFunctionArgs(final List<BELObject> args) {
if (args != null) {
this.functionArgs = args;
this.terms = new ArrayList<Term>();
this.parameters = new ArrayList<Parameter>();
for (final BELObject arg : functionArgs) {
if (arg instanceof Term) {
terms.add((Term) arg);
} else if (arg instanceof Parameter) {
parameters.add((Parameter) arg);
} else {
String err = arg.getClass().getName();
err = err.concat(" is not a valid function argument");
throw new UnsupportedOperationException(err);
}
}
} else {
this.functionArgs = null;
this.terms = null;
this.parameters = null;
}
} } | public class class_name {
private void setFunctionArgs(final List<BELObject> args) {
if (args != null) {
this.functionArgs = args; // depends on control dependency: [if], data = [none]
this.terms = new ArrayList<Term>(); // depends on control dependency: [if], data = [none]
this.parameters = new ArrayList<Parameter>(); // depends on control dependency: [if], data = [none]
for (final BELObject arg : functionArgs) {
if (arg instanceof Term) {
terms.add((Term) arg); // depends on control dependency: [if], data = [none]
} else if (arg instanceof Parameter) {
parameters.add((Parameter) arg); // depends on control dependency: [if], data = [none]
} else {
String err = arg.getClass().getName();
err = err.concat(" is not a valid function argument"); // depends on control dependency: [if], data = [none]
throw new UnsupportedOperationException(err);
}
}
} else {
this.functionArgs = null; // depends on control dependency: [if], data = [none]
this.terms = null; // depends on control dependency: [if], data = [none]
this.parameters = null; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public ServiceCall<RecognitionJobs> checkJobs(CheckJobsOptions checkJobsOptions) {
String[] pathSegments = { "v1/recognitions" };
RequestBuilder builder = RequestBuilder.get(RequestBuilder.constructHttpUrl(getEndPoint(), pathSegments));
Map<String, String> sdkHeaders = SdkCommon.getSdkHeaders("speech_to_text", "v1", "checkJobs");
for (Entry<String, String> header : sdkHeaders.entrySet()) {
builder.header(header.getKey(), header.getValue());
}
builder.header("Accept", "application/json");
if (checkJobsOptions != null) {
}
return createServiceCall(builder.build(), ResponseConverterUtils.getObject(RecognitionJobs.class));
} } | public class class_name {
public ServiceCall<RecognitionJobs> checkJobs(CheckJobsOptions checkJobsOptions) {
String[] pathSegments = { "v1/recognitions" };
RequestBuilder builder = RequestBuilder.get(RequestBuilder.constructHttpUrl(getEndPoint(), pathSegments));
Map<String, String> sdkHeaders = SdkCommon.getSdkHeaders("speech_to_text", "v1", "checkJobs");
for (Entry<String, String> header : sdkHeaders.entrySet()) {
builder.header(header.getKey(), header.getValue()); // depends on control dependency: [for], data = [header]
}
builder.header("Accept", "application/json");
if (checkJobsOptions != null) {
}
return createServiceCall(builder.build(), ResponseConverterUtils.getObject(RecognitionJobs.class));
} } |
public class class_name {
public void marshall(DeleteTagsRequest deleteTagsRequest, ProtocolMarshaller protocolMarshaller) {
if (deleteTagsRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(deleteTagsRequest.getResourceId(), RESOURCEID_BINDING);
protocolMarshaller.marshall(deleteTagsRequest.getTagKeys(), TAGKEYS_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(DeleteTagsRequest deleteTagsRequest, ProtocolMarshaller protocolMarshaller) {
if (deleteTagsRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(deleteTagsRequest.getResourceId(), RESOURCEID_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(deleteTagsRequest.getTagKeys(), TAGKEYS_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
private void addFailedWorkUnits(List<WorkUnit> previousWorkUnitsForRetry,
MultiWorkUnitWeightedQueue multiWorkUnitWeightedQueue) {
for (WorkUnit wu : previousWorkUnitsForRetry) {
try {
multiWorkUnitWeightedQueue.addWorkUnit(wu,
this.fs.getFileStatus(new Path(wu.getProp(ConfigurationKeys.SOURCE_FILEBASED_FILES_TO_PULL))).getLen());
} catch (IOException e) {
Throwables.propagate(e);
}
LOG.info(
"Will process file from previous workunit: " + wu.getProp(ConfigurationKeys.SOURCE_FILEBASED_FILES_TO_PULL));
this.fileCount++;
}
} } | public class class_name {
private void addFailedWorkUnits(List<WorkUnit> previousWorkUnitsForRetry,
MultiWorkUnitWeightedQueue multiWorkUnitWeightedQueue) {
for (WorkUnit wu : previousWorkUnitsForRetry) {
try {
multiWorkUnitWeightedQueue.addWorkUnit(wu,
this.fs.getFileStatus(new Path(wu.getProp(ConfigurationKeys.SOURCE_FILEBASED_FILES_TO_PULL))).getLen()); // depends on control dependency: [try], data = [none]
} catch (IOException e) {
Throwables.propagate(e);
} // depends on control dependency: [catch], data = [none]
LOG.info(
"Will process file from previous workunit: " + wu.getProp(ConfigurationKeys.SOURCE_FILEBASED_FILES_TO_PULL)); // depends on control dependency: [for], data = [none]
this.fileCount++; // depends on control dependency: [for], data = [none]
}
} } |
public class class_name {
/**
 * Rebuilds a fully-linked {@code AclImpl} for {@code currentIdentity} from the map of
 * partially-loaded ACLs, recursively resolving any {@code StubAclParent} placeholder into
 * the real parent ACL and re-homing every ACE onto the rebuilt instance (SEC-951).
 *
 * @param inputMap map from ACL identity to the (possibly stub-parented) ACL; must be non-empty
 * @param currentIdentity identity of the ACL to convert; must map to an {@code AclImpl}
 * @return the rebuilt {@code AclImpl} with a real parent chain and re-homed ACEs
 */
private AclImpl convert(Map<Serializable, Acl> inputMap, Long currentIdentity) {
    Assert.notEmpty(inputMap, "InputMap required");
    Assert.notNull(currentIdentity, "CurrentIdentity required");
    // Retrieve this Acl from the InputMap
    Acl uncastAcl = inputMap.get(currentIdentity);
    Assert.isInstanceOf(AclImpl.class, uncastAcl,
            "The inputMap contained a non-AclImpl");
    AclImpl inputAcl = (AclImpl) uncastAcl;
    Acl parent = inputAcl.getParentAcl();
    // instanceof already evaluates to false for null, so no separate null check is needed
    if (parent instanceof StubAclParent) {
        // Recursively resolve the placeholder into the fully-built parent ACL
        StubAclParent stubAclParent = (StubAclParent) parent;
        parent = convert(inputMap, stubAclParent.getId());
    }
    // Now we have the parent (if there is one), create the true AclImpl
    AclImpl result = new AclImpl(inputAcl.getObjectIdentity(),
            (Long) inputAcl.getId(), aclAuthorizationStrategy, grantingStrategy,
            parent, null, inputAcl.isEntriesInheriting(), inputAcl.getOwner());
    // Obtain the "aces" from the input ACL
    List<AccessControlEntryImpl> aces = readAces(inputAcl);
    // Presized: every input ACE is carried over to the "result" AclImpl instance
    List<AccessControlEntryImpl> acesNew = new ArrayList<>(aces.size());
    // Replace each nested AccessControlEntryImpl.getAcl() with the new "result" AclImpl
    // instance. This ensures StubAclParent instances are removed, as per SEC-951
    for (AccessControlEntryImpl ace : aces) {
        setAclOnAce(ace, result);
        acesNew.add(ace);
    }
    // Finally, now that the "aces" have been converted to have the "result" AclImpl
    // instance, modify the "result" AclImpl instance
    setAces(result, acesNew);
    return result;
} } | public class class_name {
// Rebuilds a fully-linked AclImpl for currentIdentity, resolving StubAclParent
// placeholders recursively and re-homing ACEs (SEC-951). The trailing
// "// depends on control dependency: ..." comments are machine-generated dependency
// labels ([if]/[for] = controlling construct); they are data, kept verbatim.
private AclImpl convert(Map<Serializable, Acl> inputMap, Long currentIdentity) {
    Assert.notEmpty(inputMap, "InputMap required");
    Assert.notNull(currentIdentity, "CurrentIdentity required");
    // Retrieve this Acl from the InputMap
    Acl uncastAcl = inputMap.get(currentIdentity);
    Assert.isInstanceOf(AclImpl.class, uncastAcl,
            "The inputMap contained a non-AclImpl");
    AclImpl inputAcl = (AclImpl) uncastAcl;
    Acl parent = inputAcl.getParentAcl();
    if ((parent != null) && parent instanceof StubAclParent) {
        // Lookup the parent
        StubAclParent stubAclParent = (StubAclParent) parent;
        parent = convert(inputMap, stubAclParent.getId()); // depends on control dependency: [if], data = [none]
    }
    // Now we have the parent (if there is one), create the true AclImpl
    AclImpl result = new AclImpl(inputAcl.getObjectIdentity(),
            (Long) inputAcl.getId(), aclAuthorizationStrategy, grantingStrategy,
            parent, null, inputAcl.isEntriesInheriting(), inputAcl.getOwner());
    // Copy the "aces" from the input to the destination
    // Obtain the "aces" from the input ACL
    List<AccessControlEntryImpl> aces = readAces(inputAcl);
    // Create a list in which to store the "aces" for the "result" AclImpl instance
    List<AccessControlEntryImpl> acesNew = new ArrayList<>();
    // Iterate over the "aces" input and replace each nested
    // AccessControlEntryImpl.getAcl() with the new "result" AclImpl instance
    // This ensures StubAclParent instances are removed, as per SEC-951
    for (AccessControlEntryImpl ace : aces) {
        setAclOnAce(ace, result); // depends on control dependency: [for], data = [ace]
        acesNew.add(ace); // depends on control dependency: [for], data = [ace]
    }
    // Finally, now that the "aces" have been converted to have the "result" AclImpl
    // instance, modify the "result" AclImpl instance
    setAces(result, acesNew);
    return result;
} } |
public class class_name {
/**
 * Transforms a content-spec level node (appendix/chapter/part/process/section/preface/
 * initial-content) and, recursively, all of its children into the corresponding
 * {@code Level} tree.
 *
 * @param node                  the level node to transform
 * @param nodes                 accumulator mapping node id to transformed node, consulted
 *                              later when relationships are resolved
 * @param targetTopics          accumulator of topics addressable by target id
 * @param relationshipFromNodes accumulator of nodes declaring outgoing relationships
 * @param processes             accumulator of Process levels needing relationship post-processing
 * @return the transformed level
 * @throws IllegalArgumentException if the node's type is not a level type
 */
protected static Level transformLevel(final CSNodeWrapper node, final Map<Integer, Node> nodes,
        final Map<String, SpecTopic> targetTopics, final List<CSNodeWrapper> relationshipFromNodes, List<Process> processes) {
    final Level level;
    if (node.getNodeType() == CommonConstants.CS_NODE_APPENDIX) {
        level = new Appendix(node.getTitle());
    } else if (node.getNodeType() == CommonConstants.CS_NODE_CHAPTER) {
        level = new Chapter(node.getTitle());
    } else if (node.getNodeType() == CommonConstants.CS_NODE_PART) {
        level = new Part(node.getTitle());
    } else if (node.getNodeType() == CommonConstants.CS_NODE_PROCESS) {
        level = new Process(node.getTitle());
    } else if (node.getNodeType() == CommonConstants.CS_NODE_SECTION) {
        level = new Section(node.getTitle());
    } else if (node.getNodeType() == CommonConstants.CS_NODE_PREFACE) {
        level = new Preface(node.getTitle());
    } else if (node.getNodeType() == CommonConstants.CS_NODE_INITIAL_CONTENT) {
        level = new InitialContent();
    } else {
        throw new IllegalArgumentException("The passed node is not a Level");
    }
    level.setConditionStatement(node.getCondition());
    level.setTargetId(node.getTargetId());
    level.setUniqueId(node.getId() == null ? null : node.getId().toString());
    // Set the fixed url properties
    applyFixedURLs(node, level);
    // Collect any relationships for processing after everything is transformed
    if (node.getRelatedToNodes() != null && node.getRelatedToNodes().getItems() != null && !node.getRelatedToNodes().getItems()
            .isEmpty()) {
        relationshipFromNodes.add(node);
    }
    // Transform the info topic node if one exists for the level
    if (node.getInfoTopicNode() != null) {
        final InfoTopic infoTopic = transformInfoTopic(node, node.getInfoTopicNode());
        level.setInfoTopic(infoTopic);
    }
    // Add all the levels/topics
    if (node.getChildren() != null && node.getChildren().getItems() != null) {
        final List<CSNodeWrapper> childNodes = node.getChildren().getItems();
        final HashMap<CSNodeWrapper, Node> levelNodes = new HashMap<CSNodeWrapper, Node>();
        final HashMap<CSNodeWrapper, SpecTopic> initialContentNodes = new HashMap<CSNodeWrapper, SpecTopic>();
        for (final CSNodeWrapper childNode : childNodes) {
            if (childNode.getNodeType() == CommonConstants.CS_NODE_TOPIC) {
                final SpecTopic topic = transformSpecTopic(childNode, nodes, targetTopics, relationshipFromNodes);
                levelNodes.put(childNode, topic);
            } else if (childNode.getNodeType() == CommonConstants.CS_NODE_COMMENT) {
                final Comment comment = transformComment(childNode);
                levelNodes.put(childNode, comment);
            } else if (childNode.getNodeType() == CommonConstants.CS_NODE_COMMON_CONTENT) {
                final CommonContent commonContent = transformCommonContent(childNode);
                levelNodes.put(childNode, commonContent);
            } else if (childNode.getNodeType() == CommonConstants.CS_NODE_INITIAL_CONTENT_TOPIC) {
                final SpecTopic initialContentTopic = transformSpecTopicWithoutTypeCheck(childNode, nodes, targetTopics,
                        relationshipFromNodes);
                // Initial-content topics live directly on an InitialContent level,
                // otherwise they are collected for the synthetic container below
                if (level instanceof InitialContent) {
                    levelNodes.put(childNode, initialContentTopic);
                } else {
                    initialContentNodes.put(childNode, initialContentTopic);
                }
            } else {
                // Anything else must itself be a level; recurse
                final Level childLevel = transformLevel(childNode, nodes, targetTopics, relationshipFromNodes, processes);
                levelNodes.put(childNode, childLevel);
            }
        }
        // Sort the level nodes so that they are in the right order based on next/prev values.
        final LinkedHashMap<CSNodeWrapper, Node> sortedMap = CSNodeSorter.sortMap(levelNodes);
        // PressGang 1.4+ stores the initial content inside it's own container, instead of on the level
        if (!(level instanceof InitialContent) && !initialContentNodes.isEmpty()) {
            final LinkedHashMap<CSNodeWrapper, SpecTopic> sortedInitialContentMap = CSNodeSorter.sortMap(initialContentNodes);
            final InitialContent initialContent = new InitialContent();
            level.appendChild(initialContent);
            // Add the initial content topics to the level now that they are in the right order.
            final Iterator<Map.Entry<CSNodeWrapper, SpecTopic>> frontMatterIter = sortedInitialContentMap.entrySet().iterator();
            while (frontMatterIter.hasNext()) {
                final Map.Entry<CSNodeWrapper, SpecTopic> entry = frontMatterIter.next();
                initialContent.appendSpecTopic(entry.getValue());
            }
        }
        // Add the child nodes to the level now that they are in the right order.
        final Iterator<Map.Entry<CSNodeWrapper, Node>> iter = sortedMap.entrySet().iterator();
        while (iter.hasNext()) {
            final Map.Entry<CSNodeWrapper, Node> entry = iter.next();
            level.appendChild(entry.getValue());
            // Add a new line to separate chapters/parts
            if (isNodeASeparatorLevel(entry.getValue()) && iter.hasNext()) {
                level.appendChild(new TextNode("\n"));
            }
        }
    }
    // Add the node to the list of processed nodes so that the relationships can be added once everything is processed
    nodes.put(node.getId(), level);
    // We need to keep track of processes to process their relationships
    if (level instanceof Process) {
        processes.add((Process) level);
    }
    return level;
} } | public class class_name {
// Transforms a content-spec level node and its children into a Level tree (see the
// method body for the per-type mapping). The trailing "// depends on control
// dependency: ..." comments are machine-generated dependency labels
// ([if]/[for]/[while] = controlling construct); they are data, kept verbatim.
protected static Level transformLevel(final CSNodeWrapper node, final Map<Integer, Node> nodes,
        final Map<String, SpecTopic> targetTopics, final List<CSNodeWrapper> relationshipFromNodes, List<Process> processes) {
    final Level level;
    if (node.getNodeType() == CommonConstants.CS_NODE_APPENDIX) {
        level = new Appendix(node.getTitle()); // depends on control dependency: [if], data = [none]
    } else if (node.getNodeType() == CommonConstants.CS_NODE_CHAPTER) {
        level = new Chapter(node.getTitle()); // depends on control dependency: [if], data = [none]
    } else if (node.getNodeType() == CommonConstants.CS_NODE_PART) {
        level = new Part(node.getTitle()); // depends on control dependency: [if], data = [none]
    } else if (node.getNodeType() == CommonConstants.CS_NODE_PROCESS) {
        level = new Process(node.getTitle()); // depends on control dependency: [if], data = [none]
    } else if (node.getNodeType() == CommonConstants.CS_NODE_SECTION) {
        level = new Section(node.getTitle()); // depends on control dependency: [if], data = [none]
    } else if (node.getNodeType() == CommonConstants.CS_NODE_PREFACE) {
        level = new Preface(node.getTitle()); // depends on control dependency: [if], data = [none]
    } else if (node.getNodeType() == CommonConstants.CS_NODE_INITIAL_CONTENT) {
        level = new InitialContent(); // depends on control dependency: [if], data = [none]
    } else {
        throw new IllegalArgumentException("The passed node is not a Level");
    }
    level.setConditionStatement(node.getCondition());
    level.setTargetId(node.getTargetId());
    level.setUniqueId(node.getId() == null ? null : node.getId().toString());
    // Set the fixed url properties
    applyFixedURLs(node, level);
    // Collect any relationships for processing after everything is transformed
    if (node.getRelatedToNodes() != null && node.getRelatedToNodes().getItems() != null && !node.getRelatedToNodes().getItems()
            .isEmpty()) {
        relationshipFromNodes.add(node); // depends on control dependency: [if], data = [none]
    }
    // Transform the info topic node if one exists for the level
    if (node.getInfoTopicNode() != null) {
        final InfoTopic infoTopic = transformInfoTopic(node, node.getInfoTopicNode());
        level.setInfoTopic(infoTopic); // depends on control dependency: [if], data = [none]
    }
    // Add all the levels/topics
    if (node.getChildren() != null && node.getChildren().getItems() != null) {
        final List<CSNodeWrapper> childNodes = node.getChildren().getItems();
        final HashMap<CSNodeWrapper, Node> levelNodes = new HashMap<CSNodeWrapper, Node>();
        final HashMap<CSNodeWrapper, SpecTopic> initialContentNodes = new HashMap<CSNodeWrapper, SpecTopic>();
        for (final CSNodeWrapper childNode : childNodes) {
            if (childNode.getNodeType() == CommonConstants.CS_NODE_TOPIC) {
                final SpecTopic topic = transformSpecTopic(childNode, nodes, targetTopics, relationshipFromNodes);
                levelNodes.put(childNode, topic); // depends on control dependency: [if], data = [none]
            } else if (childNode.getNodeType() == CommonConstants.CS_NODE_COMMENT) {
                final Comment comment = transformComment(childNode);
                levelNodes.put(childNode, comment); // depends on control dependency: [if], data = [none]
            } else if (childNode.getNodeType() == CommonConstants.CS_NODE_COMMON_CONTENT) {
                final CommonContent commonContent = transformCommonContent(childNode);
                levelNodes.put(childNode, commonContent); // depends on control dependency: [if], data = [none]
            } else if (childNode.getNodeType() == CommonConstants.CS_NODE_INITIAL_CONTENT_TOPIC) {
                final SpecTopic initialContentTopic = transformSpecTopicWithoutTypeCheck(childNode, nodes, targetTopics,
                        relationshipFromNodes);
                if (level instanceof InitialContent) {
                    levelNodes.put(childNode, initialContentTopic); // depends on control dependency: [if], data = [none]
                } else {
                    initialContentNodes.put(childNode, initialContentTopic); // depends on control dependency: [if], data = [none]
                }
            } else {
                final Level childLevel = transformLevel(childNode, nodes, targetTopics, relationshipFromNodes, processes);
                levelNodes.put(childNode, childLevel); // depends on control dependency: [if], data = [none]
            }
        }
        // Sort the level nodes so that they are in the right order based on next/prev values.
        final LinkedHashMap<CSNodeWrapper, Node> sortedMap = CSNodeSorter.sortMap(levelNodes);
        // PressGang 1.4+ stores the initial content inside it's own container, instead of on the level
        if (!(level instanceof InitialContent) && !initialContentNodes.isEmpty()) {
            final LinkedHashMap<CSNodeWrapper, SpecTopic> sortedInitialContentMap = CSNodeSorter.sortMap(initialContentNodes);
            final InitialContent initialContent = new InitialContent();
            level.appendChild(initialContent); // depends on control dependency: [if], data = [none]
            // Add the initial content topics to the level now that they are in the right order.
            final Iterator<Map.Entry<CSNodeWrapper, SpecTopic>> frontMatterIter = sortedInitialContentMap.entrySet().iterator();
            while (frontMatterIter.hasNext()) {
                final Map.Entry<CSNodeWrapper, SpecTopic> entry = frontMatterIter.next();
                initialContent.appendSpecTopic(entry.getValue()); // depends on control dependency: [while], data = [none]
            }
        }
        // Add the child nodes to the level now that they are in the right order.
        final Iterator<Map.Entry<CSNodeWrapper, Node>> iter = sortedMap.entrySet().iterator();
        while (iter.hasNext()) {
            final Map.Entry<CSNodeWrapper, Node> entry = iter.next();
            level.appendChild(entry.getValue()); // depends on control dependency: [while], data = [none]
            // Add a new line to separate chapters/parts
            if (isNodeASeparatorLevel(entry.getValue()) && iter.hasNext()) {
                level.appendChild(new TextNode("\n")); // depends on control dependency: [if], data = [none]
            }
        }
    }
    // Add the node to the list of processed nodes so that the relationships can be added once everything is processed
    nodes.put(node.getId(), level);
    // We need to keep track of processes to process their relationships
    if (level instanceof Process) {
        processes.add((Process) level); // depends on control dependency: [if], data = [none]
    }
    return level;
} } |
public class class_name {
/**
 * Copies every entry of {@code headers} into this instance by delegating to the
 * single-entry {@code add(String, String)} overload.
 *
 * @param headers the source headers; must not be {@code null}
 * @return {@code this}, to allow call chaining
 * @throws NullPointerException if {@code headers} is {@code null}
 */
public HttpHeaders add(HttpHeaders headers) {
    if (headers == null) {
        throw new NullPointerException("headers");
    }
    // Forward each name/value pair to the single-entry overload.
    for (Map.Entry<String, String> header : headers) {
        String name = header.getKey();
        String value = header.getValue();
        add(name, value);
    }
    return this;
} } | public class class_name {
// Copies every entry of the given headers into this instance via the
// single-entry add overload; returns this for chaining. The trailing
// "// depends on control dependency" comment is a machine-generated label, kept verbatim.
public HttpHeaders add(HttpHeaders headers) {
    if (headers == null) {
        throw new NullPointerException("headers");
    }
    for (Map.Entry<String, String> e: headers) {
        add(e.getKey(), e.getValue()); // depends on control dependency: [for], data = [e]
    }
    return this;
} } |
public class class_name {
/**
 * Creates a shallow copy of this query parameter, omitting the supplied element kinds.
 * Fields whose kind is not listed in {@code excludedElements} are carried over by
 * reference (no deep copy is made).
 *
 * @param excludedElements the kinds of state that must NOT be carried over
 * @return a new {@code QueryParameter} holding the non-excluded state of this instance
 */
public QueryParameter partialCopy(final QueryParameterKind... excludedElements) {
    final List<QueryParameterKind> excluded = Arrays.asList(excludedElements);
    final QueryParameter copy = new QueryParameter();
    if (!excluded.contains(QueryParameterKind.CONSTRAINTS)) {
        copy.rawConstraints = this.rawConstraints;
    }
    if (!excluded.contains(QueryParameterKind.GROUPS)) {
        copy.groups = this.groups;
    }
    if (!excluded.contains(QueryParameterKind.ORDERS)) {
        copy.orders = this.orders;
    }
    if (!excluded.contains(QueryParameterKind.PAGE)) {
        // Page size and page number travel together under the PAGE kind.
        copy.pageSize = this.pageSize;
        copy.page = this.page;
    }
    if (!excluded.contains(QueryParameterKind.TIMEZONE)) {
        copy.timezoneName = this.timezoneName;
    }
    return copy;
} } | public class class_name {
// Shallow-copies this query parameter, omitting fields whose kind is excluded.
// The trailing "// depends on control dependency" comments are machine-generated
// dependency labels, kept verbatim.
public QueryParameter partialCopy(final QueryParameterKind... excludedElements) {
    List<QueryParameterKind> excludedList = Arrays.asList(excludedElements);
    QueryParameter returnValue = new QueryParameter();
    if (!excludedList.contains(QueryParameterKind.CONSTRAINTS)) {
        returnValue.rawConstraints = this.rawConstraints; // depends on control dependency: [if], data = [none]
    }
    if (!excludedList.contains(QueryParameterKind.GROUPS)) {
        returnValue.groups = this.groups; // depends on control dependency: [if], data = [none]
    }
    if (!excludedList.contains(QueryParameterKind.ORDERS)) {
        returnValue.orders = this.orders; // depends on control dependency: [if], data = [none]
    }
    if (!excludedList.contains(QueryParameterKind.PAGE)) {
        returnValue.pageSize = this.pageSize; // depends on control dependency: [if], data = [none]
        returnValue.page = this.page; // depends on control dependency: [if], data = [none]
    }
    if (!excludedList.contains(QueryParameterKind.TIMEZONE)) {
        returnValue.timezoneName = this.timezoneName; // depends on control dependency: [if], data = [none]
    }
    return returnValue;
} } |
public class class_name {
/**
 * Clips {@code geometry} to the (enlarged) bounding box of the given tile.
 * Invalid input geometry and invalid clip results are repaired via
 * {@code JTSUtils.repairInvalidPolygon}; an unclippable way is flagged invalid.
 *
 * @param way                 the way owning the geometry; flagged invalid on failure
 * @param geometry            the geometry to clip
 * @param tileCoordinate      the tile whose bounding box is the clip region
 * @param enlargementInMeters buffer added around the tile bounding box
 * @return the clipped geometry, or {@code null} if JTS could not clip it
 */
public static Geometry clipToTile(TDWay way, Geometry geometry, TileCoordinate tileCoordinate,
        int enlargementInMeters) {
    Geometry tileBBJTS = null;
    Geometry ret = null;
    // create tile bounding box
    tileBBJTS = tileToJTSGeometry(tileCoordinate.getX(), tileCoordinate.getY(), tileCoordinate.getZoomlevel(),
            enlargementInMeters);
    // clip the geometry by intersection with the bounding box of the tile
    // may throw a TopologyException
    try {
        if (!geometry.isValid()) {
            // this should stop the problem of non-noded intersections that trigger an error when
            // clipping
            LOGGER.warning("invalid geometry prior to tile clipping, trying to repair " + way.getId());
            // NOTE(review): repairInvalidPolygon can return null (the 'ret' repair below
            // handles that case) — a null here would NPE before/at intersection(); TODO confirm.
            geometry = JTSUtils.repairInvalidPolygon(geometry);
            if (!geometry.isValid()) {
                LOGGER.warning("invalid geometry even after attempt to fix " + way.getId());
            }
        }
        ret = tileBBJTS.intersection(geometry);
        // according to Ludwig (see issue332) valid polygons may become invalid by clipping (at least
        // in the Python shapely library
        // we need to investigate this more closely and write approriate test cases
        // for now, I check whether the resulting polygon is valid and if not try to repair it
        if ((ret instanceof Polygon || ret instanceof MultiPolygon) && !ret.isValid()) {
            LOGGER.warning("clipped way is not valid, trying to repair it: " + way.getId());
            ret = JTSUtils.repairInvalidPolygon(ret);
            if (ret == null) {
                // Repair failed: mark the way so it is not written to the data file.
                way.setInvalid(true);
                LOGGER.warning("could not repair invalid polygon: " + way.getId());
            }
        }
    } catch (TopologyException e) {
        // Clipping itself failed; flag the way and drop the geometry.
        LOGGER.log(Level.WARNING, "JTS cannot clip way, not storing it in data file: " + way.getId(), e);
        way.setInvalid(true);
        return null;
    }
    return ret;
} } | public class class_name {
// Clips a way's geometry to the enlarged tile bounding box, repairing invalid
// geometry before and after clipping. The trailing "// depends on control
// dependency" comments are machine-generated dependency labels, kept verbatim.
public static Geometry clipToTile(TDWay way, Geometry geometry, TileCoordinate tileCoordinate,
        int enlargementInMeters) {
    Geometry tileBBJTS = null;
    Geometry ret = null;
    // create tile bounding box
    tileBBJTS = tileToJTSGeometry(tileCoordinate.getX(), tileCoordinate.getY(), tileCoordinate.getZoomlevel(),
            enlargementInMeters);
    // clip the geometry by intersection with the bounding box of the tile
    // may throw a TopologyException
    try {
        if (!geometry.isValid()) {
            // this should stop the problem of non-noded intersections that trigger an error when
            // clipping
            LOGGER.warning("invalid geometry prior to tile clipping, trying to repair " + way.getId()); // depends on control dependency: [if], data = [none]
            geometry = JTSUtils.repairInvalidPolygon(geometry); // depends on control dependency: [if], data = [none]
            if (!geometry.isValid()) {
                LOGGER.warning("invalid geometry even after attempt to fix " + way.getId()); // depends on control dependency: [if], data = [none]
            }
        }
        ret = tileBBJTS.intersection(geometry); // depends on control dependency: [try], data = [none]
        // according to Ludwig (see issue332) valid polygons may become invalid by clipping (at least
        // in the Python shapely library
        // we need to investigate this more closely and write approriate test cases
        // for now, I check whether the resulting polygon is valid and if not try to repair it
        if ((ret instanceof Polygon || ret instanceof MultiPolygon) && !ret.isValid()) {
            LOGGER.warning("clipped way is not valid, trying to repair it: " + way.getId()); // depends on control dependency: [if], data = [none]
            ret = JTSUtils.repairInvalidPolygon(ret); // depends on control dependency: [if], data = [none]
            if (ret == null) {
                way.setInvalid(true); // depends on control dependency: [if], data = [none]
                LOGGER.warning("could not repair invalid polygon: " + way.getId()); // depends on control dependency: [if], data = [none]
            }
        }
    } catch (TopologyException e) {
        LOGGER.log(Level.WARNING, "JTS cannot clip way, not storing it in data file: " + way.getId(), e);
        way.setInvalid(true);
        return null;
    } // depends on control dependency: [catch], data = [none]
    return ret;
} } |
public class class_name {
/**
 * Reloads all policy rules from the storage adapter into the model.
 * The in-memory policy is cleared first, re-read via the adapter, printed via the
 * model, and role inheritance links are rebuilt when auto-building is enabled.
 */
public void loadPolicy() {
    // Discard the current in-memory rules before re-reading them.
    model.clearPolicy();
    adapter.loadPolicy(model);
    model.printPolicy();
    // Guard clause: nothing further to do unless role links are auto-built.
    if (!autoBuildRoleLinks) {
        return;
    }
    buildRoleLinks();
} } | public class class_name {
// Reloads all policy rules from the adapter into the model, rebuilding role links
// if auto-building is enabled. The trailing "// depends on control dependency"
// comment is a machine-generated label, kept verbatim.
public void loadPolicy() {
    model.clearPolicy();
    adapter.loadPolicy(model);
    model.printPolicy();
    if (autoBuildRoleLinks) {
        buildRoleLinks(); // depends on control dependency: [if], data = [none]
    }
} } |
public class class_name {
/**
 * Resolves every registered {@code Provider} component from the container.
 * Components that fail to resolve are logged and skipped (best-effort).
 *
 * @return map of component key to resolved Provider, or {@code null} if none resolved
 *         (callers can distinguish "none found" from an empty map)
 * @throws Error if the container has no Provider component descriptors at all
 */
private Map findProviders() {
    // Hoisted: the Provider role name is used for both the descriptor map and each lookup.
    final String role = Provider.class.getName();
    Map providers = getContainer().getComponentDescriptorMap(role);
    if (providers == null) {
        // A missing descriptor map is treated as a fatal configuration problem.
        throw new Error("No providers discovered");
    }
    Map found = null;
    // Enhanced for-loop over the raw key set replaces the manual Iterator;
    // keys are Strings by contract of the container, cast as before.
    for (Object keyObj : providers.keySet()) {
        String key = (String) keyObj;
        Provider provider;
        try {
            provider = (Provider) getContainer().lookup(role, key);
        }
        catch (Exception e) {
            // Best-effort: log and skip components that fail to resolve.
            log.warn("Failed to lookup provider for key: {}", key, e);
            continue;
        }
        if (provider != null) {
            if (found == null) {
                // Lazily created so a fully-failed scan still returns null.
                found = new HashMap();
            }
            found.put(key, provider);
        }
    }
    return found;
} } | public class class_name {
// Resolves every registered Provider component from the container, logging and
// skipping lookup failures; returns null when none resolved. The trailing
// "// depends on control dependency" comments are machine-generated labels, kept verbatim.
private Map findProviders() {
    Map providers = getContainer().getComponentDescriptorMap(Provider.class.getName());
    if (providers == null) {
        throw new Error("No providers discovered");
    }
    Set keys = providers.keySet();
    Map found = null;
    for (Iterator iter = keys.iterator(); iter.hasNext();) {
        String key = (String)iter.next();
        Provider provider;
        try {
            provider = (Provider) getContainer().lookup(Provider.class.getName(), key); // depends on control dependency: [try], data = [none]
        }
        catch (Exception e) {
            log.warn("Failed to lookup provider for key: {}", key, e);
            continue;
        } // depends on control dependency: [catch], data = [none]
        if (provider != null) {
            if (found == null) {
                found = new HashMap(); // depends on control dependency: [if], data = [none]
            }
            found.put(key, provider); // depends on control dependency: [if], data = [none]
        }
    }
    return found;
} } |
public class class_name {
/**
 * Loads an include target: when {@code name} parses as a URL it is loaded as a URL,
 * otherwise it is resolved relative to the including file (relativeTo in a file
 * falls back to classpath inside relativeTo()).
 *
 * @param context include context supplying parse options and relative resolution
 * @param name    the include target, either a URL string or a relative basename
 * @return the parsed configuration object
 */
static ConfigObject includeWithoutFallback(final ConfigIncludeContext context, String name) {
    // Heuristic: treat the name as a URL only when it actually parses as one.
    URL url = null;
    try {
        url = new URL(name);
    } catch (MalformedURLException e) {
        // Not a URL; fall through to relative resolution below.
    }
    if (url == null) {
        NameSource source = new RelativeNameSource(context);
        return fromBasename(source, name, context.parseOptions());
    }
    return includeURLWithoutFallback(context, url);
} } | public class class_name {
// Loads an include target either as a URL (when the name parses as one) or
// relative to the including file. The trailing "// depends on control dependency"
// comments are machine-generated labels, kept verbatim.
static ConfigObject includeWithoutFallback(final ConfigIncludeContext context, String name) {
    // the heuristic is valid URL then URL, else relative to including file;
    // relativeTo in a file falls back to classpath inside relativeTo().
    URL url;
    try {
        url = new URL(name); // depends on control dependency: [try], data = [none]
    } catch (MalformedURLException e) {
        url = null;
    } // depends on control dependency: [catch], data = [none]
    if (url != null) {
        return includeURLWithoutFallback(context, url); // depends on control dependency: [if], data = [none]
    } else {
        NameSource source = new RelativeNameSource(context);
        return fromBasename(source, name, context.parseOptions()); // depends on control dependency: [if], data = [none]
    }
} } |
public class class_name {
@SuppressWarnings("serial")
public static Type[] forGenericInterfaces(final Class<?> type) {
Type[] result = new Type[type.getGenericInterfaces().length];
for (int i = 0; i < result.length; i++) {
final int index = i;
result[i] = forTypeProvider(new DefaultTypeProvider() {
@Override
public Type getType() {
return type.getGenericInterfaces()[index];
}
});
}
return result;
} } | public class class_name {
@SuppressWarnings("serial")
public static Type[] forGenericInterfaces(final Class<?> type) {
Type[] result = new Type[type.getGenericInterfaces().length];
for (int i = 0; i < result.length; i++) {
final int index = i;
result[i] = forTypeProvider(new DefaultTypeProvider() {
@Override
public Type getType() {
return type.getGenericInterfaces()[index];
}
}); // depends on control dependency: [for], data = [none]
}
return result;
} } |
public class class_name {
/**
 * Reads the runtime metric named in the operation from the target cache container's
 * statistics and stores it in the operation result. Unknown metrics and an
 * unavailable cache container are reported via the failure description rather
 * than by throwing.
 *
 * NOTE(review): controller.getValue() is dereferenced before the cacheManager null
 * check; if the container service were unregistered, getService(...) could return
 * null and this would NPE — TODO confirm the service is always present here.
 */
@Override
protected void executeRuntimeStep(OperationContext context, ModelNode operation) {
    final PathAddress address = PathAddress.pathAddress(operation.require(OP_ADDR));
    final String cacheContainerName = address.getLastElement().getValue();
    final String attrName = operation.require(ModelDescriptionConstants.NAME).asString();
    final ServiceController<?> controller = context.getServiceRegistry(false).getService(CacheContainerServiceName.CACHE_CONTAINER.getServiceName(cacheContainerName));
    DefaultCacheContainer cacheManager = (DefaultCacheContainer) controller.getValue();
    CacheManagerMetrics metric = CacheManagerMetrics.getStat(attrName);
    ModelNode result = new ModelNode();
    if (metric == null) {
        context.getFailureDescription().set(String.format("Unknown metric %s", attrName));
    } else if (cacheManager == null) {
        context.getFailureDescription().set(String.format("Unavailable cache container %s", attrName));
    } else {
        CacheContainerStats stats = cacheManager.getStats();
        ClusterContainerStats clusterContainerStats = cacheManager.getGlobalComponentRegistry().getComponent(ClusterContainerStats.class);
        // Dispatch on the requested metric; string-valued metrics fall back to "N/A" when absent.
        switch (metric) {
            case CACHE_MANAGER_STATUS:
                result.set(SecurityActions.getCacheManagerStatus(cacheManager).toString());
                break;
            case IS_COORDINATOR:
                result.set(SecurityActions.getCacheManagerIsCoordinator(cacheManager));
                break;
            case LOCAL_ADDRESS:
                Address localAddress = SecurityActions.getCacheManagerLocalAddress(cacheManager);
                result.set(localAddress != null ? localAddress.toString() : "N/A");
                break;
            case COORDINATOR_ADDRESS:
                Address coordinatorAddress = SecurityActions.getCacheManagerCoordinatorAddress(cacheManager);
                result.set(coordinatorAddress != null ? coordinatorAddress.toString() : "N/A");
                break;
            case CLUSTER_AVAILABILITY:
                result.set(SecurityActions.getCacheManagerClusterAvailability(cacheManager));
                break;
            case CLUSTER_NAME:
                String clusterName = SecurityActions.getCacheManagerClusterName(cacheManager);
                result.set(clusterName != null ? clusterName : "N/A");
                break;
            case DEFINED_CACHE_NAMES:
                String definedCacheNames = SecurityActions.getDefinedCacheNames(cacheManager);
                result.set(definedCacheNames != null ? definedCacheNames : "N/A");
                break;
            case CLUSTER_SIZE:
                List<Address> members = SecurityActions.getMembers(cacheManager);
                result.set(members != null ? Integer.toString(members.size()) : "N/A");
                break;
            case CREATED_CACHE_COUNT:
                result.set(SecurityActions.getCacheCreatedCount(cacheManager));
                break;
            case DEFINED_CACHE_COUNT:
                result.set(SecurityActions.getDefinedCacheCount(cacheManager));
                break;
            case MEMBERS:
                // Reuses the 'members' local declared in the CLUSTER_SIZE case above.
                members = SecurityActions.getMembers(cacheManager);
                result.set(members != null ? members.toString() : "N/A");
                break;
            case RUNNING_CACHE_COUNT:
                result.set(SecurityActions.getRunningCacheCount(cacheManager));
                break;
            case VERSION:
                result.set(Version.getVersion());
                break;
            case AVERAGE_READ_TIME:
                result.set(stats.getAverageReadTime());
                break;
            case AVERAGE_WRITE_TIME:
                result.set(stats.getAverageWriteTime());
                break;
            case AVERAGE_REMOVE_TIME:
                result.set(stats.getAverageRemoveTime());
                break;
            case AVERAGE_READ_TIME_NANOS:
                result.set(stats.getAverageReadTimeNanos());
                break;
            case AVERAGE_WRITE_TIME_NANOS:
                result.set(stats.getAverageWriteTimeNanos());
                break;
            case AVERAGE_REMOVE_TIME_NANOS:
                result.set(stats.getAverageRemoveTimeNanos());
                break;
            case TIME_SINCE_START:
                result.set(stats.getTimeSinceStart());
                break;
            case EVICTIONS:
                result.set(stats.getEvictions());
                break;
            case HIT_RATIO:
                result.set(stats.getHitRatio());
                break;
            case HITS:
                result.set(stats.getHits());
                break;
            case MISSES:
                result.set(stats.getMisses());
                break;
            case NUMBER_OF_ENTRIES:
                result.set(stats.getCurrentNumberOfEntries());
                break;
            case NUMBER_OF_ENTRIES_IN_MEMORY:
                result.set(stats.getCurrentNumberOfEntriesInMemory());
                break;
            case DATA_MEMORY_USED:
                result.set(stats.getDataMemoryUsed());
                break;
            case OFF_HEAP_MEMORY_USED:
                result.set(stats.getOffHeapMemoryUsed());
                break;
            case MINIMUM_REQUIRED_NODES:
                result.set(stats.getRequiredMinimumNumberOfNodes());
                break;
            case READ_WRITE_RATIO:
                result.set(stats.getReadWriteRatio());
                break;
            case REMOVE_HITS:
                result.set(stats.getRemoveHits());
                break;
            case REMOVE_MISSES:
                result.set(stats.getRemoveMisses());
                break;
            case STORES:
                result.set(stats.getStores());
                break;
            case TIME_SINCE_RESET:
                result.set(stats.getTimeSinceReset());
                break;
            // The three cross-site cases share one handler; filterSitesByStatus
            // selects sites matching the requested status.
            case ONLINE_SITES:
            case OFFLINE_SITES:
            case MIXED_SITES: {
                GlobalComponentRegistry registry = SecurityActions.getGlobalComponentRegistry(cacheManager);
                Collection<String> sites = filterSitesByStatus(registry, metric);
                if (sites.isEmpty()) {
                    result.setEmptyList();
                } else {
                    result.set(toModelNodeCollection(sites));
                }
                break;
            }
            case SITES_VIEW:
                Set<String> sitesView = SecurityActions.getSitesView(cacheManager);
                result.set(sitesView != null ? sitesView.toString() : "N/A");
                break;
            case MEMORY_AVAILABLE:
                result.set(clusterContainerStats.getMemoryAvailable());
                break;
            case MEMORY_MAX:
                result.set(clusterContainerStats.getMemoryMax());
                break;
            case MEMORY_TOTAL:
                result.set(clusterContainerStats.getMemoryTotal());
                break;
            case MEMORY_USED:
                result.set(clusterContainerStats.getMemoryUsed());
                break;
            case STALE_STATS_THRESHOLD:
                result.set(clusterContainerStats.getStaleStatsThreshold());
                break;
            default:
                context.getFailureDescription().set(String.format("Unknown metric %s", metric));
                break;
        }
        context.getResult().set(result);
    }
} } | public class class_name {
@Override
protected void executeRuntimeStep(OperationContext context, ModelNode operation) {
final PathAddress address = PathAddress.pathAddress(operation.require(OP_ADDR));
final String cacheContainerName = address.getLastElement().getValue();
final String attrName = operation.require(ModelDescriptionConstants.NAME).asString();
final ServiceController<?> controller = context.getServiceRegistry(false).getService(CacheContainerServiceName.CACHE_CONTAINER.getServiceName(cacheContainerName));
DefaultCacheContainer cacheManager = (DefaultCacheContainer) controller.getValue();
CacheManagerMetrics metric = CacheManagerMetrics.getStat(attrName);
ModelNode result = new ModelNode();
if (metric == null) {
context.getFailureDescription().set(String.format("Unknown metric %s", attrName)); // depends on control dependency: [if], data = [none]
} else if (cacheManager == null) {
context.getFailureDescription().set(String.format("Unavailable cache container %s", attrName)); // depends on control dependency: [if], data = [none]
} else {
CacheContainerStats stats = cacheManager.getStats();
ClusterContainerStats clusterContainerStats = cacheManager.getGlobalComponentRegistry().getComponent(ClusterContainerStats.class);
switch (metric) {
case CACHE_MANAGER_STATUS:
result.set(SecurityActions.getCacheManagerStatus(cacheManager).toString());
break;
case IS_COORDINATOR:
result.set(SecurityActions.getCacheManagerIsCoordinator(cacheManager));
break;
case LOCAL_ADDRESS:
Address localAddress = SecurityActions.getCacheManagerLocalAddress(cacheManager);
result.set(localAddress != null ? localAddress.toString() : "N/A");
break;
case COORDINATOR_ADDRESS:
Address coordinatorAddress = SecurityActions.getCacheManagerCoordinatorAddress(cacheManager);
result.set(coordinatorAddress != null ? coordinatorAddress.toString() : "N/A");
break;
case CLUSTER_AVAILABILITY:
result.set(SecurityActions.getCacheManagerClusterAvailability(cacheManager));
break;
case CLUSTER_NAME:
String clusterName = SecurityActions.getCacheManagerClusterName(cacheManager);
result.set(clusterName != null ? clusterName : "N/A");
break;
case DEFINED_CACHE_NAMES:
String definedCacheNames = SecurityActions.getDefinedCacheNames(cacheManager);
result.set(definedCacheNames != null ? definedCacheNames : "N/A");
break;
case CLUSTER_SIZE:
List<Address> members = SecurityActions.getMembers(cacheManager);
result.set(members != null ? Integer.toString(members.size()) : "N/A");
break;
case CREATED_CACHE_COUNT:
result.set(SecurityActions.getCacheCreatedCount(cacheManager));
break;
case DEFINED_CACHE_COUNT:
result.set(SecurityActions.getDefinedCacheCount(cacheManager));
break;
case MEMBERS:
members = SecurityActions.getMembers(cacheManager);
result.set(members != null ? members.toString() : "N/A");
break;
case RUNNING_CACHE_COUNT:
result.set(SecurityActions.getRunningCacheCount(cacheManager));
break;
case VERSION:
result.set(Version.getVersion());
break;
case AVERAGE_READ_TIME:
result.set(stats.getAverageReadTime());
break;
case AVERAGE_WRITE_TIME:
result.set(stats.getAverageWriteTime());
break;
case AVERAGE_REMOVE_TIME:
result.set(stats.getAverageRemoveTime());
break;
case AVERAGE_READ_TIME_NANOS:
result.set(stats.getAverageReadTimeNanos());
break;
case AVERAGE_WRITE_TIME_NANOS:
result.set(stats.getAverageWriteTimeNanos());
break;
case AVERAGE_REMOVE_TIME_NANOS:
result.set(stats.getAverageRemoveTimeNanos());
break;
case TIME_SINCE_START:
result.set(stats.getTimeSinceStart());
break;
case EVICTIONS:
result.set(stats.getEvictions());
break;
case HIT_RATIO:
result.set(stats.getHitRatio());
break;
case HITS:
result.set(stats.getHits());
break;
case MISSES:
result.set(stats.getMisses());
break;
case NUMBER_OF_ENTRIES:
result.set(stats.getCurrentNumberOfEntries());
break;
case NUMBER_OF_ENTRIES_IN_MEMORY:
result.set(stats.getCurrentNumberOfEntriesInMemory());
break;
case DATA_MEMORY_USED:
result.set(stats.getDataMemoryUsed());
break;
case OFF_HEAP_MEMORY_USED:
result.set(stats.getOffHeapMemoryUsed());
break;
case MINIMUM_REQUIRED_NODES:
result.set(stats.getRequiredMinimumNumberOfNodes());
break;
case READ_WRITE_RATIO:
result.set(stats.getReadWriteRatio());
break;
case REMOVE_HITS:
result.set(stats.getRemoveHits());
break;
case REMOVE_MISSES:
result.set(stats.getRemoveMisses());
break;
case STORES:
result.set(stats.getStores());
break;
case TIME_SINCE_RESET:
result.set(stats.getTimeSinceReset());
break;
case ONLINE_SITES:
case OFFLINE_SITES:
case MIXED_SITES: {
GlobalComponentRegistry registry = SecurityActions.getGlobalComponentRegistry(cacheManager);
Collection<String> sites = filterSitesByStatus(registry, metric);
if (sites.isEmpty()) {
result.setEmptyList(); // depends on control dependency: [if], data = [none]
} else {
result.set(toModelNodeCollection(sites)); // depends on control dependency: [if], data = [none]
}
break;
}
case SITES_VIEW:
Set<String> sitesView = SecurityActions.getSitesView(cacheManager);
result.set(sitesView != null ? sitesView.toString() : "N/A");
break;
case MEMORY_AVAILABLE:
result.set(clusterContainerStats.getMemoryAvailable());
break;
case MEMORY_MAX:
result.set(clusterContainerStats.getMemoryMax());
break;
case MEMORY_TOTAL:
result.set(clusterContainerStats.getMemoryTotal());
break;
case MEMORY_USED:
result.set(clusterContainerStats.getMemoryUsed());
break;
case STALE_STATS_THRESHOLD:
result.set(clusterContainerStats.getStaleStatsThreshold());
break;
default:
context.getFailureDescription().set(String.format("Unknown metric %s", metric));
break;
}
context.getResult().set(result); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public boolean apply(final Class<?> type, final Object value,
final List<ValidationFailure> validationFailures) {
if (appliesTo(type)) {
validate(type, value, validationFailures);
return true;
}
return false;
} } | public class class_name {
public boolean apply(final Class<?> type, final Object value,
final List<ValidationFailure> validationFailures) {
if (appliesTo(type)) {
validate(type, value, validationFailures); // depends on control dependency: [if], data = [none]
return true; // depends on control dependency: [if], data = [none]
}
return false;
} } |
public class class_name {
public void setAllowedMethods(java.util.Collection<String> allowedMethods) {
if (allowedMethods == null) {
this.allowedMethods = null;
return;
}
this.allowedMethods = new java.util.ArrayList<String>(allowedMethods);
} } | public class class_name {
public void setAllowedMethods(java.util.Collection<String> allowedMethods) {
if (allowedMethods == null) {
this.allowedMethods = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.allowedMethods = new java.util.ArrayList<String>(allowedMethods);
} } |
public class class_name {
public void usageDetail(
final char commandPrefix,
final ICmdLineArg<?> arg,
final int _indentLevel)
{
nameIt(commandPrefix, arg);
String help = ((AbstractCLA<?>) arg).getHelp();
if (help != null)
{
if (help.length() > 40)
help = help.substring(0, 40);
allign(29);
append(help);
unallign();
}
} } | public class class_name {
public void usageDetail(
final char commandPrefix,
final ICmdLineArg<?> arg,
final int _indentLevel)
{
nameIt(commandPrefix, arg);
String help = ((AbstractCLA<?>) arg).getHelp();
if (help != null)
{
if (help.length() > 40)
help = help.substring(0, 40);
allign(29); // depends on control dependency: [if], data = [none]
append(help); // depends on control dependency: [if], data = [(help]
unallign(); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@POST
@Path("/upload")
@Consumes(MediaType.MULTIPART_FORM_DATA)
public Response uploadFile(
@FormDataParam("file") InputStream uploadedInputStream,
@FormDataParam("file") FormDataContentDisposition fileDetail) {
Config config = createConfig();
if (uploadedInputStream == null) {
String msg = "input stream is null";
LOG.error(msg);
return Response.status(Response.Status.BAD_REQUEST)
.type(MediaType.APPLICATION_JSON)
.entity(Utils.createMessage(msg)).build();
}
if (fileDetail == null) {
String msg = "form data content disposition is null";
LOG.error(msg);
return Response.status(Response.Status.BAD_REQUEST)
.type(MediaType.APPLICATION_JSON)
.entity(Utils.createMessage(msg)).build();
}
String uploadDir = config.getStringValue(FILE_SYSTEM_DIRECTORY);
final String fileName = UUID.randomUUID() + "-" + fileDetail.getFileName();
final String uploadedFileLocation
= uploadDir + "/" + fileName;
// save it
try {
FileHelper.writeToFile(uploadedInputStream, uploadedFileLocation);
} catch (IOException e) {
LOG.error("error uploading file {}", fileDetail.getFileName(), e);
return Response.serverError()
.type(MediaType.APPLICATION_JSON)
.entity(Utils.createMessage(e.getMessage()))
.build();
}
String uri = String.format("http://%s:%s/api/v1/file/download/%s",
getHostNameOrIP(), getPort(), fileName);
return Response.status(Response.Status.OK).entity(uri).build();
} } | public class class_name {
@POST
@Path("/upload")
@Consumes(MediaType.MULTIPART_FORM_DATA)
public Response uploadFile(
@FormDataParam("file") InputStream uploadedInputStream,
@FormDataParam("file") FormDataContentDisposition fileDetail) {
Config config = createConfig();
if (uploadedInputStream == null) {
String msg = "input stream is null";
LOG.error(msg); // depends on control dependency: [if], data = [none]
return Response.status(Response.Status.BAD_REQUEST)
.type(MediaType.APPLICATION_JSON)
.entity(Utils.createMessage(msg)).build(); // depends on control dependency: [if], data = [none]
}
if (fileDetail == null) {
String msg = "form data content disposition is null";
LOG.error(msg); // depends on control dependency: [if], data = [none]
return Response.status(Response.Status.BAD_REQUEST)
.type(MediaType.APPLICATION_JSON)
.entity(Utils.createMessage(msg)).build(); // depends on control dependency: [if], data = [none]
}
String uploadDir = config.getStringValue(FILE_SYSTEM_DIRECTORY);
final String fileName = UUID.randomUUID() + "-" + fileDetail.getFileName();
final String uploadedFileLocation
= uploadDir + "/" + fileName;
// save it
try {
FileHelper.writeToFile(uploadedInputStream, uploadedFileLocation); // depends on control dependency: [try], data = [none]
} catch (IOException e) {
LOG.error("error uploading file {}", fileDetail.getFileName(), e);
return Response.serverError()
.type(MediaType.APPLICATION_JSON)
.entity(Utils.createMessage(e.getMessage()))
.build();
} // depends on control dependency: [catch], data = [none]
String uri = String.format("http://%s:%s/api/v1/file/download/%s",
getHostNameOrIP(), getPort(), fileName);
return Response.status(Response.Status.OK).entity(uri).build();
} } |
public class class_name {
public int[] scoreCols() {
DTree tree = _tree;
if (tree.actual_mtries() == _hs.length && tree._mtrys_per_tree == _hs.length) return null;
// per-tree pre-selected columns
int[] activeCols = tree._cols;
// Log.info("For tree with seed " + tree._seed + ", out of " + _hs.length + " cols, the following cols are activated via mtry_per_tree=" + tree._mtrys_per_tree + ": " + Arrays.toString(activeCols));
int[] cols = new int[activeCols.length];
int len=0;
// collect columns that can be split (non-constant, large enough to split, etc.)
for(int i = 0; i< activeCols.length; i++ ) {
int idx = activeCols[i];
assert(idx == i || tree._mtrys_per_tree < _hs.length);
if( _hs[idx]==null ) continue; // Ignore not-tracked cols
assert _hs[idx]._min < _hs[idx]._maxEx && _hs[idx].nbins() > 1 : "broken histo range "+_hs[idx];
cols[len++] = idx; // Gather active column
}
// Log.info("These columns can be split: " + Arrays.toString(Arrays.copyOfRange(cols, 0, len)));
int choices = len; // Number of columns I can choose from
int mtries = tree.actual_mtries();
if (choices > 0) { // It can happen that we have no choices, because this node cannot be split any more (all active columns are constant, for example).
// Draw up to mtry columns at random without replacement.
for (int i = 0; i < mtries; i++) {
if (len == 0) break; // Out of choices!
int idx2 = tree._rand.nextInt(len);
int col = cols[idx2]; // The chosen column
cols[idx2] = cols[--len]; // Compress out of array; do not choose again
cols[len] = col; // Swap chosen in just after 'len'
}
assert len < choices;
}
// Log.info("Picking these (mtry=" + mtries + ") columns to evaluate for splitting: " + Arrays.toString(Arrays.copyOfRange(cols, len, choices)));
return Arrays.copyOfRange(cols, len, choices);
} } | public class class_name {
public int[] scoreCols() {
DTree tree = _tree;
if (tree.actual_mtries() == _hs.length && tree._mtrys_per_tree == _hs.length) return null;
// per-tree pre-selected columns
int[] activeCols = tree._cols;
// Log.info("For tree with seed " + tree._seed + ", out of " + _hs.length + " cols, the following cols are activated via mtry_per_tree=" + tree._mtrys_per_tree + ": " + Arrays.toString(activeCols));
int[] cols = new int[activeCols.length];
int len=0;
// collect columns that can be split (non-constant, large enough to split, etc.)
for(int i = 0; i< activeCols.length; i++ ) {
int idx = activeCols[i];
assert(idx == i || tree._mtrys_per_tree < _hs.length); // depends on control dependency: [for], data = [i]
if( _hs[idx]==null ) continue; // Ignore not-tracked cols
assert _hs[idx]._min < _hs[idx]._maxEx && _hs[idx].nbins() > 1 : "broken histo range "+_hs[idx]; // depends on control dependency: [for], data = [none]
cols[len++] = idx; // Gather active column // depends on control dependency: [for], data = [none]
}
// Log.info("These columns can be split: " + Arrays.toString(Arrays.copyOfRange(cols, 0, len)));
int choices = len; // Number of columns I can choose from
int mtries = tree.actual_mtries();
if (choices > 0) { // It can happen that we have no choices, because this node cannot be split any more (all active columns are constant, for example).
// Draw up to mtry columns at random without replacement.
for (int i = 0; i < mtries; i++) {
if (len == 0) break; // Out of choices!
int idx2 = tree._rand.nextInt(len);
int col = cols[idx2]; // The chosen column
cols[idx2] = cols[--len]; // Compress out of array; do not choose again // depends on control dependency: [for], data = [none]
cols[len] = col; // Swap chosen in just after 'len' // depends on control dependency: [for], data = [none]
}
assert len < choices;
}
// Log.info("Picking these (mtry=" + mtries + ") columns to evaluate for splitting: " + Arrays.toString(Arrays.copyOfRange(cols, len, choices)));
return Arrays.copyOfRange(cols, len, choices);
} } |
public class class_name {
private static <T> void log(RedwoodChannels channels, String description, T[] array) {
Redwood.startTrack(description);
if (array == null) {
channels.log("(array is null)");
} else if (array.length == 0) {
channels.log("(empty)");
} else {
int index = 0;
for (T item : array) {
if (dispatchable(item)) {
log(channels, "Index " + index, item);
} else {
channels.logf("Index %d: %s", index, item);
}
index++;
}
}
Redwood.endTrack(description);
} } | public class class_name {
private static <T> void log(RedwoodChannels channels, String description, T[] array) {
Redwood.startTrack(description);
if (array == null) {
channels.log("(array is null)");
// depends on control dependency: [if], data = [(array]
} else if (array.length == 0) {
channels.log("(empty)");
// depends on control dependency: [if], data = [none]
} else {
int index = 0;
for (T item : array) {
if (dispatchable(item)) {
log(channels, "Index " + index, item);
// depends on control dependency: [if], data = [none]
} else {
channels.logf("Index %d: %s", index, item);
// depends on control dependency: [if], data = [none]
}
index++;
// depends on control dependency: [for], data = [none]
}
}
Redwood.endTrack(description);
} } |
public class class_name {
synchronized long userUpdate(long value) {
if (value == currValue) {
currValue += increment;
return value;
}
if (increment > 0) {
if (value > currValue) {
currValue += ((value - currValue + increment) / increment)
* increment;
}
} else {
if (value < currValue) {
currValue += ((value - currValue + increment) / increment)
* increment;
}
}
return value;
} } | public class class_name {
synchronized long userUpdate(long value) {
if (value == currValue) {
currValue += increment; // depends on control dependency: [if], data = [none]
return value; // depends on control dependency: [if], data = [none]
}
if (increment > 0) {
if (value > currValue) {
currValue += ((value - currValue + increment) / increment)
* increment; // depends on control dependency: [if], data = [(value]
}
} else {
if (value < currValue) {
currValue += ((value - currValue + increment) / increment)
* increment; // depends on control dependency: [if], data = [(value]
}
}
return value;
} } |
public class class_name {
public final void mRULE_RICH_TEXT_INBETWEEN() throws RecognitionException {
try {
int _type = RULE_RICH_TEXT_INBETWEEN;
int _channel = DEFAULT_TOKEN_CHANNEL;
// InternalSARL.g:16910:26: ( '\\uFFFD' ( RULE_IN_RICH_STRING )* ( '\\'' ( '\\'' )? )? '\\uFFFD' )
// InternalSARL.g:16910:28: '\\uFFFD' ( RULE_IN_RICH_STRING )* ( '\\'' ( '\\'' )? )? '\\uFFFD'
{
match('\uFFFD');
// InternalSARL.g:16910:37: ( RULE_IN_RICH_STRING )*
loop19:
do {
int alt19=2;
int LA19_0 = input.LA(1);
if ( (LA19_0=='\'') ) {
int LA19_1 = input.LA(2);
if ( (LA19_1=='\'') ) {
int LA19_4 = input.LA(3);
if ( ((LA19_4>='\u0000' && LA19_4<='&')||(LA19_4>='(' && LA19_4<='\uFFFC')||(LA19_4>='\uFFFE' && LA19_4<='\uFFFF')) ) {
alt19=1;
}
}
else if ( ((LA19_1>='\u0000' && LA19_1<='&')||(LA19_1>='(' && LA19_1<='\uFFFC')||(LA19_1>='\uFFFE' && LA19_1<='\uFFFF')) ) {
alt19=1;
}
}
else if ( ((LA19_0>='\u0000' && LA19_0<='&')||(LA19_0>='(' && LA19_0<='\uFFFC')||(LA19_0>='\uFFFE' && LA19_0<='\uFFFF')) ) {
alt19=1;
}
switch (alt19) {
case 1 :
// InternalSARL.g:16910:37: RULE_IN_RICH_STRING
{
mRULE_IN_RICH_STRING();
}
break;
default :
break loop19;
}
} while (true);
// InternalSARL.g:16910:58: ( '\\'' ( '\\'' )? )?
int alt21=2;
int LA21_0 = input.LA(1);
if ( (LA21_0=='\'') ) {
alt21=1;
}
switch (alt21) {
case 1 :
// InternalSARL.g:16910:59: '\\'' ( '\\'' )?
{
match('\'');
// InternalSARL.g:16910:64: ( '\\'' )?
int alt20=2;
int LA20_0 = input.LA(1);
if ( (LA20_0=='\'') ) {
alt20=1;
}
switch (alt20) {
case 1 :
// InternalSARL.g:16910:64: '\\''
{
match('\'');
}
break;
}
}
break;
}
match('\uFFFD');
}
state.type = _type;
state.channel = _channel;
}
finally {
}
} } | public class class_name {
public final void mRULE_RICH_TEXT_INBETWEEN() throws RecognitionException {
try {
int _type = RULE_RICH_TEXT_INBETWEEN;
int _channel = DEFAULT_TOKEN_CHANNEL;
// InternalSARL.g:16910:26: ( '\\uFFFD' ( RULE_IN_RICH_STRING )* ( '\\'' ( '\\'' )? )? '\\uFFFD' )
// InternalSARL.g:16910:28: '\\uFFFD' ( RULE_IN_RICH_STRING )* ( '\\'' ( '\\'' )? )? '\\uFFFD'
{
match('\uFFFD');
// InternalSARL.g:16910:37: ( RULE_IN_RICH_STRING )*
loop19:
do {
int alt19=2;
int LA19_0 = input.LA(1);
if ( (LA19_0=='\'') ) {
int LA19_1 = input.LA(2);
if ( (LA19_1=='\'') ) {
int LA19_4 = input.LA(3);
if ( ((LA19_4>='\u0000' && LA19_4<='&')||(LA19_4>='(' && LA19_4<='\uFFFC')||(LA19_4>='\uFFFE' && LA19_4<='\uFFFF')) ) {
alt19=1; // depends on control dependency: [if], data = [none]
}
}
else if ( ((LA19_1>='\u0000' && LA19_1<='&')||(LA19_1>='(' && LA19_1<='\uFFFC')||(LA19_1>='\uFFFE' && LA19_1<='\uFFFF')) ) {
alt19=1; // depends on control dependency: [if], data = [none]
}
}
else if ( ((LA19_0>='\u0000' && LA19_0<='&')||(LA19_0>='(' && LA19_0<='\uFFFC')||(LA19_0>='\uFFFE' && LA19_0<='\uFFFF')) ) {
alt19=1; // depends on control dependency: [if], data = [none]
}
switch (alt19) {
case 1 :
// InternalSARL.g:16910:37: RULE_IN_RICH_STRING
{
mRULE_IN_RICH_STRING();
}
break;
default :
break loop19;
}
} while (true);
// InternalSARL.g:16910:58: ( '\\'' ( '\\'' )? )?
int alt21=2;
int LA21_0 = input.LA(1);
if ( (LA21_0=='\'') ) {
alt21=1; // depends on control dependency: [if], data = [none]
}
switch (alt21) {
case 1 :
// InternalSARL.g:16910:59: '\\'' ( '\\'' )?
{
match('\'');
// InternalSARL.g:16910:64: ( '\\'' )?
int alt20=2;
int LA20_0 = input.LA(1);
if ( (LA20_0=='\'') ) {
alt20=1; // depends on control dependency: [if], data = [none]
}
switch (alt20) {
case 1 :
// InternalSARL.g:16910:64: '\\''
{
match('\'');
}
break;
}
}
break;
}
match('\uFFFD');
}
state.type = _type;
state.channel = _channel;
}
finally {
}
} } |
public class class_name {
public boolean addStaticImport(final String typeFqn, final String member) {
if (typeFqn == null || member == null) {
throw new IllegalArgumentException("Type name " + typeFqn + ". Member name: " + member);
}
if (hasStaticImport(typeFqn, member, false)) {
return false;
}
XImportDeclaration importDecl = createImport(typeFqn, member);
importDecl.setStatic(true);
return addedImportDeclarations.add(importDecl);
} } | public class class_name {
public boolean addStaticImport(final String typeFqn, final String member) {
if (typeFqn == null || member == null) {
throw new IllegalArgumentException("Type name " + typeFqn + ". Member name: " + member);
}
if (hasStaticImport(typeFqn, member, false)) {
return false; // depends on control dependency: [if], data = [none]
}
XImportDeclaration importDecl = createImport(typeFqn, member);
importDecl.setStatic(true);
return addedImportDeclarations.add(importDecl);
} } |
public class class_name {
public String getHeader()
{
if (this.header == null)
{
if (this.headerKeys == null)
{
this.headerKeys = new String[2];
this.headerKeys[0] = getPropertyName() + ".header";
this.headerKeys[1] = getPropertyName();
}
this.header = ValkyrieRepository.getInstance().getApplicationConfig().messageResolver().getMessage(new DefaultMessageSourceResolvable(this.headerKeys, null,
this.headerKeys[this.headerKeys.length - 1]));
}
// JTableHeader has a reusable defaultHeaderRenderer on which the default height must be correct.
// when painting, the columns headers are processed in order and height is being calculated,
// if label is null or empty string header height is 4 and thus leaves us with a very small
// table-header, fix this by returning a space (-> font-size is incorporated)
return "".equals(this.header) ? " " : this.header;
} } | public class class_name {
public String getHeader()
{
if (this.header == null)
{
if (this.headerKeys == null)
{
this.headerKeys = new String[2]; // depends on control dependency: [if], data = [none]
this.headerKeys[0] = getPropertyName() + ".header"; // depends on control dependency: [if], data = [none]
this.headerKeys[1] = getPropertyName(); // depends on control dependency: [if], data = [none]
}
this.header = ValkyrieRepository.getInstance().getApplicationConfig().messageResolver().getMessage(new DefaultMessageSourceResolvable(this.headerKeys, null,
this.headerKeys[this.headerKeys.length - 1])); // depends on control dependency: [if], data = [none]
}
// JTableHeader has a reusable defaultHeaderRenderer on which the default height must be correct.
// when painting, the columns headers are processed in order and height is being calculated,
// if label is null or empty string header height is 4 and thus leaves us with a very small
// table-header, fix this by returning a space (-> font-size is incorporated)
return "".equals(this.header) ? " " : this.header;
} } |
public class class_name {
private void updateDurationTimeUnit(FastTrackColumn column)
{
if (m_durationTimeUnit == null && isDurationColumn(column))
{
int value = ((DurationColumn) column).getTimeUnitValue();
if (value != 1)
{
m_durationTimeUnit = FastTrackUtility.getTimeUnit(value);
}
}
} } | public class class_name {
private void updateDurationTimeUnit(FastTrackColumn column)
{
if (m_durationTimeUnit == null && isDurationColumn(column))
{
int value = ((DurationColumn) column).getTimeUnitValue();
if (value != 1)
{
m_durationTimeUnit = FastTrackUtility.getTimeUnit(value); // depends on control dependency: [if], data = [(value]
}
}
} } |
public class class_name {
private void pruneTracks(SetTrackInfo<Desc> info, GrowQueue_I32 unassociated) {
if( unassociated.size > maxInactiveTracks ) {
// make the first N elements the ones which will be dropped
int numDrop = unassociated.size-maxInactiveTracks;
for (int i = 0; i < numDrop; i++) {
int selected = rand.nextInt(unassociated.size-i)+i;
int a = unassociated.get(i);
unassociated.data[i] = unassociated.data[selected];
unassociated.data[selected] = a;
}
List<PointTrack> dropList = new ArrayList<>();
for (int i = 0; i < numDrop; i++) {
dropList.add( info.tracks.get(unassociated.get(i)) );
}
for (int i = 0; i < dropList.size(); i++) {
dropTrack(dropList.get(i));
}
}
} } | public class class_name {
private void pruneTracks(SetTrackInfo<Desc> info, GrowQueue_I32 unassociated) {
if( unassociated.size > maxInactiveTracks ) {
// make the first N elements the ones which will be dropped
int numDrop = unassociated.size-maxInactiveTracks;
for (int i = 0; i < numDrop; i++) {
int selected = rand.nextInt(unassociated.size-i)+i;
int a = unassociated.get(i);
unassociated.data[i] = unassociated.data[selected]; // depends on control dependency: [for], data = [i]
unassociated.data[selected] = a; // depends on control dependency: [for], data = [none]
}
List<PointTrack> dropList = new ArrayList<>();
for (int i = 0; i < numDrop; i++) {
dropList.add( info.tracks.get(unassociated.get(i)) ); // depends on control dependency: [for], data = [i]
}
for (int i = 0; i < dropList.size(); i++) {
dropTrack(dropList.get(i)); // depends on control dependency: [for], data = [i]
}
}
} } |
public class class_name {
public static boolean contains(Bbox parent, Bbox child) {
if (child.getX() < parent.getX()) {
return false;
}
if (child.getY() < parent.getY()) {
return false;
}
if (child.getMaxX() > parent.getMaxX()) {
return false;
}
if (child.getMaxY() > parent.getMaxY()) {
return false;
}
return true;
} } | public class class_name {
public static boolean contains(Bbox parent, Bbox child) {
if (child.getX() < parent.getX()) {
return false; // depends on control dependency: [if], data = [none]
}
if (child.getY() < parent.getY()) {
return false; // depends on control dependency: [if], data = [none]
}
if (child.getMaxX() > parent.getMaxX()) {
return false; // depends on control dependency: [if], data = [none]
}
if (child.getMaxY() > parent.getMaxY()) {
return false; // depends on control dependency: [if], data = [none]
}
return true;
} } |
public class class_name {
public static CmsGalleryConfigurationJSO parseConfiguration(String conf) {
if (CmsStringUtil.isEmptyOrWhitespaceOnly(conf)) {
conf = "{}";
}
return (CmsGalleryConfigurationJSO)CmsDomUtil.parseJSON(conf);
} } | public class class_name {
public static CmsGalleryConfigurationJSO parseConfiguration(String conf) {
if (CmsStringUtil.isEmptyOrWhitespaceOnly(conf)) {
conf = "{}";
// depends on control dependency: [if], data = [none]
}
return (CmsGalleryConfigurationJSO)CmsDomUtil.parseJSON(conf);
} } |
public class class_name {
@Override
public Collection<V> values() {
if (collectionOfValuesByKey == null) {
collectionOfValuesByKey = new AbstractFilterableCollection<V>() {
@Override
public Iterator<V> iterator() {
return new TransactionalBidiTreeMapIterator<V>(KEY) {
@Override
protected V doGetNext() {
return (V)lastReturnedNode.getData(VALUE);
}
};
}
@Override
public int size() {
return TransactionalBidiTreeMap.this.size();
}
@Override
public boolean contains(Object o) {
return containsValue(o);
}
@Override
public boolean remove(Object o) {
int oldNodeCount = nodeCount;
removeValue(o);
return nodeCount != oldNodeCount;
}
@Override
public boolean removeAll(Collection<?> c) {
boolean modified = false;
Iterator<?> iter = c.iterator();
while (iter.hasNext()) {
if (removeValue(iter.next()) != null) {
modified = true;
}
}
return modified;
}
@Override
public void clear() {
TransactionalBidiTreeMap.this.clear();
}
};
}
return collectionOfValuesByKey;
} } | public class class_name {
@Override
public Collection<V> values() {
if (collectionOfValuesByKey == null) {
collectionOfValuesByKey = new AbstractFilterableCollection<V>() {
@Override
public Iterator<V> iterator() {
return new TransactionalBidiTreeMapIterator<V>(KEY) {
@Override
protected V doGetNext() {
return (V)lastReturnedNode.getData(VALUE);
}
};
}
@Override
public int size() {
return TransactionalBidiTreeMap.this.size();
}
@Override
public boolean contains(Object o) {
return containsValue(o);
}
@Override
public boolean remove(Object o) {
int oldNodeCount = nodeCount;
removeValue(o);
return nodeCount != oldNodeCount;
}
@Override
public boolean removeAll(Collection<?> c) {
boolean modified = false;
Iterator<?> iter = c.iterator();
while (iter.hasNext()) {
if (removeValue(iter.next()) != null) {
modified = true;
// depends on control dependency: [if], data = [none]
}
}
return modified;
}
@Override
public void clear() {
TransactionalBidiTreeMap.this.clear();
}
};
// depends on control dependency: [if], data = [none]
}
return collectionOfValuesByKey;
} } |
public class class_name {
protected String getAnswers(Integer questionSeqId, List<? extends AnswerHeaderContract> answerHeaders) {
String answer = null;
String childAnswer = null;
StringBuilder stringBuilder = new StringBuilder();
if (answerHeaders != null && !answerHeaders.isEmpty()) {
for (AnswerHeaderContract answerHeader : answerHeaders) {
List<? extends AnswerContract> answerDetails = answerHeader.getAnswers();
for (AnswerContract answers : answerDetails) {
if (questionSeqId.equals(getQuestionAnswerService().findQuestionById(answers.getQuestionId()).getQuestionSeqId())) {
answer = answers.getAnswer();
if (answer != null) {
if (!answer.equals(NOT_ANSWERED)) {
stringBuilder.append(answer);
stringBuilder.append(",");
}
}
childAnswer = stringBuilder.toString();
}
}
}
}
return childAnswer;
} } | public class class_name {
protected String getAnswers(Integer questionSeqId, List<? extends AnswerHeaderContract> answerHeaders) {
String answer = null;
String childAnswer = null;
StringBuilder stringBuilder = new StringBuilder();
if (answerHeaders != null && !answerHeaders.isEmpty()) {
for (AnswerHeaderContract answerHeader : answerHeaders) {
List<? extends AnswerContract> answerDetails = answerHeader.getAnswers(); // depends on control dependency: [for], data = [answerHeader]
for (AnswerContract answers : answerDetails) {
if (questionSeqId.equals(getQuestionAnswerService().findQuestionById(answers.getQuestionId()).getQuestionSeqId())) {
answer = answers.getAnswer(); // depends on control dependency: [if], data = [none]
if (answer != null) {
if (!answer.equals(NOT_ANSWERED)) {
stringBuilder.append(answer); // depends on control dependency: [if], data = [none]
stringBuilder.append(","); // depends on control dependency: [if], data = [none]
}
}
childAnswer = stringBuilder.toString(); // depends on control dependency: [if], data = [none]
}
}
}
}
return childAnswer;
} } |
public class class_name {
public Observable<ServiceResponse<Page<VirtualNetworkUsageInner>>> listUsageWithServiceResponseAsync(final String resourceGroupName, final String virtualNetworkName) {
return listUsageSinglePageAsync(resourceGroupName, virtualNetworkName)
.concatMap(new Func1<ServiceResponse<Page<VirtualNetworkUsageInner>>, Observable<ServiceResponse<Page<VirtualNetworkUsageInner>>>>() {
@Override
public Observable<ServiceResponse<Page<VirtualNetworkUsageInner>>> call(ServiceResponse<Page<VirtualNetworkUsageInner>> page) {
String nextPageLink = page.body().nextPageLink();
if (nextPageLink == null) {
return Observable.just(page);
}
return Observable.just(page).concatWith(listUsageNextWithServiceResponseAsync(nextPageLink));
}
});
} } | public class class_name {
public Observable<ServiceResponse<Page<VirtualNetworkUsageInner>>> listUsageWithServiceResponseAsync(final String resourceGroupName, final String virtualNetworkName) {
return listUsageSinglePageAsync(resourceGroupName, virtualNetworkName)
.concatMap(new Func1<ServiceResponse<Page<VirtualNetworkUsageInner>>, Observable<ServiceResponse<Page<VirtualNetworkUsageInner>>>>() {
@Override
public Observable<ServiceResponse<Page<VirtualNetworkUsageInner>>> call(ServiceResponse<Page<VirtualNetworkUsageInner>> page) {
String nextPageLink = page.body().nextPageLink();
if (nextPageLink == null) {
return Observable.just(page); // depends on control dependency: [if], data = [none]
}
return Observable.just(page).concatWith(listUsageNextWithServiceResponseAsync(nextPageLink));
}
});
} } |
public class class_name {
private void processWorkerExit(Worker w, boolean completedAbruptly) {
if (completedAbruptly) // If abrupt, then workerCount wasn't adjusted
decrementWorkerCount();
final ReentrantLock mainLock = this.mainLock;
mainLock.lock();
try {
completedTaskCount += w.completedTasks;
workers.remove(w);
} finally {
mainLock.unlock();
}
tryTerminate();
int c = ctl.get();
if (runStateLessThan(c, STOP)) {
if (!completedAbruptly) {
int min = allowCoreThreadTimeOut ? 0 : corePoolSize;
if (min == 0 && ! workQueue.isEmpty())
min = 1;
if (workerCountOf(c) >= min)
return; // replacement not needed
}
addWorker(null, false);
}
} } | public class class_name {
private void processWorkerExit(Worker w, boolean completedAbruptly) {
if (completedAbruptly) // If abrupt, then workerCount wasn't adjusted
decrementWorkerCount();
final ReentrantLock mainLock = this.mainLock;
mainLock.lock();
try {
completedTaskCount += w.completedTasks; // depends on control dependency: [try], data = [none]
workers.remove(w); // depends on control dependency: [try], data = [none]
} finally {
mainLock.unlock();
}
tryTerminate();
int c = ctl.get();
if (runStateLessThan(c, STOP)) {
if (!completedAbruptly) {
int min = allowCoreThreadTimeOut ? 0 : corePoolSize;
if (min == 0 && ! workQueue.isEmpty())
min = 1;
if (workerCountOf(c) >= min)
return; // replacement not needed
}
addWorker(null, false); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
Locale getLocale(HandshakeRequest request) {
if(null != request) {
Map<String, List<String>> headers = request.getHeaders();
if(null != headers) {
List<String> accepts = headers.get(HttpHeaders.ACCEPT_LANGUAGE);
logger.debug("Get accept-language from client headers : {}", accepts);
if (null != accepts) {
for (String accept : accepts) {
try {
return localeExtractor.extractFromAccept(accept);
} catch (LocaleNotFoundException ex) {
}
}
}
}
}
return Locale.US;
} } | public class class_name {
Locale getLocale(HandshakeRequest request) {
if(null != request) {
Map<String, List<String>> headers = request.getHeaders();
if(null != headers) {
List<String> accepts = headers.get(HttpHeaders.ACCEPT_LANGUAGE);
logger.debug("Get accept-language from client headers : {}", accepts); // depends on control dependency: [if], data = [none]
if (null != accepts) {
for (String accept : accepts) {
try {
return localeExtractor.extractFromAccept(accept); // depends on control dependency: [try], data = [none]
} catch (LocaleNotFoundException ex) {
} // depends on control dependency: [catch], data = [none]
}
}
}
}
return Locale.US;
} } |
public class class_name {
public void onMouseUp(MouseUpEvent event) {
if (event.getNativeButton() != NativeEvent.BUTTON_RIGHT) {
Coordinate coordinate = getWorldPosition(event);
if (distanceLine.getOriginalLocation() == null) {
distanceLine.setGeometry(getFactory().createLineString(new Coordinate[] { coordinate }));
mapWidget.registerWorldPaintable(distanceLine);
mapWidget.registerWorldPaintable(lineSegment);
dispatchState(State.START);
} else {
Geometry geometry = (Geometry) distanceLine.getOriginalLocation();
InsertCoordinateOperation op = new InsertCoordinateOperation(geometry.getNumPoints(), coordinate);
geometry = op.execute(geometry);
distanceLine.setGeometry(geometry);
tempLength = (float) geometry.getLength();
updateMeasure(event, true);
dispatchState(State.CLICK);
}
mapWidget.render(mapWidget.getMapModel(), RenderGroup.VECTOR, RenderStatus.UPDATE);
}
} } | public class class_name {
public void onMouseUp(MouseUpEvent event) {
if (event.getNativeButton() != NativeEvent.BUTTON_RIGHT) {
Coordinate coordinate = getWorldPosition(event);
if (distanceLine.getOriginalLocation() == null) {
distanceLine.setGeometry(getFactory().createLineString(new Coordinate[] { coordinate })); // depends on control dependency: [if], data = [none]
mapWidget.registerWorldPaintable(distanceLine); // depends on control dependency: [if], data = [none]
mapWidget.registerWorldPaintable(lineSegment); // depends on control dependency: [if], data = [none]
dispatchState(State.START); // depends on control dependency: [if], data = [none]
} else {
Geometry geometry = (Geometry) distanceLine.getOriginalLocation();
InsertCoordinateOperation op = new InsertCoordinateOperation(geometry.getNumPoints(), coordinate);
geometry = op.execute(geometry); // depends on control dependency: [if], data = [none]
distanceLine.setGeometry(geometry); // depends on control dependency: [if], data = [none]
tempLength = (float) geometry.getLength(); // depends on control dependency: [if], data = [none]
updateMeasure(event, true); // depends on control dependency: [if], data = [none]
dispatchState(State.CLICK); // depends on control dependency: [if], data = [none]
}
mapWidget.render(mapWidget.getMapModel(), RenderGroup.VECTOR, RenderStatus.UPDATE); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public EClass getIfcRelAssociatesConstraint() {
if (ifcRelAssociatesConstraintEClass == null) {
ifcRelAssociatesConstraintEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI)
.getEClassifiers().get(451);
}
return ifcRelAssociatesConstraintEClass;
} } | public class class_name {
public EClass getIfcRelAssociatesConstraint() {
if (ifcRelAssociatesConstraintEClass == null) {
ifcRelAssociatesConstraintEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI)
.getEClassifiers().get(451);
// depends on control dependency: [if], data = [none]
}
return ifcRelAssociatesConstraintEClass;
} } |
public class class_name {
public static <T> BufferedIterator<T> buffered(Iterator<T> self) {
if (self instanceof BufferedIterator) {
return (BufferedIterator<T>) self;
} else {
return new IteratorBufferedIterator<T>(self);
}
} } | public class class_name {
public static <T> BufferedIterator<T> buffered(Iterator<T> self) {
if (self instanceof BufferedIterator) {
return (BufferedIterator<T>) self; // depends on control dependency: [if], data = [none]
} else {
return new IteratorBufferedIterator<T>(self); // depends on control dependency: [if], data = [none]
}
} } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.