code stringlengths 130 281k | code_dependency stringlengths 182 306k |
|---|---|
public class class_name {
/**
 * Recursively deselects the whole sub-tree below the given tree item:
 * every descendant's check box is unchecked and its id is removed from
 * the set of selected category ids.
 *
 * @param item the tree item whose descendants should be deselected
 */
private void deselectChildren(CmsTreeItem item) {
    for (String childId : m_childrens.get(item.getId())) {
        CmsTreeItem child = m_categories.get(childId);
        // skip ids with no registered tree item instead of failing with an NPE
        if (child == null) {
            continue;
        }
        // depth-first so the entire sub-tree is cleared before this level
        deselectChildren(child);
        child.getCheckBox().setChecked(false);
        // Collection.remove is a no-op for absent elements, so the previous
        // contains() pre-check was redundant
        m_selectedCategories.remove(childId);
    }
} } | public class class_name {
// Recursively unchecks all descendants of the given item and drops their ids
// from m_selectedCategories. The "depends on control dependency" comments are
// machine-generated control/data-flow markers — leave them intact.
private void deselectChildren(CmsTreeItem item) {
for (String childId : m_childrens.get(item.getId())) {
CmsTreeItem child = m_categories.get(childId);
deselectChildren(child);
// depends on control dependency: [for], data = [none]
child.getCheckBox().setChecked(false);
// depends on control dependency: [for], data = [none]
if (m_selectedCategories.contains(childId)) {
m_selectedCategories.remove(childId);
// depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
/**
 * Creates a temporary file whose name prefix/suffix are derived from the
 * supplied path, marks it for deletion on JVM exit, and returns it wrapped
 * in an Optional. Failures are delegated to the exception manager, which
 * logs and yields an empty Optional.
 *
 * @param path the path whose file name supplies the temp-file prefix/suffix
 * @return the created temp-file path, or {@code Optional.empty()} on failure
 */
public static Optional<Path> createTempFilePathAsOpt(Path path) {
    debug(log, "createTempFilePathAsOpt", path);
    String[] parts = JMFiles.getPrefixSuffix(path.toFile());
    try {
        Path tempFile = Files.createTempFile(parts[0], parts[1]);
        return Optional.of(tempFile)
                .filter(JMPath.ExistFilter)
                .map(JMPathOperation::deleteOnExit);
    } catch (Exception e) {
        return JMExceptionManager.handleExceptionAndReturnEmptyOptional(log,
                e, "createTempFilePathAsOpt", path);
    }
} } | public class class_name {
// Creates a temp file named after the given path, marks it delete-on-exit and
// returns it as an Optional (empty on failure, via the exception manager).
// The "depends on control dependency" comments are machine-generated markers.
public static Optional<Path> createTempFilePathAsOpt(Path path) {
debug(log, "createTempFilePathAsOpt", path);
String[] prefixSuffix = JMFiles.getPrefixSuffix(path.toFile());
try {
return Optional
.of(Files.createTempFile(prefixSuffix[0], prefixSuffix[1]))
.filter(JMPath.ExistFilter)
.map(JMPathOperation::deleteOnExit); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
return JMExceptionManager.handleExceptionAndReturnEmptyOptional(log,
e, "createTempFilePathAsOpt", path);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
// NOTE: ANTLR-style generated parser code for the "statement" rule (the
// embedded comments reference the BELStatement.g grammar). Do not hand-edit;
// regenerate from the grammar instead.
// Grammar shape: st=outer_term (rel=relationship
//   ( "(" nst=outer_term nrel=relationship not=outer_term ")" | ot=outer_term ))?
// Builds a Statement from the subject term, optionally attaching a relationship
// and either a nested statement object (parenthesized form) or a plain term object.
public final BELStatementParser.statement_return statement() throws RecognitionException {
BELStatementParser.statement_return retval = new BELStatementParser.statement_return();
retval.start = input.LT(1);
Object root_0 = null;
Token OPEN_PAREN1=null;
Token CLOSE_PAREN2=null;
BELStatementParser.outer_term_return st = null;
BELStatementParser.relationship_return rel = null;
BELStatementParser.outer_term_return nst = null;
BELStatementParser.relationship_return nrel = null;
BELStatementParser.outer_term_return not = null;
BELStatementParser.outer_term_return ot = null;
Object OPEN_PAREN1_tree=null;
Object CLOSE_PAREN2_tree=null;
try {
// BELStatement.g:64:32: (st= outer_term (rel= relationship ( ( OPEN_PAREN nst= outer_term nrel= relationship not= outer_term CLOSE_PAREN ) | ot= outer_term ) )? )
// BELStatement.g:65:5: st= outer_term (rel= relationship ( ( OPEN_PAREN nst= outer_term nrel= relationship not= outer_term CLOSE_PAREN ) | ot= outer_term ) )?
{
root_0 = (Object)adaptor.nil();
pushFollow(FOLLOW_outer_term_in_statement81);
st=outer_term();
state._fsp--;
adaptor.addChild(root_0, st.getTree());
final Statement s = new Statement((st!=null?st.r:null));
retval.r = s;
// BELStatement.g:69:5: (rel= relationship ( ( OPEN_PAREN nst= outer_term nrel= relationship not= outer_term CLOSE_PAREN ) | ot= outer_term ) )?
int alt2=2;
int LA2_0 = input.LA(1);
if ( ((LA2_0>=76 && LA2_0<=103)) ) {
alt2=1;
}
switch (alt2) {
case 1 :
// BELStatement.g:70:9: rel= relationship ( ( OPEN_PAREN nst= outer_term nrel= relationship not= outer_term CLOSE_PAREN ) | ot= outer_term )
{
pushFollow(FOLLOW_relationship_in_statement101);
rel=relationship();
state._fsp--;
adaptor.addChild(root_0, rel.getTree());
s.setRelationshipType((rel!=null?rel.r:null));
// BELStatement.g:73:9: ( ( OPEN_PAREN nst= outer_term nrel= relationship not= outer_term CLOSE_PAREN ) | ot= outer_term )
int alt1=2;
int LA1_0 = input.LA(1);
if ( (LA1_0==OPEN_PAREN) ) {
alt1=1;
}
else if ( ((LA1_0>=17 && LA1_0<=75)) ) {
alt1=2;
}
else {
NoViableAltException nvae =
new NoViableAltException("", 1, 0, input);
throw nvae;
}
switch (alt1) {
case 1 :
// BELStatement.g:74:13: ( OPEN_PAREN nst= outer_term nrel= relationship not= outer_term CLOSE_PAREN )
{
// BELStatement.g:74:13: ( OPEN_PAREN nst= outer_term nrel= relationship not= outer_term CLOSE_PAREN )
// BELStatement.g:75:17: OPEN_PAREN nst= outer_term nrel= relationship not= outer_term CLOSE_PAREN
{
OPEN_PAREN1=(Token)match(input,OPEN_PAREN,FOLLOW_OPEN_PAREN_in_statement145);
OPEN_PAREN1_tree = (Object)adaptor.create(OPEN_PAREN1);
adaptor.addChild(root_0, OPEN_PAREN1_tree);
pushFollow(FOLLOW_outer_term_in_statement166);
nst=outer_term();
state._fsp--;
adaptor.addChild(root_0, nst.getTree());
final Statement ns = new Statement((nst!=null?nst.r:null));
pushFollow(FOLLOW_relationship_in_statement188);
nrel=relationship();
state._fsp--;
adaptor.addChild(root_0, nrel.getTree());
ns.setRelationshipType((nrel!=null?nrel.r:null));
pushFollow(FOLLOW_outer_term_in_statement211);
not=outer_term();
state._fsp--;
adaptor.addChild(root_0, not.getTree());
ns.setObject(new Statement.Object((not!=null?not.r:null)));
s.setObject(new Statement.Object(ns));
retval.r = s;
CLOSE_PAREN2=(Token)match(input,CLOSE_PAREN,FOLLOW_CLOSE_PAREN_in_statement231);
CLOSE_PAREN2_tree = (Object)adaptor.create(CLOSE_PAREN2);
adaptor.addChild(root_0, CLOSE_PAREN2_tree);
}
}
break;
case 2 :
// BELStatement.g:90:17: ot= outer_term
{
pushFollow(FOLLOW_outer_term_in_statement280);
ot=outer_term();
state._fsp--;
adaptor.addChild(root_0, ot.getTree());
s.setObject(new Statement.Object((ot!=null?ot.r:null)));
retval.r = s;
}
break;
}
}
break;
}
}
retval.stop = input.LT(-1);
retval.tree = (Object)adaptor.rulePostProcessing(root_0);
adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
}
catch (RecognitionException re) {
// standard ANTLR error recovery: report, resync, and return an error node
reportError(re);
recover(input,re);
retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re);
}
finally {
}
return retval;
} } | public class class_name {
// NOTE: ANTLR-style generated parser code for the "statement" rule (see the
// BELStatement.g references in the comments). This copy additionally carries
// machine-generated "depends on control dependency" flow markers; keep both
// the generated code and the markers byte-identical.
public final BELStatementParser.statement_return statement() throws RecognitionException {
BELStatementParser.statement_return retval = new BELStatementParser.statement_return();
retval.start = input.LT(1);
Object root_0 = null;
Token OPEN_PAREN1=null;
Token CLOSE_PAREN2=null;
BELStatementParser.outer_term_return st = null;
BELStatementParser.relationship_return rel = null;
BELStatementParser.outer_term_return nst = null;
BELStatementParser.relationship_return nrel = null;
BELStatementParser.outer_term_return not = null;
BELStatementParser.outer_term_return ot = null;
Object OPEN_PAREN1_tree=null;
Object CLOSE_PAREN2_tree=null;
try {
// BELStatement.g:64:32: (st= outer_term (rel= relationship ( ( OPEN_PAREN nst= outer_term nrel= relationship not= outer_term CLOSE_PAREN ) | ot= outer_term ) )? )
// BELStatement.g:65:5: st= outer_term (rel= relationship ( ( OPEN_PAREN nst= outer_term nrel= relationship not= outer_term CLOSE_PAREN ) | ot= outer_term ) )?
{
root_0 = (Object)adaptor.nil();
pushFollow(FOLLOW_outer_term_in_statement81);
st=outer_term();
state._fsp--;
adaptor.addChild(root_0, st.getTree());
final Statement s = new Statement((st!=null?st.r:null));
retval.r = s;
// BELStatement.g:69:5: (rel= relationship ( ( OPEN_PAREN nst= outer_term nrel= relationship not= outer_term CLOSE_PAREN ) | ot= outer_term ) )?
int alt2=2;
int LA2_0 = input.LA(1);
if ( ((LA2_0>=76 && LA2_0<=103)) ) {
alt2=1; // depends on control dependency: [if], data = [none]
}
switch (alt2) {
case 1 :
// BELStatement.g:70:9: rel= relationship ( ( OPEN_PAREN nst= outer_term nrel= relationship not= outer_term CLOSE_PAREN ) | ot= outer_term )
{
pushFollow(FOLLOW_relationship_in_statement101);
rel=relationship();
state._fsp--;
adaptor.addChild(root_0, rel.getTree());
s.setRelationshipType((rel!=null?rel.r:null));
// BELStatement.g:73:9: ( ( OPEN_PAREN nst= outer_term nrel= relationship not= outer_term CLOSE_PAREN ) | ot= outer_term )
int alt1=2;
int LA1_0 = input.LA(1);
if ( (LA1_0==OPEN_PAREN) ) {
alt1=1; // depends on control dependency: [if], data = [none]
}
else if ( ((LA1_0>=17 && LA1_0<=75)) ) {
alt1=2; // depends on control dependency: [if], data = [none]
}
else {
NoViableAltException nvae =
new NoViableAltException("", 1, 0, input);
throw nvae;
}
switch (alt1) {
case 1 :
// BELStatement.g:74:13: ( OPEN_PAREN nst= outer_term nrel= relationship not= outer_term CLOSE_PAREN )
{
// BELStatement.g:74:13: ( OPEN_PAREN nst= outer_term nrel= relationship not= outer_term CLOSE_PAREN )
// BELStatement.g:75:17: OPEN_PAREN nst= outer_term nrel= relationship not= outer_term CLOSE_PAREN
{
OPEN_PAREN1=(Token)match(input,OPEN_PAREN,FOLLOW_OPEN_PAREN_in_statement145);
OPEN_PAREN1_tree = (Object)adaptor.create(OPEN_PAREN1);
adaptor.addChild(root_0, OPEN_PAREN1_tree);
pushFollow(FOLLOW_outer_term_in_statement166);
nst=outer_term();
state._fsp--;
adaptor.addChild(root_0, nst.getTree());
final Statement ns = new Statement((nst!=null?nst.r:null));
pushFollow(FOLLOW_relationship_in_statement188);
nrel=relationship();
state._fsp--;
adaptor.addChild(root_0, nrel.getTree());
ns.setRelationshipType((nrel!=null?nrel.r:null));
pushFollow(FOLLOW_outer_term_in_statement211);
not=outer_term();
state._fsp--;
adaptor.addChild(root_0, not.getTree());
ns.setObject(new Statement.Object((not!=null?not.r:null)));
s.setObject(new Statement.Object(ns));
retval.r = s;
CLOSE_PAREN2=(Token)match(input,CLOSE_PAREN,FOLLOW_CLOSE_PAREN_in_statement231);
CLOSE_PAREN2_tree = (Object)adaptor.create(CLOSE_PAREN2);
adaptor.addChild(root_0, CLOSE_PAREN2_tree);
}
}
break;
case 2 :
// BELStatement.g:90:17: ot= outer_term
{
pushFollow(FOLLOW_outer_term_in_statement280);
ot=outer_term();
state._fsp--;
adaptor.addChild(root_0, ot.getTree());
s.setObject(new Statement.Object((ot!=null?ot.r:null)));
retval.r = s;
}
break;
}
}
break;
}
}
retval.stop = input.LT(-1);
retval.tree = (Object)adaptor.rulePostProcessing(root_0);
adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re);
}
finally {
}
return retval;
} } |
public class class_name {
/**
 * Parses a residue-range string into a ResidueRangeAndLength, resolving the
 * special "_" (or absent) chain name against the given atom map and filling
 * in missing start/end residues with the chain's first/last residues.
 *
 * @param s   the range string, delegated to {@code parse(String)}
 * @param map used to resolve the chain and the first/last residues
 * @return the trimmed, validated range with its computed length
 */
public static ResidueRangeAndLength parse(String s, AtomPositionMap map) {
ResidueRange rr = parse(s);
ResidueNumber start = rr.getStart();
String chain = rr.getChainName();
// handle special "_" chain
if(chain == null || chain.equals("_")) {
// resolve "_" to the chain of the first mapped residue
ResidueNumber first = map.getNavMap().firstKey();
chain = first.getChainName();
// Quick check for additional chains. Not guaranteed if the atoms are out of order.
if( ! map.getNavMap().lastKey().getChainName().equals(chain) ) {
logger.warn("Multiple possible chains match '_'. Using chain {}",chain);
}
}
// get a non-null start and end
// if it's the whole chain, choose the first and last residue numbers in the chain
if (start==null) {
start = map.getFirst(chain);
}
ResidueNumber end = rr.getEnd();
if (end==null) { // should happen iff start==null
end = map.getLast(chain);
}
// Replace '_'
start.setChainName(chain);
end.setChainName(chain);
// Now fix any errors and calculate the length
return map.trimToValidResidues(new ResidueRange(chain, start, end));
} } | public class class_name {
// Parses a residue-range string, resolving the special "_" chain and null
// start/end against the atom map. The "depends on control dependency"
// comments are machine-generated flow markers — keep them intact.
public static ResidueRangeAndLength parse(String s, AtomPositionMap map) {
ResidueRange rr = parse(s);
ResidueNumber start = rr.getStart();
String chain = rr.getChainName();
// handle special "_" chain
if(chain == null || chain.equals("_")) {
ResidueNumber first = map.getNavMap().firstKey();
chain = first.getChainName(); // depends on control dependency: [if], data = [none]
// Quick check for additional chains. Not guaranteed if the atoms are out of order.
if( ! map.getNavMap().lastKey().getChainName().equals(chain) ) {
logger.warn("Multiple possible chains match '_'. Using chain {}",chain); // depends on control dependency: [if], data = [none]
}
}
// get a non-null start and end
// if it's the whole chain, choose the first and last residue numbers in the chain
if (start==null) {
start = map.getFirst(chain); // depends on control dependency: [if], data = [none]
}
ResidueNumber end = rr.getEnd();
if (end==null) { // should happen iff start==null
end = map.getLast(chain); // depends on control dependency: [if], data = [none]
}
// Replace '_'
start.setChainName(chain);
end.setChainName(chain);
// Now fix any errors and calculate the length
return map.trimToValidResidues(new ResidueRange(chain, start, end));
} } |
public class class_name {
/**
 * One-time setup of the global graph indexes. If the vertex-type property key
 * already exists the indexes are assumed present and the transaction is simply
 * committed; otherwise the full set of vertex/edge, composite and mixed
 * indexes is created and committed. Any failure rolls the management
 * transaction back and is rethrown as a RepositoryException.
 *
 * @param graph the graph whose management system is used to create indexes
 * @throws RepositoryException wrapping any failure during index creation
 * @throws IndexException declared by the index-creation helpers
 */
private void initialize(AtlasGraph graph) throws RepositoryException, IndexException {
AtlasGraphManagement management = graph.getManagementSystem();
try {
// fast path: presence of this key means global indexes were created before
if (management.containsPropertyKey(Constants.VERTEX_TYPE_PROPERTY_KEY)) {
LOG.info("Global indexes already exist for graph");
management.commit();
return;
}
/* This is called only once, which is the first time Atlas types are made indexable .*/
LOG.info("Indexes do not exist, Creating indexes for graph.");
management.createVertexIndex(Constants.VERTEX_INDEX, Constants.BACKING_INDEX, Collections.<AtlasPropertyKey>emptyList());
management.createEdgeIndex(Constants.EDGE_INDEX, Constants.BACKING_INDEX);
// create a composite index for guid as its unique
createIndexes(management, Constants.GUID_PROPERTY_KEY, String.class, true,
AtlasCardinality.SINGLE, true, true);
// Add creation_timestamp property to Vertex Index (mixed index)
createIndexes(management, Constants.TIMESTAMP_PROPERTY_KEY, Long.class, false, AtlasCardinality.SINGLE, false, false);
// Add modification_timestamp property to Vertex Index (mixed index)
createIndexes(management, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY, Long.class, false,
AtlasCardinality.SINGLE, false, false);
// create a mixed index for entity state. Set systemProperty flag deliberately to false
// so that it doesnt create a composite index which has issues with
// titan 0.5.4 - Refer https://groups.google.com/forum/#!searchin/aureliusgraphs/hemanth/aureliusgraphs/bx7T843mzXU/fjAsclx7GAAJ
createIndexes(management, Constants.STATE_PROPERTY_KEY, String.class, false, AtlasCardinality.SINGLE, false, false);
// Create a composite and mixed index for created by property
createIndexes(management, Constants.CREATED_BY_KEY, String.class, false,
AtlasCardinality.SINGLE, true, true);
// Create a composite and mixed index for modified by property
createIndexes(management, Constants.MODIFIED_BY_KEY, String.class, false,
AtlasCardinality.SINGLE, true, true);
// create a composite and mixed index for type since it can be combined with other keys
createIndexes(management, Constants.ENTITY_TYPE_PROPERTY_KEY, String.class, false, AtlasCardinality.SINGLE,
true, true);
// create a composite and mixed index for type since it can be combined with other keys
createIndexes(management, Constants.SUPER_TYPES_PROPERTY_KEY, String.class, false, AtlasCardinality.SET,
true, true);
// create a composite and mixed index for traitNames since it can be combined with other
// keys. Traits must be a set and not a list.
createIndexes(management, Constants.TRAIT_NAMES_PROPERTY_KEY, String.class, false, AtlasCardinality.SET,
true, true);
// Index for full text search
createFullTextIndex(management);
//Indexes for graph backed type system store
createTypeStoreIndexes(management);
commit(management);
LOG.info("Index creation for global keys complete.");
} catch (Throwable t) {
// undo partial index creation before surfacing the failure
rollback(management);
throw new RepositoryException(t);
}
} } | public class class_name {
// One-time creation of global graph indexes (skipped if already present);
// rolls back and rethrows as RepositoryException on any failure. The
// "depends on control dependency" comments are machine-generated markers.
private void initialize(AtlasGraph graph) throws RepositoryException, IndexException {
AtlasGraphManagement management = graph.getManagementSystem();
try {
if (management.containsPropertyKey(Constants.VERTEX_TYPE_PROPERTY_KEY)) {
LOG.info("Global indexes already exist for graph"); // depends on control dependency: [if], data = [none]
management.commit(); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
/* This is called only once, which is the first time Atlas types are made indexable .*/
LOG.info("Indexes do not exist, Creating indexes for graph.");
management.createVertexIndex(Constants.VERTEX_INDEX, Constants.BACKING_INDEX, Collections.<AtlasPropertyKey>emptyList());
management.createEdgeIndex(Constants.EDGE_INDEX, Constants.BACKING_INDEX);
// create a composite index for guid as its unique
createIndexes(management, Constants.GUID_PROPERTY_KEY, String.class, true,
AtlasCardinality.SINGLE, true, true);
// Add creation_timestamp property to Vertex Index (mixed index)
createIndexes(management, Constants.TIMESTAMP_PROPERTY_KEY, Long.class, false, AtlasCardinality.SINGLE, false, false);
// Add modification_timestamp property to Vertex Index (mixed index)
createIndexes(management, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY, Long.class, false,
AtlasCardinality.SINGLE, false, false);
// create a mixed index for entity state. Set systemProperty flag deliberately to false
// so that it doesnt create a composite index which has issues with
// titan 0.5.4 - Refer https://groups.google.com/forum/#!searchin/aureliusgraphs/hemanth/aureliusgraphs/bx7T843mzXU/fjAsclx7GAAJ
createIndexes(management, Constants.STATE_PROPERTY_KEY, String.class, false, AtlasCardinality.SINGLE, false, false);
// Create a composite and mixed index for created by property
createIndexes(management, Constants.CREATED_BY_KEY, String.class, false,
AtlasCardinality.SINGLE, true, true);
// Create a composite and mixed index for modified by property
createIndexes(management, Constants.MODIFIED_BY_KEY, String.class, false,
AtlasCardinality.SINGLE, true, true);
// create a composite and mixed index for type since it can be combined with other keys
createIndexes(management, Constants.ENTITY_TYPE_PROPERTY_KEY, String.class, false, AtlasCardinality.SINGLE,
true, true);
// create a composite and mixed index for type since it can be combined with other keys
createIndexes(management, Constants.SUPER_TYPES_PROPERTY_KEY, String.class, false, AtlasCardinality.SET,
true, true);
// create a composite and mixed index for traitNames since it can be combined with other
// keys. Traits must be a set and not a list.
createIndexes(management, Constants.TRAIT_NAMES_PROPERTY_KEY, String.class, false, AtlasCardinality.SET,
true, true);
// Index for full text search
createFullTextIndex(management);
//Indexes for graph backed type system store
createTypeStoreIndexes(management);
commit(management);
LOG.info("Index creation for global keys complete.");
} catch (Throwable t) {
rollback(management);
throw new RepositoryException(t);
}
} } |
public class class_name {
/**
 * Converts a Header into its XBEL representation, copying authors, licenses
 * and the scalar metadata fields.
 *
 * @param source the header to convert; may be null
 * @return the converted XBELHeader, or null when {@code source} is null
 */
@Override
public XBELHeader convert(Header source) {
    if (source == null) return null;
    XBELHeader xh = new XBELHeader();
    List<String> authors = source.getAuthors();
    if (hasItems(authors)) {
        XBELAuthorGroup xag = new XBELAuthorGroup();
        // the getter exposes the group's live backing list (the original code
        // also relied on this), so addAll replaces the manual copy loop
        xag.getAuthor().addAll(authors);
        xh.setAuthorGroup(xag);
    }
    List<String> licenses = source.getLicenses();
    if (hasItems(licenses)) {
        XBELLicenseGroup xlg = new XBELLicenseGroup();
        xlg.getLicense().addAll(licenses);
        xh.setLicenseGroup(xlg);
    }
    // scalar metadata is copied verbatim (nulls pass through unchanged)
    xh.setContactInfo(source.getContactInfo());
    xh.setCopyright(source.getCopyright());
    xh.setDescription(source.getDescription());
    xh.setDisclaimer(source.getDisclaimer());
    xh.setName(source.getName());
    xh.setVersion(source.getVersion());
    return xh;
} } | public class class_name {
// Converts a Header into its XBEL equivalent (authors, licenses, scalar
// metadata). The "depends on control dependency" comments are
// machine-generated flow markers — keep them intact.
@Override
public XBELHeader convert(Header source) {
if (source == null) return null;
XBELHeader xh = new XBELHeader();
List<String> authors = source.getAuthors();
if (hasItems(authors)) {
XBELAuthorGroup xag = new XBELAuthorGroup();
List<String> xauthors = xag.getAuthor();
for (final String author : authors) {
xauthors.add(author); // depends on control dependency: [for], data = [author]
}
xh.setAuthorGroup(xag); // depends on control dependency: [if], data = [none]
}
List<String> licenses = source.getLicenses();
if (hasItems(licenses)) {
XBELLicenseGroup xlg = new XBELLicenseGroup();
List<String> xlicenses = xlg.getLicense();
for (final String license : licenses) {
xlicenses.add(license); // depends on control dependency: [for], data = [license]
}
xh.setLicenseGroup(xlg); // depends on control dependency: [if], data = [none]
}
String contactInfo = source.getContactInfo();
String copyright = source.getCopyright();
String description = source.getDescription();
String disclaimer = source.getDisclaimer();
String name = source.getName();
String version = source.getVersion();
xh.setContactInfo(contactInfo);
xh.setCopyright(copyright);
xh.setDescription(description);
xh.setDisclaimer(disclaimer);
xh.setName(name);
xh.setVersion(version);
return xh;
} } |
public class class_name {
/**
 * Injects a titled itemized list of prerequisite links into the given DocBook
 * element, immediately after its direct "title" child. Does nothing when the
 * spec node has no prerequisite relationships or the element has no title.
 *
 * @param specNode     the node whose prerequisite relationships are injected
 * @param doc          the document used to create the new elements
 * @param node         the element receiving the injected list
 * @param useFixedUrls whether link ids are built from fixed URLs
 */
public void processPrerequisiteInjections(final SpecNodeWithRelationships specNode, final Document doc, final Element node,
final boolean useFixedUrls) {
// Make sure we have some links to inject
if (specNode.getPrerequisiteRelationships().isEmpty()) return;
// Get the title element so that it can be used later to add the prerequisite topic nodes
Element titleEle = null;
final NodeList titleList = node.getElementsByTagName("title");
// getElementsByTagName is recursive, so keep only a title that is a direct child
for (int i = 0; i < titleList.getLength(); i++) {
if (titleList.item(i).getParentNode().equals(node)) {
titleEle = (Element) titleList.item(i);
break;
}
}
if (titleEle != null) {
// Create the paragraph and list of prerequisites.
final Element itemisedListEle = doc.createElement("itemizedlist");
itemisedListEle.setAttribute("role", ROLE_PREREQUISITE_LIST);
final Element itemisedListTitleEle = doc.createElement("title");
// fall back to the default title when no translation is available
final String prerequisiteTranslation = translations.getString(PREREQUISITE_PROPERTY);
itemisedListTitleEle.setTextContent(prerequisiteTranslation == null ? DEFAULT_PREREQUISITE : prerequisiteTranslation);
itemisedListEle.appendChild(itemisedListTitleEle);
final List<List<Element>> list = new LinkedList<List<Element>>();
// Add the Relationships
for (final Relationship prereq : specNode.getPrerequisiteRelationships()) {
if (prereq instanceof TopicRelationship) {
final SpecTopic relatedTopic = ((TopicRelationship) prereq).getSecondaryRelationship();
list.add(DocBookUtilities.buildXRef(doc, relatedTopic.getUniqueLinkId(useFixedUrls),
ROLE_PREREQUISITE));
} else {
final SpecNode relatedNode = ((TargetRelationship) prereq).getSecondaryRelationship();
list.add(DocBookUtilities.buildXRef(doc, relatedNode.getUniqueLinkId(useFixedUrls),
ROLE_PREREQUISITE));
}
}
// Wrap the items into an itemized list
final List<Element> items = DocBookUtilities.wrapItemsInListItems(doc, list);
for (final Element ele : items) {
itemisedListEle.appendChild(ele);
}
// Add the paragraph and list after the title node
// skip text/whitespace nodes so the list lands before the next element or comment
Node nextNode = titleEle.getNextSibling();
while (nextNode != null && nextNode.getNodeType() != Node.ELEMENT_NODE && nextNode.getNodeType() != Node.COMMENT_NODE) {
nextNode = nextNode.getNextSibling();
}
node.insertBefore(itemisedListEle, nextNode);
}
} } | public class class_name {
// Injects a titled itemized list of prerequisite xrefs after the node's direct
// "title" child; no-op without prerequisites or a title. The "depends on
// control dependency" comments are machine-generated flow markers.
public void processPrerequisiteInjections(final SpecNodeWithRelationships specNode, final Document doc, final Element node,
final boolean useFixedUrls) {
// Make sure we have some links to inject
if (specNode.getPrerequisiteRelationships().isEmpty()) return;
// Get the title element so that it can be used later to add the prerequisite topic nodes
Element titleEle = null;
final NodeList titleList = node.getElementsByTagName("title");
for (int i = 0; i < titleList.getLength(); i++) {
if (titleList.item(i).getParentNode().equals(node)) {
titleEle = (Element) titleList.item(i); // depends on control dependency: [if], data = [none]
break;
}
}
if (titleEle != null) {
// Create the paragraph and list of prerequisites.
final Element itemisedListEle = doc.createElement("itemizedlist");
itemisedListEle.setAttribute("role", ROLE_PREREQUISITE_LIST); // depends on control dependency: [if], data = [none]
final Element itemisedListTitleEle = doc.createElement("title");
final String prerequisiteTranslation = translations.getString(PREREQUISITE_PROPERTY);
itemisedListTitleEle.setTextContent(prerequisiteTranslation == null ? DEFAULT_PREREQUISITE : prerequisiteTranslation); // depends on control dependency: [if], data = [none]
itemisedListEle.appendChild(itemisedListTitleEle); // depends on control dependency: [if], data = [none]
final List<List<Element>> list = new LinkedList<List<Element>>();
// Add the Relationships
for (final Relationship prereq : specNode.getPrerequisiteRelationships()) {
if (prereq instanceof TopicRelationship) {
final SpecTopic relatedTopic = ((TopicRelationship) prereq).getSecondaryRelationship();
list.add(DocBookUtilities.buildXRef(doc, relatedTopic.getUniqueLinkId(useFixedUrls),
ROLE_PREREQUISITE)); // depends on control dependency: [if], data = [none]
} else {
final SpecNode relatedNode = ((TargetRelationship) prereq).getSecondaryRelationship();
list.add(DocBookUtilities.buildXRef(doc, relatedNode.getUniqueLinkId(useFixedUrls),
ROLE_PREREQUISITE)); // depends on control dependency: [if], data = [none]
}
}
// Wrap the items into an itemized list
final List<Element> items = DocBookUtilities.wrapItemsInListItems(doc, list);
for (final Element ele : items) {
itemisedListEle.appendChild(ele); // depends on control dependency: [for], data = [ele]
}
// Add the paragraph and list after the title node
Node nextNode = titleEle.getNextSibling();
while (nextNode != null && nextNode.getNodeType() != Node.ELEMENT_NODE && nextNode.getNodeType() != Node.COMMENT_NODE) {
nextNode = nextNode.getNextSibling(); // depends on control dependency: [while], data = [none]
}
node.insertBefore(itemisedListEle, nextNode); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
/**
 * Sets the collection of virtual private gateway ids, defensively copying the
 * input into the SDK's internal list type. A null argument clears the field.
 *
 * @param vpnGatewayIds the gateway ids to store, or null to clear
 */
public void setVpnGatewayIds(java.util.Collection<String> vpnGatewayIds) {
    this.vpnGatewayIds = (vpnGatewayIds == null)
            ? null
            : new com.amazonaws.internal.SdkInternalList<String>(vpnGatewayIds);
} } | public class class_name {
// Stores a defensive SdkInternalList copy of the given gateway ids (null
// clears). The "depends on control dependency" comments are machine-generated
// flow markers — keep them intact.
public void setVpnGatewayIds(java.util.Collection<String> vpnGatewayIds) {
if (vpnGatewayIds == null) {
this.vpnGatewayIds = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.vpnGatewayIds = new com.amazonaws.internal.SdkInternalList<String>(vpnGatewayIds);
} } |
public class class_name {
/**
 * Builds a human-readable message from an Authy API error, including the
 * country code and message when present. A null error yields a generic
 * "unknown error" message.
 *
 * @param err the Authy error, possibly null
 * @return the formatted error message
 */
public static String getErrorMessage(final com.authy.api.Error err) {
    if (err == null) {
        return "An unknown error has occurred. Check your API key and URL settings.";
    }
    val builder = new StringBuilder("Authy Error");
    if (StringUtils.isNotBlank(err.getCountryCode())) {
        builder.append(": Country Code: ").append(err.getCountryCode());
    }
    if (StringUtils.isNotBlank(err.getMessage())) {
        builder.append(": Message: ").append(err.getMessage());
    }
    return builder.toString();
} } | public class class_name {
// Formats an Authy error (country code + message when present); null yields a
// generic unknown-error message. The "depends on control dependency" comments
// are machine-generated flow markers — keep them intact.
public static String getErrorMessage(final com.authy.api.Error err) {
val builder = new StringBuilder();
if (err != null) {
builder.append("Authy Error"); // depends on control dependency: [if], data = [none]
if (StringUtils.isNotBlank(err.getCountryCode())) {
builder.append(": Country Code: ").append(err.getCountryCode()); // depends on control dependency: [if], data = [none]
}
if (StringUtils.isNotBlank(err.getMessage())) {
builder.append(": Message: ").append(err.getMessage()); // depends on control dependency: [if], data = [none]
}
} else {
builder.append("An unknown error has occurred. Check your API key and URL settings."); // depends on control dependency: [if], data = [none]
}
return builder.toString();
} } |
public class class_name {
/**
 * Loads custom tag definitions and tag default attribute values from the
 * "tags" section of the configuration document. Each "tag" child registers a
 * class-backed tag; each "default" child records a default attribute value for
 * a tag, which is then applied to the config for both dialects. Any failure is
 * logged via {@code log(config, log, e)} rather than propagated.
 *
 * @param configServer the server config (unused in the visible body)
 * @param config       the config receiving tags and default attribute values
 * @param doc          the XML configuration document
 * @param log          target for error logging
 */
private static void loadTag(ConfigServerImpl configServer, ConfigImpl config, Document doc, Log log) {
try {
Element parent = getChildByName(doc.getDocumentElement(), "tags");
{
Element[] tags = getChildren(parent, "tag");
Element tag;
ClassDefinition cd;
String nss, ns, n;
if (tags != null) {
for (int i = 0; i < tags.length; i++) {
tag = tags[i];
ns = getAttr(tag, "namespace");
nss = getAttr(tag, "namespace-seperator");
n = getAttr(tag, "name");
cd = getClassDefinition(tag, "", config.getIdentification());
// register the tag for both CFML and Lucee dialects
config.addTag(ns, nss, n, CFMLEngine.DIALECT_BOTH, cd);
}
}
}
// set tag default values
Element[] defaults = getChildren(parent, "default");
if (!ArrayUtil.isEmpty(defaults)) {
Element def;
String tagName, attrName, attrValue;
Struct tags = new StructImpl(), tag;
Map<Key, Map<Key, Object>> trg = new HashMap<Key, Map<Key, Object>>();
for (int i = 0; i < defaults.length; i++) {
def = defaults[i];
tagName = getAttr(def, "tag");
attrName = getAttr(def, "attribute-name");
attrValue = getAttr(def, "attribute-value");
// entries missing any of the three attributes are ignored
if (StringUtil.isEmpty(tagName) || StringUtil.isEmpty(attrName) || StringUtil.isEmpty(attrValue)) continue;
tag = (Struct) tags.get(tagName, null);
if (tag == null) {
tag = new StructImpl();
tags.setEL(tagName, tag);
}
tag.setEL(attrName, attrValue);
// NOTE(review): these three calls are re-run on every loop iteration over
// the growing "tags" struct; they look loop-invariant and could likely be
// hoisted below the loop (see the stray "initTagDefaultAttributeValues"
// comment there) — confirm the init calls are idempotent before moving.
ApplicationContextSupport.initTagDefaultAttributeValues(config, trg, tags, CFMLEngine.DIALECT_CFML);
ApplicationContextSupport.initTagDefaultAttributeValues(config, trg, tags, CFMLEngine.DIALECT_LUCEE);
config.setTagDefaultAttributeValues(trg);
}
// initTagDefaultAttributeValues
}
}
catch (Exception e) {
log(config, log, e);
}
} } | public class class_name {
// Loads custom tag definitions and tag default attribute values from the
// "tags" config section; errors are logged, not propagated. The "depends on
// control dependency" comments are machine-generated flow markers.
private static void loadTag(ConfigServerImpl configServer, ConfigImpl config, Document doc, Log log) {
try {
Element parent = getChildByName(doc.getDocumentElement(), "tags");
{
Element[] tags = getChildren(parent, "tag");
Element tag;
ClassDefinition cd;
String nss, ns, n;
if (tags != null) {
for (int i = 0; i < tags.length; i++) {
tag = tags[i]; // depends on control dependency: [for], data = [i]
ns = getAttr(tag, "namespace"); // depends on control dependency: [for], data = [none]
nss = getAttr(tag, "namespace-seperator"); // depends on control dependency: [for], data = [none]
n = getAttr(tag, "name"); // depends on control dependency: [for], data = [none]
cd = getClassDefinition(tag, "", config.getIdentification()); // depends on control dependency: [for], data = [none]
config.addTag(ns, nss, n, CFMLEngine.DIALECT_BOTH, cd); // depends on control dependency: [for], data = [none]
}
}
}
// set tag default values
Element[] defaults = getChildren(parent, "default");
if (!ArrayUtil.isEmpty(defaults)) {
Element def;
String tagName, attrName, attrValue;
Struct tags = new StructImpl(), tag;
Map<Key, Map<Key, Object>> trg = new HashMap<Key, Map<Key, Object>>();
for (int i = 0; i < defaults.length; i++) {
def = defaults[i]; // depends on control dependency: [for], data = [i]
tagName = getAttr(def, "tag"); // depends on control dependency: [for], data = [none]
attrName = getAttr(def, "attribute-name"); // depends on control dependency: [for], data = [none]
attrValue = getAttr(def, "attribute-value"); // depends on control dependency: [for], data = [none]
if (StringUtil.isEmpty(tagName) || StringUtil.isEmpty(attrName) || StringUtil.isEmpty(attrValue)) continue;
tag = (Struct) tags.get(tagName, null); // depends on control dependency: [for], data = [none]
if (tag == null) {
tag = new StructImpl(); // depends on control dependency: [if], data = [none]
tags.setEL(tagName, tag); // depends on control dependency: [if], data = [(tag]
}
tag.setEL(attrName, attrValue); // depends on control dependency: [for], data = [none]
ApplicationContextSupport.initTagDefaultAttributeValues(config, trg, tags, CFMLEngine.DIALECT_CFML); // depends on control dependency: [for], data = [none]
ApplicationContextSupport.initTagDefaultAttributeValues(config, trg, tags, CFMLEngine.DIALECT_LUCEE); // depends on control dependency: [for], data = [none]
config.setTagDefaultAttributeValues(trg); // depends on control dependency: [for], data = [none]
}
// initTagDefaultAttributeValues
}
}
catch (Exception e) {
log(config, log, e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public static Page<Record> paginate(String where,
String sql_columns,
DTCriterias criterias,
List<Object> params) {
int pageSize = criterias.getLength();
int start = criterias.getStart() / pageSize + 1;
StringBuilder where_sql = new StringBuilder(where);
final List<Triple<String, Condition, Object>> custom_params = criterias.getParams();
if (!custom_params.isEmpty()) {
boolean append_and = StringUtils.containsIgnoreCase(where, "WHERE");
if (!append_and) {
where_sql.append(SQL_WHERE);
}
itemCustomParamSql(params, where_sql, custom_params, append_and);
}
final List<DTOrder> order = criterias.getOrder();
if (order != null && !order.isEmpty()) {
StringBuilder orderBy = new StringBuilder();
for (DTOrder _order : order)
orderBy.append(_order.getColumn()).append(StringPool.SPACE).append(_order.getDir());
final String byColumns = orderBy.toString();
if (!Strings.isNullOrEmpty(byColumns)) {
where_sql.append(" ORDER BY ").append(byColumns);
}
}
if (params == null || params.isEmpty()) {
return Db.paginate(start, pageSize, sql_columns, where_sql.toString());
} else {
return Db.paginate(start, pageSize, sql_columns, where_sql.toString(), params.toArray());
}
} } | public class class_name {
public static Page<Record> paginate(String where,
String sql_columns,
DTCriterias criterias,
List<Object> params) {
int pageSize = criterias.getLength();
int start = criterias.getStart() / pageSize + 1;
StringBuilder where_sql = new StringBuilder(where);
final List<Triple<String, Condition, Object>> custom_params = criterias.getParams();
if (!custom_params.isEmpty()) {
boolean append_and = StringUtils.containsIgnoreCase(where, "WHERE");
if (!append_and) {
where_sql.append(SQL_WHERE); // depends on control dependency: [if], data = [none]
}
itemCustomParamSql(params, where_sql, custom_params, append_and); // depends on control dependency: [if], data = [none]
}
final List<DTOrder> order = criterias.getOrder();
if (order != null && !order.isEmpty()) {
StringBuilder orderBy = new StringBuilder();
for (DTOrder _order : order)
orderBy.append(_order.getColumn()).append(StringPool.SPACE).append(_order.getDir());
final String byColumns = orderBy.toString();
if (!Strings.isNullOrEmpty(byColumns)) {
where_sql.append(" ORDER BY ").append(byColumns); // depends on control dependency: [if], data = [none]
}
}
if (params == null || params.isEmpty()) {
return Db.paginate(start, pageSize, sql_columns, where_sql.toString()); // depends on control dependency: [if], data = [none]
} else {
return Db.paginate(start, pageSize, sql_columns, where_sql.toString(), params.toArray()); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public List<OrderedPair<T>> getOrderedPairs() {
List<OrderedPair<T>> pairs = new ArrayList<OrderedPair<T>>(list1.size()*list2.size());
for (T element1: list1) {
for (T element2: list2) {
pairs.add(new OrderedPair<T>(element1, element2));
}
}
return pairs;
} } | public class class_name {
public List<OrderedPair<T>> getOrderedPairs() {
List<OrderedPair<T>> pairs = new ArrayList<OrderedPair<T>>(list1.size()*list2.size());
for (T element1: list1) {
for (T element2: list2) {
pairs.add(new OrderedPair<T>(element1, element2)); // depends on control dependency: [for], data = [element2]
}
}
return pairs;
} } |
public class class_name {
public void setSettingExcelPattern(String settingExcelPattern) {
if(Utils.isEmpty(settingExcelPattern)) {
this.settingExcelPattern = Optional.empty();
} else {
this.settingExcelPattern = Optional.of(settingExcelPattern);
}
} } | public class class_name {
public void setSettingExcelPattern(String settingExcelPattern) {
if(Utils.isEmpty(settingExcelPattern)) {
this.settingExcelPattern = Optional.empty();
// depends on control dependency: [if], data = [none]
} else {
this.settingExcelPattern = Optional.of(settingExcelPattern);
// depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static EvaluatorRequest fromString(final String serializedRequest) {
try {
final Decoder decoder =
DecoderFactory.get().jsonDecoder(AvroEvaluatorRequest.getClassSchema(), serializedRequest);
final SpecificDatumReader<AvroEvaluatorRequest> reader = new SpecificDatumReader<>(AvroEvaluatorRequest.class);
return fromAvro(reader.read(null, decoder));
} catch (final IOException ex) {
throw new RuntimeException("Unable to deserialize compute request", ex);
}
} } | public class class_name {
public static EvaluatorRequest fromString(final String serializedRequest) {
try {
final Decoder decoder =
DecoderFactory.get().jsonDecoder(AvroEvaluatorRequest.getClassSchema(), serializedRequest);
final SpecificDatumReader<AvroEvaluatorRequest> reader = new SpecificDatumReader<>(AvroEvaluatorRequest.class);
return fromAvro(reader.read(null, decoder)); // depends on control dependency: [try], data = [none]
} catch (final IOException ex) {
throw new RuntimeException("Unable to deserialize compute request", ex);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
    /**
     * Returns the largest value in input[start .. start + length - 1].
     * The caller must guarantee length >= 1 (checked only via assert).
     */
    static int max(int[] input, int start, int length) {
        assert length >= 1;
        int best = input[start];
        // Straightforward forward scan over the remaining elements (the original
        // counted an auxiliary index down while walking the array up).
        for (int idx = start + 1; idx < start + length; idx++) {
            final int candidate = input[idx];
            if (candidate > best) {
                best = candidate;
            }
        }
        return best;
    }
}
static int max(int[] input, int start, int length) {
assert length >= 1;
int max = input[start];
for (int i = length - 2, index = start + 1; i >= 0; i--, index++) {
final int v = input[index];
if (v > max) {
max = v; // depends on control dependency: [if], data = [none]
}
}
return max;
} } |
public class class_name {
public static String join(SoyList list, String separator) {
List<String> stringList = new ArrayList<>();
for (SoyValue value : list.asResolvedJavaList()) {
stringList.add(value.coerceToString());
}
return Joiner.on(separator).join(stringList);
} } | public class class_name {
public static String join(SoyList list, String separator) {
List<String> stringList = new ArrayList<>();
for (SoyValue value : list.asResolvedJavaList()) {
stringList.add(value.coerceToString()); // depends on control dependency: [for], data = [value]
}
return Joiner.on(separator).join(stringList);
} } |
public class class_name {
public List<JAXBElement<Object>> get_GenericApplicationPropertyOfReliefComponent() {
if (_GenericApplicationPropertyOfReliefComponent == null) {
_GenericApplicationPropertyOfReliefComponent = new ArrayList<JAXBElement<Object>>();
}
return this._GenericApplicationPropertyOfReliefComponent;
} } | public class class_name {
public List<JAXBElement<Object>> get_GenericApplicationPropertyOfReliefComponent() {
if (_GenericApplicationPropertyOfReliefComponent == null) {
_GenericApplicationPropertyOfReliefComponent = new ArrayList<JAXBElement<Object>>(); // depends on control dependency: [if], data = [none]
}
return this._GenericApplicationPropertyOfReliefComponent;
} } |
public class class_name {
public CallAS7 resource(String... parts) {
for(String part : parts ) {
this.resource.add(part);
}
return this;
} } | public class class_name {
public CallAS7 resource(String... parts) {
for(String part : parts ) {
this.resource.add(part); // depends on control dependency: [for], data = [part]
}
return this;
} } |
public class class_name {
void removeInlinedFunctions() {
for (Map.Entry<String, FunctionState> entry : fns.entrySet()) {
String name = entry.getKey();
FunctionState functionState = entry.getValue();
if (functionState.canRemove()) {
Function fn = functionState.getFn();
checkState(functionState.canInline());
checkState(fn != null);
verifyAllReferencesInlined(name, functionState);
fn.remove();
NodeUtil.markFunctionsDeleted(fn.getFunctionNode(), compiler);
}
}
} } | public class class_name {
void removeInlinedFunctions() {
for (Map.Entry<String, FunctionState> entry : fns.entrySet()) {
String name = entry.getKey();
FunctionState functionState = entry.getValue();
if (functionState.canRemove()) {
Function fn = functionState.getFn();
checkState(functionState.canInline()); // depends on control dependency: [if], data = [none]
checkState(fn != null); // depends on control dependency: [if], data = [none]
verifyAllReferencesInlined(name, functionState); // depends on control dependency: [if], data = [none]
fn.remove(); // depends on control dependency: [if], data = [none]
NodeUtil.markFunctionsDeleted(fn.getFunctionNode(), compiler); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
    /**
     * Reads the component's XML stream until the connection ends.
     *
     * EOFException is treated as a normal disconnect and ignored.
     * A SocketException that occurs outside shutdown is logged and reported
     * via connectionLost() so a new connection can be established; during
     * shutdown it is ignored. Parser errors are logged as errors, any other
     * exception as a warning.
     */
    public void run() {
        try {
            readStream();
        } catch (EOFException eof) {
            // Normal disconnect
        } catch (SocketException se) {
            // Do nothing if the exception occured while shutting down the
            // component otherwise
            // log the error and try to establish a new connection
            if (!shutdown) {
                component.getManager().getLog().error(se);
                component.connectionLost();
            }
        } catch (XmlPullParserException ie) {
            component.getManager().getLog().error(ie);
        } catch (Exception e) {
            component.getManager().getLog().warn(e);
        }
    }
}
public void run() {
try {
readStream(); // depends on control dependency: [try], data = [none]
} catch (EOFException eof) {
// Normal disconnect
} catch (SocketException se) { // depends on control dependency: [catch], data = [none]
// Do nothing if the exception occured while shutting down the
// component otherwise
// log the error and try to establish a new connection
if (!shutdown) {
component.getManager().getLog().error(se); // depends on control dependency: [if], data = [none]
component.connectionLost(); // depends on control dependency: [if], data = [none]
}
} catch (XmlPullParserException ie) { // depends on control dependency: [catch], data = [none]
component.getManager().getLog().error(ie);
} catch (Exception e) { // depends on control dependency: [catch], data = [none]
component.getManager().getLog().warn(e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
@SuppressWarnings("unused")
private Boolean loadBooleanProperty(String propertyKey) {
String booleanPropValue = prop.getProperty(propertyKey);
if (booleanPropValue != null) {
return Boolean.parseBoolean(booleanPropValue.trim());
} else {
return null;
}
} } | public class class_name {
@SuppressWarnings("unused")
private Boolean loadBooleanProperty(String propertyKey) {
String booleanPropValue = prop.getProperty(propertyKey);
if (booleanPropValue != null) {
return Boolean.parseBoolean(booleanPropValue.trim()); // depends on control dependency: [if], data = [(booleanPropValue]
} else {
return null; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
void store(Credential credential) {
accessToken = credential.getAccessToken();
refreshToken = credential.getRefreshToken();
expirationTimeMillis = credential.getExpirationTimeMilliseconds();
if (credential instanceof OAuthHmacCredential) {
OAuthHmacCredential oauth10aCredential = (OAuthHmacCredential) credential;
tokenSharedSecret = oauth10aCredential.getTokenSharedSecret();
consumerKey = oauth10aCredential.getConsumerKey();
sharedSecret = oauth10aCredential.getSharedSecret();
}
} } | public class class_name {
void store(Credential credential) {
accessToken = credential.getAccessToken();
refreshToken = credential.getRefreshToken();
expirationTimeMillis = credential.getExpirationTimeMilliseconds();
if (credential instanceof OAuthHmacCredential) {
OAuthHmacCredential oauth10aCredential = (OAuthHmacCredential) credential;
tokenSharedSecret = oauth10aCredential.getTokenSharedSecret(); // depends on control dependency: [if], data = [none]
consumerKey = oauth10aCredential.getConsumerKey(); // depends on control dependency: [if], data = [none]
sharedSecret = oauth10aCredential.getSharedSecret(); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@Unstable
protected MetaData getNonGeneratedContentMetaData(String parameterName)
{
MetaData metaData = new MetaData();
Type contentType;
if (this.macroDescriptor != null
&& this.macroDescriptor.getParameterDescriptorMap() != null
&& this.macroDescriptor.getParameterDescriptorMap().containsKey(parameterName)) {
contentType = this.macroDescriptor.getParameterDescriptorMap().get(parameterName).getDisplayType();
} else {
contentType = DefaultParameterDescriptor.DEFAULT_PARAMETER_TYPE;
}
String converted = ReflectionUtils.serializeType(contentType);
metaData.addMetaData(MetaData.NON_GENERATED_CONTENT, converted);
metaData.addMetaData(MetaData.PARAMETER_NAME, parameterName);
return metaData;
} } | public class class_name {
@Unstable
protected MetaData getNonGeneratedContentMetaData(String parameterName)
{
MetaData metaData = new MetaData();
Type contentType;
if (this.macroDescriptor != null
&& this.macroDescriptor.getParameterDescriptorMap() != null
&& this.macroDescriptor.getParameterDescriptorMap().containsKey(parameterName)) {
contentType = this.macroDescriptor.getParameterDescriptorMap().get(parameterName).getDisplayType(); // depends on control dependency: [if], data = [none]
} else {
contentType = DefaultParameterDescriptor.DEFAULT_PARAMETER_TYPE; // depends on control dependency: [if], data = [none]
}
String converted = ReflectionUtils.serializeType(contentType);
metaData.addMetaData(MetaData.NON_GENERATED_CONTENT, converted);
metaData.addMetaData(MetaData.PARAMETER_NAME, parameterName);
return metaData;
} } |
public class class_name {
private void load(final Properties properties, final String location,
final boolean overwriteOnly) {
for (Map.Entry<Object, Object> entry : properties.entrySet()) {
String key = (String) entry.getKey();
String already = get(key);
if (overwriteOnly && already == null && !INCLUDE.equals(key)) {
continue;
}
String value = (String) entry.getValue();
load(key, value, location);
}
} } | public class class_name {
private void load(final Properties properties, final String location,
final boolean overwriteOnly) {
for (Map.Entry<Object, Object> entry : properties.entrySet()) {
String key = (String) entry.getKey();
String already = get(key);
if (overwriteOnly && already == null && !INCLUDE.equals(key)) {
continue;
}
String value = (String) entry.getValue();
load(key, value, location); // depends on control dependency: [for], data = [none]
}
} } |
public class class_name {
    /**
     * Performs one pick pass against the registered collidables.
     *
     * Picks bounds against the scene while holding the mCollidables lock so the
     * list cannot change mid-pick. When mPickClosest is set, the hit list is
     * reduced to the single hit with the smallest hitDistance (or emptied when
     * every entry is null). Finally the resulting hits are dispatched as pick
     * events. Does nothing when there are no collidables.
     */
    @Override
    protected void doPick()
    {
        if (mCollidables.size() > 0)
        {
            GVRPickedObject[] picked = null;
            // Lock the collidable list while the native pick runs.
            synchronized (mCollidables)
            {
                picked = pickBounds(mScene, mCollidables);
            }
            if (mPickClosest && (picked.length > 0))
            {
                GVRPickedObject closest = null;
                float dist = 100000.0f; // sentinel: farther than any expected hit
                for (GVRPickedObject hit : picked)
                {
                    if ((hit != null) && (hit.hitDistance < dist))
                    {
                        dist = hit.hitDistance;
                        closest = hit;
                    }
                }
                if (closest != null)
                {
                    picked = new GVRPickedObject[] { closest };
                }
                else
                {
                    picked = sEmptyList;
                }
            }
            generatePickEvents(picked);
        }
    }
}
@Override
protected void doPick()
{
if (mCollidables.size() > 0)
{
GVRPickedObject[] picked = null;
synchronized (mCollidables) // depends on control dependency: [if], data = [none]
{
picked = pickBounds(mScene, mCollidables);
}
if (mPickClosest && (picked.length > 0))
{
GVRPickedObject closest = null;
float dist = 100000.0f;
for (GVRPickedObject hit : picked)
{
if ((hit != null) && (hit.hitDistance < dist))
{
dist = hit.hitDistance; // depends on control dependency: [if], data = [none]
closest = hit; // depends on control dependency: [if], data = [none]
}
}
if (closest != null)
{
picked = new GVRPickedObject[] { closest }; // depends on control dependency: [if], data = [none]
}
else
{
picked = sEmptyList; // depends on control dependency: [if], data = [none]
}
}
generatePickEvents(picked); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
protected boolean shouldAuthorize(Authentication authentication) {
Assert.state(getAccessDecisionManager() != null, "The AccessDecisionManager can not be null!");
boolean authorize = false;
try {
if( authentication != null ) {
Object securedObject = getSecuredObject();
List<ConfigAttribute> cad = getConfigAttributeDefinition( securedObject );
getAccessDecisionManager().decide( authentication, getSecuredObject(), cad );
authorize = true;
}
} catch( AccessDeniedException e ) {
// This means the secured objects should not be authorized
}
return authorize;
} } | public class class_name {
protected boolean shouldAuthorize(Authentication authentication) {
Assert.state(getAccessDecisionManager() != null, "The AccessDecisionManager can not be null!");
boolean authorize = false;
try {
if( authentication != null ) {
Object securedObject = getSecuredObject();
List<ConfigAttribute> cad = getConfigAttributeDefinition( securedObject );
getAccessDecisionManager().decide( authentication, getSecuredObject(), cad ); // depends on control dependency: [if], data = [( authentication]
authorize = true; // depends on control dependency: [if], data = [none]
}
} catch( AccessDeniedException e ) {
// This means the secured objects should not be authorized
} // depends on control dependency: [catch], data = [none]
return authorize;
} } |
public class class_name {
    /**
     * Iteratively partitions a training file into a self-consistent subset and
     * a misclassified subset using a Naive Bayes fixed point.
     *
     * Each iteration trains on the current training set, moves tuples the
     * model mislabels into wrongData (label "1" is always preserved — too few
     * samples), then moves back any wrongData tuple the new model now labels
     * correctly. Stops when the training-set size no longer changes and writes
     * the two partitions to "<file>.aligned" and "<file>.wrong".
     *
     * @param originalTrainingDataFile path of the whitespace-separated data
     */
    public static void splitData(final String originalTrainingDataFile) {
        List<Tuple> trainingData = NaiveBayesClassifier.readTrainingData(originalTrainingDataFile, "\\s");
        List<Tuple> wrongData = new ArrayList<>();
        int lastTrainingDataSize;
        int iterCount = 0;
        do {
            System.out.println("Iteration:\t" + (++iterCount));
            lastTrainingDataSize = trainingData.size();
            NaiveBayesClassifier nbc = new NaiveBayesClassifier();
            nbc.train(trainingData);
            Iterator<Tuple> trainingDataIter = trainingData.iterator();
            while (trainingDataIter.hasNext()) {
                Tuple t = trainingDataIter.next();
                String actual = nbc.predictLabel(t);
                if (!t.label.equals(actual) && !t.label.equals("1")) { // preserve 1 since too few.
                    wrongData.add(t);
                    trainingDataIter.remove();
                }
            }
            Iterator<Tuple> wrongDataIter = wrongData.iterator();
            while (wrongDataIter.hasNext()) {
                Tuple t = wrongDataIter.next();
                String actual = nbc.predictLabel(t);
                if (t.label.equals(actual)) {
                    trainingData.add(t);
                    wrongDataIter.remove();
                }
            }
        } while (trainingData.size() != lastTrainingDataSize);
        writeToFile(trainingData, originalTrainingDataFile + ".aligned");
        writeToFile(wrongData, originalTrainingDataFile + ".wrong");
    }
}
public static void splitData(final String originalTrainingDataFile) {
List<Tuple> trainingData = NaiveBayesClassifier.readTrainingData(originalTrainingDataFile, "\\s");
List<Tuple> wrongData = new ArrayList<>();
int lastTrainingDataSize;
int iterCount = 0;
do {
System.out.println("Iteration:\t" + (++iterCount));
lastTrainingDataSize = trainingData.size();
NaiveBayesClassifier nbc = new NaiveBayesClassifier();
nbc.train(trainingData);
Iterator<Tuple> trainingDataIter = trainingData.iterator();
while (trainingDataIter.hasNext()) {
Tuple t = trainingDataIter.next();
String actual = nbc.predictLabel(t);
if (!t.label.equals(actual) && !t.label.equals("1")) { // preserve 1 since too few.
wrongData.add(t); // depends on control dependency: [if], data = [none]
trainingDataIter.remove(); // depends on control dependency: [if], data = [none]
}
}
Iterator<Tuple> wrongDataIter = wrongData.iterator();
while (wrongDataIter.hasNext()) {
Tuple t = wrongDataIter.next();
String actual = nbc.predictLabel(t);
if (t.label.equals(actual)) {
trainingData.add(t); // depends on control dependency: [if], data = [none]
wrongDataIter.remove(); // depends on control dependency: [if], data = [none]
}
}
} while (trainingData.size() != lastTrainingDataSize);
writeToFile(trainingData, originalTrainingDataFile + ".aligned");
writeToFile(wrongData, originalTrainingDataFile + ".wrong");
} } |
public class class_name {
public void replaceFromToWithFrom(int from, int to, ObjectArrayList other, int otherFrom) {
int length=to-from+1;
if (length>0) {
checkRangeFromTo(from, to, size);
checkRangeFromTo(otherFrom,otherFrom+length-1,other.size);
System.arraycopy(other.elements, otherFrom, elements, from, length);
}
} } | public class class_name {
public void replaceFromToWithFrom(int from, int to, ObjectArrayList other, int otherFrom) {
int length=to-from+1;
if (length>0) {
checkRangeFromTo(from, to, size);
// depends on control dependency: [if], data = [none]
checkRangeFromTo(otherFrom,otherFrom+length-1,other.size);
// depends on control dependency: [if], data = [none]
System.arraycopy(other.elements, otherFrom, elements, from, length);
// depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
private void handleRestoreDefaultsButtonTextArgument(@NonNull final Bundle arguments) {
CharSequence buttonText =
getCharSequenceFromArguments(arguments, EXTRA_RESTORE_DEFAULTS_BUTTON_TEXT);
if (!TextUtils.isEmpty(buttonText)) {
setRestoreDefaultsButtonText(buttonText);
}
} } | public class class_name {
private void handleRestoreDefaultsButtonTextArgument(@NonNull final Bundle arguments) {
CharSequence buttonText =
getCharSequenceFromArguments(arguments, EXTRA_RESTORE_DEFAULTS_BUTTON_TEXT);
if (!TextUtils.isEmpty(buttonText)) {
setRestoreDefaultsButtonText(buttonText); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
    /**
     * Resolves an externally usable URL for a Kubernetes service.
     *
     * Resolution cascade, first match wins:
     * 1. Service env vars (host/port/protocol), when not forced external.
     * 2. Service lookup in the given (or client default) namespace.
     * 3. Host-and-port env fallback when no service object was found.
     * 4. The service's expose-URL annotation.
     * 5. An OpenShift route with the service's name.
     * 6. When the cluster IP is blank: a matching Ingress rule (HTTPS when a
     *    TLS host matches, else HTTP), then load-balancer ingress IPs, then a
     *    node IP/externalID combined with the service's nodePort.
     * Otherwise the cluster IP and service port are used directly.
     *
     * @throws IllegalArgumentException when no service object can be found
     * @throws RuntimeException when the service has no resolvable port
     * @throws IllegalStateException for head-less services (clusterIP "None")
     */
    public static String getServiceURL(KubernetesClient client, String serviceName, String serviceNamespace, String serviceProtocol, boolean serviceExternal) {
        Service srv = null;
        String serviceHost = serviceToHostOrBlank(serviceName);
        String servicePort = serviceToPortOrBlank(serviceName);
        String serviceProto = serviceProtocol != null ? serviceProtocol : serviceToProtocol(serviceName, servicePort);
        //Use specified or fallback namespace.
        String actualNamespace = StringUtils.isNotBlank(serviceNamespace) ? serviceNamespace : client.getNamespace();
        //1. Inside Kubernetes: Services as ENV vars
        if (!serviceExternal && StringUtils.isNotBlank(serviceHost) && StringUtils.isNotBlank(servicePort) && StringUtils.isNotBlank(serviceProtocol)) {
            return serviceProtocol + "://" + serviceHost + ":" + servicePort;
            //2. Anywhere: When namespace is passed System / Env var. Mostly needed for integration tests.
        } else if (StringUtils.isNotBlank(actualNamespace)) {
            srv = client.services().inNamespace(actualNamespace).withName(serviceName).get();
        }
        if (srv == null) {
            // lets try use environment variables
            String hostAndPort = getServiceHostAndPort(serviceName, "", "");
            if (!hostAndPort.startsWith(":")) {
                return serviceProto + "://" + hostAndPort;
            }
        }
        if (srv == null) {
            throw new IllegalArgumentException("No kubernetes service could be found for name: " + serviceName + " in namespace: " + actualNamespace);
        }
        String answer = KubernetesHelper.getOrCreateAnnotations(srv).get(Fabric8Annotations.SERVICE_EXPOSE_URL.toString());
        if (StringUtils.isNotBlank(answer)) {
            return answer;
        }
        if (OpenshiftHelper.isOpenShift(client)) {
            OpenShiftClient openShiftClient = client.adapt(OpenShiftClient.class);
            Route route = openShiftClient.routes().inNamespace(actualNamespace).withName(serviceName).get();
            if (route != null) {
                return (serviceProto + "://" + route.getSpec().getHost()).toLowerCase();
            }
        }
        // NOTE(review): the port name passed here is always null, so the error
        // message below prints "Couldn't find port: null" — confirm intended.
        ServicePort port = findServicePortByName(srv, null);
        if (port == null) {
            throw new RuntimeException("Couldn't find port: " + null + " for service:" + serviceName);
        }
        String clusterIP = srv.getSpec().getClusterIP();
        if ("None".equals(clusterIP)) {
            // NOTE(review): message literal is missing a space before "is head-less".
            throw new IllegalStateException("Service: " + serviceName + " in namespace:" + serviceNamespace + "is head-less. Search for endpoints instead.");
        }
        Integer portNumber = port.getPort();
        if (StringUtils.isBlank(clusterIP)) {
            // No cluster IP yet: try Ingress rules that route to this service.
            IngressList ingresses = client.extensions().ingresses().inNamespace(serviceNamespace).list();
            if (ingresses != null) {
                List<Ingress> items = ingresses.getItems();
                if (items != null) {
                    for (Ingress item : items) {
                        String ns = KubernetesHelper.getNamespace(item);
                        if (Objects.equal(serviceNamespace, ns)) {
                            IngressSpec spec = item.getSpec();
                            if (spec != null) {
                                List<IngressRule> rules = spec.getRules();
                                List<IngressTLS> tls = spec.getTls();
                                if (rules != null) {
                                    for (IngressRule rule : rules) {
                                        HTTPIngressRuleValue http = rule.getHttp();
                                        if (http != null) {
                                            List<HTTPIngressPath> paths = http.getPaths();
                                            if (paths != null) {
                                                for (HTTPIngressPath path : paths) {
                                                    IngressBackend backend = path.getBackend();
                                                    if (backend != null) {
                                                        String backendServiceName = backend.getServiceName();
                                                        if (serviceName.equals(backendServiceName) && portsMatch(port, backend.getServicePort())) {
                                                            String pathPostfix = path.getPath();
                                                            // Prefer an HTTPS URL when a TLS host is configured.
                                                            if (tls != null) {
                                                                for (IngressTLS tlsHost : tls) {
                                                                    List<String> hosts = tlsHost.getHosts();
                                                                    if (hosts != null) {
                                                                        for (String host : hosts) {
                                                                            if (StringUtils.isNotBlank(host)) {
                                                                                return String.format("https://%s/%s", host, preparePath(pathPostfix));
                                                                            }
                                                                        }
                                                                    }
                                                                }
                                                            }
                                                            answer = rule.getHost();
                                                            if (StringUtils.isNotBlank(answer)) {
                                                                return String.format("http://%s/%s",answer, preparePath(pathPostfix));
                                                            }
                                                        }
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
            // lets try use the status on GKE
            ServiceStatus status = srv.getStatus();
            if (status != null) {
                LoadBalancerStatus loadBalancerStatus = status.getLoadBalancer();
                if (loadBalancerStatus != null) {
                    List<LoadBalancerIngress> loadBalancerIngresses = loadBalancerStatus.getIngress();
                    if (loadBalancerIngresses != null) {
                        for (LoadBalancerIngress loadBalancerIngress : loadBalancerIngresses) {
                            String ip = loadBalancerIngress.getIp();
                            if (StringUtils.isNotBlank(ip)) {
                                clusterIP = ip;
                                break;
                            }
                        }
                    }
                }
            }
        }
        if (StringUtils.isBlank(clusterIP)) {
            // on vanilla kubernetes we can use nodePort to access things externally
            boolean found = false;
            Integer nodePort = port.getNodePort();
            if (nodePort != null) {
                NodeList nodeList = client.nodes().list();
                if (nodeList != null) {
                    List<Node> items = nodeList.getItems();
                    if (items != null) {
                        for (Node item : items) {
                            NodeStatus status = item.getStatus();
                            if (!found && status != null) {
                                List<NodeAddress> addresses = status.getAddresses();
                                if (addresses != null) {
                                    for (NodeAddress address : addresses) {
                                        String ip = address.getAddress();
                                        if (StringUtils.isNotBlank(ip)) {
                                            clusterIP = ip;
                                            portNumber = nodePort;
                                            found = true;
                                            break;
                                        }
                                    }
                                }
                            }
                            if (!found) {
                                // Fall back to the node's externalID when no address matched.
                                NodeSpec spec = item.getSpec();
                                if (spec != null) {
                                    clusterIP = spec.getExternalID();
                                    if (StringUtils.isNotBlank(clusterIP)) {
                                        portNumber = nodePort;
                                        break;
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
        return (serviceProto + "://" + clusterIP + ":" + portNumber).toLowerCase();
    }
}
public static String getServiceURL(KubernetesClient client, String serviceName, String serviceNamespace, String serviceProtocol, boolean serviceExternal) {
Service srv = null;
String serviceHost = serviceToHostOrBlank(serviceName);
String servicePort = serviceToPortOrBlank(serviceName);
String serviceProto = serviceProtocol != null ? serviceProtocol : serviceToProtocol(serviceName, servicePort);
//Use specified or fallback namespace.
String actualNamespace = StringUtils.isNotBlank(serviceNamespace) ? serviceNamespace : client.getNamespace();
//1. Inside Kubernetes: Services as ENV vars
if (!serviceExternal && StringUtils.isNotBlank(serviceHost) && StringUtils.isNotBlank(servicePort) && StringUtils.isNotBlank(serviceProtocol)) {
return serviceProtocol + "://" + serviceHost + ":" + servicePort;
//2. Anywhere: When namespace is passed System / Env var. Mostly needed for integration tests.
} else if (StringUtils.isNotBlank(actualNamespace)) {
srv = client.services().inNamespace(actualNamespace).withName(serviceName).get(); // depends on control dependency: [if], data = [none]
}
if (srv == null) {
// lets try use environment variables
String hostAndPort = getServiceHostAndPort(serviceName, "", "");
if (!hostAndPort.startsWith(":")) {
return serviceProto + "://" + hostAndPort;
}
}
if (srv == null) {
throw new IllegalArgumentException("No kubernetes service could be found for name: " + serviceName + " in namespace: " + actualNamespace);
}
String answer = KubernetesHelper.getOrCreateAnnotations(srv).get(Fabric8Annotations.SERVICE_EXPOSE_URL.toString());
if (StringUtils.isNotBlank(answer)) {
return answer; // depends on control dependency: [if], data = [none]
}
if (OpenshiftHelper.isOpenShift(client)) {
OpenShiftClient openShiftClient = client.adapt(OpenShiftClient.class);
Route route = openShiftClient.routes().inNamespace(actualNamespace).withName(serviceName).get();
if (route != null) {
return (serviceProto + "://" + route.getSpec().getHost()).toLowerCase();
}
}
ServicePort port = findServicePortByName(srv, null);
if (port == null) {
throw new RuntimeException("Couldn't find port: " + null + " for service:" + serviceName);
}
String clusterIP = srv.getSpec().getClusterIP();
if ("None".equals(clusterIP)) {
throw new IllegalStateException("Service: " + serviceName + " in namespace:" + serviceNamespace + "is head-less. Search for endpoints instead.");
}
Integer portNumber = port.getPort();
if (StringUtils.isBlank(clusterIP)) {
IngressList ingresses = client.extensions().ingresses().inNamespace(serviceNamespace).list();
if (ingresses != null) {
List<Ingress> items = ingresses.getItems();
if (items != null) {
for (Ingress item : items) {
String ns = KubernetesHelper.getNamespace(item);
if (Objects.equal(serviceNamespace, ns)) {
IngressSpec spec = item.getSpec();
if (spec != null) {
List<IngressRule> rules = spec.getRules();
List<IngressTLS> tls = spec.getTls();
if (rules != null) {
for (IngressRule rule : rules) {
HTTPIngressRuleValue http = rule.getHttp();
if (http != null) {
List<HTTPIngressPath> paths = http.getPaths();
if (paths != null) {
for (HTTPIngressPath path : paths) {
IngressBackend backend = path.getBackend();
if (backend != null) {
String backendServiceName = backend.getServiceName();
if (serviceName.equals(backendServiceName) && portsMatch(port, backend.getServicePort())) {
String pathPostfix = path.getPath();
if (tls != null) {
for (IngressTLS tlsHost : tls) {
List<String> hosts = tlsHost.getHosts();
if (hosts != null) {
for (String host : hosts) {
if (StringUtils.isNotBlank(host)) {
return String.format("https://%s/%s", host, preparePath(pathPostfix));
}
}
}
}
}
answer = rule.getHost(); // depends on control dependency: [if], data = [none]
if (StringUtils.isNotBlank(answer)) {
return String.format("http://%s/%s",answer, preparePath(pathPostfix));
}
}
}
}
}
}
}
}
}
}
}
}
}
// lets try use the status on GKE
ServiceStatus status = srv.getStatus(); // depends on control dependency: [if], data = [none]
if (status != null) {
LoadBalancerStatus loadBalancerStatus = status.getLoadBalancer();
if (loadBalancerStatus != null) {
List<LoadBalancerIngress> loadBalancerIngresses = loadBalancerStatus.getIngress();
if (loadBalancerIngresses != null) {
for (LoadBalancerIngress loadBalancerIngress : loadBalancerIngresses) {
String ip = loadBalancerIngress.getIp();
if (StringUtils.isNotBlank(ip)) {
clusterIP = ip; // depends on control dependency: [if], data = [none]
break;
}
}
}
}
}
}
if (StringUtils.isBlank(clusterIP)) {
// on vanilla kubernetes we can use nodePort to access things externally
boolean found = false;
Integer nodePort = port.getNodePort();
if (nodePort != null) {
NodeList nodeList = client.nodes().list();
if (nodeList != null) {
List<Node> items = nodeList.getItems();
if (items != null) {
for (Node item : items) {
NodeStatus status = item.getStatus();
if (!found && status != null) {
List<NodeAddress> addresses = status.getAddresses();
if (addresses != null) {
for (NodeAddress address : addresses) {
String ip = address.getAddress();
if (StringUtils.isNotBlank(ip)) {
clusterIP = ip; // depends on control dependency: [if], data = [none]
portNumber = nodePort; // depends on control dependency: [if], data = [none]
found = true; // depends on control dependency: [if], data = [none]
break;
}
}
}
}
if (!found) {
NodeSpec spec = item.getSpec();
if (spec != null) {
clusterIP = spec.getExternalID(); // depends on control dependency: [if], data = [none]
if (StringUtils.isNotBlank(clusterIP)) {
portNumber = nodePort; // depends on control dependency: [if], data = [none]
break;
}
}
}
}
}
}
}
}
return (serviceProto + "://" + clusterIP + ":" + portNumber).toLowerCase();
} } |
public class class_name {
public void setOnProgressView(View v) {
int progressViewVisibility = -1;
if (this.onProgressView != null) {
progressViewVisibility = this.onProgressView.getVisibility();
removeView(this.onProgressView);
}
this.onProgressView = v;
if (this.onProgressView != null) {
addView(this.onProgressView);
if (progressViewVisibility != -1)
this.onProgressView.setVisibility(progressViewVisibility);
}
} } | public class class_name {
public void setOnProgressView(View v) {
int progressViewVisibility = -1;
if (this.onProgressView != null) {
progressViewVisibility = this.onProgressView.getVisibility(); // depends on control dependency: [if], data = [none]
removeView(this.onProgressView); // depends on control dependency: [if], data = [(this.onProgressView]
}
this.onProgressView = v;
if (this.onProgressView != null) {
addView(this.onProgressView); // depends on control dependency: [if], data = [(this.onProgressView]
if (progressViewVisibility != -1)
this.onProgressView.setVisibility(progressViewVisibility);
}
} } |
public class class_name {
protected String getCommonSuperClass(final String type1, final String type2) {
ClassLoader classLoader = getClassLoader();
Class<?> class1;
try {
class1 = Class.forName(type1.replace('/', '.'), false, classLoader);
} catch (ClassNotFoundException e) {
throw new TypeNotPresentException(type1, e);
}
Class<?> class2;
try {
class2 = Class.forName(type2.replace('/', '.'), false, classLoader);
} catch (ClassNotFoundException e) {
throw new TypeNotPresentException(type2, e);
}
if (class1.isAssignableFrom(class2)) {
return type1;
}
if (class2.isAssignableFrom(class1)) {
return type2;
}
if (class1.isInterface() || class2.isInterface()) {
return "java/lang/Object";
} else {
do {
class1 = class1.getSuperclass();
} while (!class1.isAssignableFrom(class2));
return class1.getName().replace('.', '/');
}
} } | public class class_name {
protected String getCommonSuperClass(final String type1, final String type2) {
ClassLoader classLoader = getClassLoader();
Class<?> class1;
try {
class1 = Class.forName(type1.replace('/', '.'), false, classLoader); // depends on control dependency: [try], data = [none]
} catch (ClassNotFoundException e) {
throw new TypeNotPresentException(type1, e);
} // depends on control dependency: [catch], data = [none]
Class<?> class2;
try {
class2 = Class.forName(type2.replace('/', '.'), false, classLoader); // depends on control dependency: [try], data = [none]
} catch (ClassNotFoundException e) {
throw new TypeNotPresentException(type2, e);
} // depends on control dependency: [catch], data = [none]
if (class1.isAssignableFrom(class2)) {
return type1; // depends on control dependency: [if], data = [none]
}
if (class2.isAssignableFrom(class1)) {
return type2; // depends on control dependency: [if], data = [none]
}
if (class1.isInterface() || class2.isInterface()) {
return "java/lang/Object"; // depends on control dependency: [if], data = [none]
} else {
do {
class1 = class1.getSuperclass();
} while (!class1.isAssignableFrom(class2));
return class1.getName().replace('.', '/'); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static String replace(String s, String replace, String with) {
if (s == null || replace == null) {
return s;
}
if (with == null) {
with = "";
}
StringBuffer b = new StringBuffer();
int start = 0;
int lenreplace = replace.length();
while (true) {
int i = s.indexOf(replace, start);
if (i == -1) {
b.append(s.substring(start));
break;
}
b.append(s.substring(start, i));
b.append(with);
start = i + lenreplace;
}
return b.toString();
} } | public class class_name {
public static String replace(String s, String replace, String with) {
if (s == null || replace == null) {
return s; // depends on control dependency: [if], data = [none]
}
if (with == null) {
with = "";
}
StringBuffer b = new StringBuffer();
int start = 0;
int lenreplace = replace.length();
while (true) {
int i = s.indexOf(replace, start);
if (i == -1) {
b.append(s.substring(start)); // depends on control dependency: [if], data = [none]
break;
}
b.append(s.substring(start, i)); // depends on control dependency: [while], data = [none]
b.append(with); // depends on control dependency: [while], data = [none]
start = i + lenreplace; // depends on control dependency: [while], data = [none]
}
return b.toString();
} } |
public class class_name {
static Map<String, Object> sizePayload(final Object min, final Object max) {
if (min == null || max == null) {
return null;
}
Map<String, Object> payload = new LinkedHashMap<>();
payload.put("min", min);
payload.put("max", max);
payload.put("message", MSG_PREFIX + VALIDATORS.get(Size.class));
return payload;
} } | public class class_name {
static Map<String, Object> sizePayload(final Object min, final Object max) {
if (min == null || max == null) {
return null; // depends on control dependency: [if], data = [none]
}
Map<String, Object> payload = new LinkedHashMap<>();
payload.put("min", min);
payload.put("max", max);
payload.put("message", MSG_PREFIX + VALIDATORS.get(Size.class));
return payload;
} } |
public class class_name {
public boolean setGenreString(String str)
{
int result = NullsoftID3GenreTable.getGenre(str);
boolean retval = false;
if (result != -1)
{
genre = result;
retval = true;
}
return retval;
} } | public class class_name {
public boolean setGenreString(String str)
{
int result = NullsoftID3GenreTable.getGenre(str);
boolean retval = false;
if (result != -1)
{
genre = result; // depends on control dependency: [if], data = [none]
retval = true; // depends on control dependency: [if], data = [none]
}
return retval;
} } |
public class class_name {
public EEnum getPPORGProcFlgs() {
if (pporgProcFlgsEEnum == null) {
pporgProcFlgsEEnum = (EEnum)EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(123);
}
return pporgProcFlgsEEnum;
} } | public class class_name {
public EEnum getPPORGProcFlgs() {
if (pporgProcFlgsEEnum == null) {
pporgProcFlgsEEnum = (EEnum)EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(123); // depends on control dependency: [if], data = [none]
}
return pporgProcFlgsEEnum;
} } |
public class class_name {
static int linearPathIntersectsLinearPathMaxDim_(MultiPath _multipathA,
MultiPath _multipathB, double tolerance,
AttributeStreamOfDbl intersections) {
MultiPath multipathA;
MultiPath multipathB;
if (_multipathA.getSegmentCount() > _multipathB.getSegmentCount()) {
multipathA = _multipathB;
multipathB = _multipathA;
} else {
multipathA = _multipathA;
multipathB = _multipathB;
}
SegmentIteratorImpl segIterA = ((MultiPathImpl) multipathA._getImpl())
.querySegmentIterator();
SegmentIteratorImpl segIterB = ((MultiPathImpl) multipathB._getImpl())
.querySegmentIterator();
double[] scalarsA = new double[2];
double[] scalarsB = new double[2];
int dim = -1;
int ievent = 0;
double overlapLength;
AttributeStreamOfInt32 eventIndices = new AttributeStreamOfInt32(0);
RelationalOperations relOps = new RelationalOperations();
OverlapComparer overlapComparer = new OverlapComparer(relOps);
OverlapEvent overlapEvent;
Envelope2D env_a = new Envelope2D();
Envelope2D env_b = new Envelope2D();
Envelope2D envInter = new Envelope2D();
multipathA.queryEnvelope2D(env_a);
multipathB.queryEnvelope2D(env_b);
env_a.inflate(tolerance, tolerance);
env_b.inflate(tolerance, tolerance);
envInter.setCoords(env_a);
envInter.intersect(env_b);
Point2D int_point = null;
if (intersections != null) {
int_point = new Point2D();
}
QuadTreeImpl qtB = null;
QuadTreeImpl quadTreeB = null;
QuadTreeImpl quadTreePathsB = null;
GeometryAccelerators accel = ((MultiPathImpl) multipathB._getImpl())
._getAccelerators();
if (accel != null) {
quadTreeB = accel.getQuadTree();
quadTreePathsB = accel.getQuadTreeForPaths();
if (quadTreeB == null) {
qtB = InternalUtils.buildQuadTree(
(MultiPathImpl) multipathB._getImpl(), envInter);
quadTreeB = qtB;
}
} else {
qtB = InternalUtils.buildQuadTree(
(MultiPathImpl) multipathB._getImpl(), envInter);
quadTreeB = qtB;
}
QuadTreeImpl.QuadTreeIteratorImpl qtIterB = quadTreeB.getIterator();
QuadTreeImpl.QuadTreeIteratorImpl qtIterPathsB = null;
if (quadTreePathsB != null)
qtIterPathsB = quadTreePathsB.getIterator();
while (segIterA.nextPath()) {
overlapLength = 0.0;
while (segIterA.hasNextSegment()) {
Segment segmentA = segIterA.nextSegment();
segmentA.queryEnvelope2D(env_a);
if (!env_a.isIntersecting(envInter)) {
continue;
}
if (qtIterPathsB != null) {
qtIterPathsB.resetIterator(env_a, tolerance);
if (qtIterPathsB.next() == -1)
continue;
}
double lengthA = segmentA.calculateLength2D();
qtIterB.resetIterator(segmentA, tolerance);
for (int elementHandleB = qtIterB.next(); elementHandleB != -1; elementHandleB = qtIterB
.next()) {
int vertex_b = quadTreeB.getElement(elementHandleB);
segIterB.resetToVertex(vertex_b);
Segment segmentB = segIterB.nextSegment();
double lengthB = segmentB.calculateLength2D();
int result = segmentA.intersect(segmentB, null, scalarsA,
scalarsB, tolerance);
if (result > 0) {
double scalar_a_0 = scalarsA[0];
double scalar_b_0 = scalarsB[0];
double scalar_a_1 = (result == 2 ? scalarsA[1]
: NumberUtils.TheNaN);
double scalar_b_1 = (result == 2 ? scalarsB[1]
: NumberUtils.TheNaN);
if (result == 2) {
if (lengthA * (scalar_a_1 - scalar_a_0) > tolerance) {
dim = 1;
return dim;
}
// Quick neighbor check
double length = lengthA * (scalar_a_1 - scalar_a_0);
if (segIterB.hasNextSegment()) {
segmentB = segIterB.nextSegment();
result = segmentA.intersect(segmentB, null,
scalarsA, null, tolerance);
if (result == 2) {
double nextScalarA0 = scalarsA[0];
double nextScalarA1 = scalarsA[1];
double lengthNext = lengthA
* (nextScalarA1 - nextScalarA0);
if (length + lengthNext > tolerance) {
dim = 1;
return dim;
}
}
segIterB.resetToVertex(vertex_b);
segIterB.nextSegment();
}
if (!segIterB.isFirstSegmentInPath()) {
segIterB.previousSegment();
segmentB = segIterB.previousSegment();
result = segmentA.intersect(segmentB, null,
scalarsA, null, tolerance);
if (result == 2) {
double nextScalarA0 = scalarsA[0];
double nextScalarA1 = scalarsA[1];
double lengthPrevious = lengthA
* (nextScalarA1 - nextScalarA0);
if (length + lengthPrevious > tolerance) {
dim = 1;
return dim;
}
}
segIterB.resetToVertex(vertex_b);
segIterB.nextSegment();
}
if (segIterA.hasNextSegment()) {
int vertex_a = segIterA.getStartPointIndex();
segmentA = segIterA.nextSegment();
result = segmentA.intersect(segmentB, null,
scalarsA, null, tolerance);
if (result == 2) {
double nextScalarA0 = scalarsA[0];
double nextScalarA1 = scalarsA[1];
double lengthNext = lengthA
* (nextScalarA1 - nextScalarA0);
if (length + lengthNext > tolerance) {
dim = 1;
return dim;
}
}
segIterA.resetToVertex(vertex_a);
segIterA.nextSegment();
}
if (!segIterA.isFirstSegmentInPath()) {
int vertex_a = segIterA.getStartPointIndex();
segIterA.previousSegment();
segmentA = segIterA.previousSegment();
result = segmentA.intersect(segmentB, null,
scalarsA, null, tolerance);
if (result == 2) {
double nextScalarA0 = scalarsA[0];
double nextScalarA1 = scalarsA[1];
double lengthPrevious = lengthB
* (nextScalarA1 - nextScalarA0);
if (length + lengthPrevious > tolerance) {
dim = 1;
return dim;
}
}
segIterA.resetToVertex(vertex_a);
segIterA.nextSegment();
}
int ivertex_a = segIterA.getStartPointIndex();
int ipath_a = segIterA.getPathIndex();
int ivertex_b = segIterB.getStartPointIndex();
int ipath_b = segIterB.getPathIndex();
overlapEvent = OverlapEvent.construct(ivertex_a,
ipath_a, scalar_a_0, scalar_a_1, ivertex_b,
ipath_b, scalar_b_0, scalar_b_1);
relOps.m_overlap_events.add(overlapEvent);
eventIndices.add(eventIndices.size());
}
dim = 0;
if (intersections != null) {
segmentA.getCoord2D(scalar_a_0, int_point);
intersections.add(int_point.x);
intersections.add(int_point.y);
}
}
}
if (ievent < relOps.m_overlap_events.size()) {
eventIndices.Sort(ievent, eventIndices.size(),
overlapComparer);
double lastScalar = 0.0;
int lastPath = relOps.m_overlap_events.get(eventIndices
.get(ievent)).m_ipath_a;
for (int i = ievent; i < relOps.m_overlap_events.size(); i++) {
overlapEvent = relOps.m_overlap_events.get(eventIndices
.get(i));
if (overlapEvent.m_scalar_a_0 < lastScalar
&& overlapEvent.m_scalar_a_1 < lastScalar) {
continue;
}
if (lengthA * (overlapEvent.m_scalar_a_0 - lastScalar) > tolerance) {
overlapLength = lengthA
* (overlapEvent.m_scalar_a_1 - overlapEvent.m_scalar_a_0); // reset
lastScalar = overlapEvent.m_scalar_a_1;
lastPath = overlapEvent.m_ipath_a;
} else {
if (overlapEvent.m_ipath_a != lastPath) {
overlapLength = lengthA
* (overlapEvent.m_scalar_a_1 - overlapEvent.m_scalar_a_0); // reset
lastPath = overlapEvent.m_ipath_a;
} else {
overlapLength += lengthA
* (overlapEvent.m_scalar_a_1 - overlapEvent.m_scalar_a_0); // accumulate
}
if (overlapLength > tolerance) {
dim = 1;
return dim;
}
lastScalar = overlapEvent.m_scalar_a_1;
if (lastScalar == 1.0) {
break;
}
}
}
if (lengthA * (1.0 - lastScalar) > tolerance) {
overlapLength = 0.0; // reset
}
ievent = 0;
eventIndices.resize(0);
relOps.m_overlap_events.clear();
}
}
}
return dim;
} } | public class class_name {
static int linearPathIntersectsLinearPathMaxDim_(MultiPath _multipathA,
MultiPath _multipathB, double tolerance,
AttributeStreamOfDbl intersections) {
MultiPath multipathA;
MultiPath multipathB;
if (_multipathA.getSegmentCount() > _multipathB.getSegmentCount()) {
multipathA = _multipathB; // depends on control dependency: [if], data = [none]
multipathB = _multipathA; // depends on control dependency: [if], data = [none]
} else {
multipathA = _multipathA; // depends on control dependency: [if], data = [none]
multipathB = _multipathB; // depends on control dependency: [if], data = [none]
}
SegmentIteratorImpl segIterA = ((MultiPathImpl) multipathA._getImpl())
.querySegmentIterator();
SegmentIteratorImpl segIterB = ((MultiPathImpl) multipathB._getImpl())
.querySegmentIterator();
double[] scalarsA = new double[2];
double[] scalarsB = new double[2];
int dim = -1;
int ievent = 0;
double overlapLength;
AttributeStreamOfInt32 eventIndices = new AttributeStreamOfInt32(0);
RelationalOperations relOps = new RelationalOperations();
OverlapComparer overlapComparer = new OverlapComparer(relOps);
OverlapEvent overlapEvent;
Envelope2D env_a = new Envelope2D();
Envelope2D env_b = new Envelope2D();
Envelope2D envInter = new Envelope2D();
multipathA.queryEnvelope2D(env_a);
multipathB.queryEnvelope2D(env_b);
env_a.inflate(tolerance, tolerance);
env_b.inflate(tolerance, tolerance);
envInter.setCoords(env_a);
envInter.intersect(env_b);
Point2D int_point = null;
if (intersections != null) {
int_point = new Point2D(); // depends on control dependency: [if], data = [none]
}
QuadTreeImpl qtB = null;
QuadTreeImpl quadTreeB = null;
QuadTreeImpl quadTreePathsB = null;
GeometryAccelerators accel = ((MultiPathImpl) multipathB._getImpl())
._getAccelerators();
if (accel != null) {
quadTreeB = accel.getQuadTree(); // depends on control dependency: [if], data = [none]
quadTreePathsB = accel.getQuadTreeForPaths(); // depends on control dependency: [if], data = [none]
if (quadTreeB == null) {
qtB = InternalUtils.buildQuadTree(
(MultiPathImpl) multipathB._getImpl(), envInter); // depends on control dependency: [if], data = [none]
quadTreeB = qtB; // depends on control dependency: [if], data = [none]
}
} else {
qtB = InternalUtils.buildQuadTree(
(MultiPathImpl) multipathB._getImpl(), envInter); // depends on control dependency: [if], data = [none]
quadTreeB = qtB; // depends on control dependency: [if], data = [none]
}
QuadTreeImpl.QuadTreeIteratorImpl qtIterB = quadTreeB.getIterator();
QuadTreeImpl.QuadTreeIteratorImpl qtIterPathsB = null;
if (quadTreePathsB != null)
qtIterPathsB = quadTreePathsB.getIterator();
while (segIterA.nextPath()) {
overlapLength = 0.0; // depends on control dependency: [while], data = [none]
while (segIterA.hasNextSegment()) {
Segment segmentA = segIterA.nextSegment();
segmentA.queryEnvelope2D(env_a); // depends on control dependency: [while], data = [none]
if (!env_a.isIntersecting(envInter)) {
continue;
}
if (qtIterPathsB != null) {
qtIterPathsB.resetIterator(env_a, tolerance); // depends on control dependency: [if], data = [none]
if (qtIterPathsB.next() == -1)
continue;
}
double lengthA = segmentA.calculateLength2D();
qtIterB.resetIterator(segmentA, tolerance); // depends on control dependency: [while], data = [none]
for (int elementHandleB = qtIterB.next(); elementHandleB != -1; elementHandleB = qtIterB
.next()) {
int vertex_b = quadTreeB.getElement(elementHandleB);
segIterB.resetToVertex(vertex_b); // depends on control dependency: [for], data = [none]
Segment segmentB = segIterB.nextSegment();
double lengthB = segmentB.calculateLength2D();
int result = segmentA.intersect(segmentB, null, scalarsA,
scalarsB, tolerance);
if (result > 0) {
double scalar_a_0 = scalarsA[0];
double scalar_b_0 = scalarsB[0];
double scalar_a_1 = (result == 2 ? scalarsA[1]
: NumberUtils.TheNaN);
double scalar_b_1 = (result == 2 ? scalarsB[1]
: NumberUtils.TheNaN);
if (result == 2) {
if (lengthA * (scalar_a_1 - scalar_a_0) > tolerance) {
dim = 1; // depends on control dependency: [if], data = [none]
return dim; // depends on control dependency: [if], data = [none]
}
// Quick neighbor check
double length = lengthA * (scalar_a_1 - scalar_a_0);
if (segIterB.hasNextSegment()) {
segmentB = segIterB.nextSegment(); // depends on control dependency: [if], data = [none]
result = segmentA.intersect(segmentB, null,
scalarsA, null, tolerance); // depends on control dependency: [if], data = [none]
if (result == 2) {
double nextScalarA0 = scalarsA[0];
double nextScalarA1 = scalarsA[1];
double lengthNext = lengthA
* (nextScalarA1 - nextScalarA0);
if (length + lengthNext > tolerance) {
dim = 1; // depends on control dependency: [if], data = [none]
return dim; // depends on control dependency: [if], data = [none]
}
}
segIterB.resetToVertex(vertex_b); // depends on control dependency: [if], data = [none]
segIterB.nextSegment(); // depends on control dependency: [if], data = [none]
}
if (!segIterB.isFirstSegmentInPath()) {
segIterB.previousSegment(); // depends on control dependency: [if], data = [none]
segmentB = segIterB.previousSegment(); // depends on control dependency: [if], data = [none]
result = segmentA.intersect(segmentB, null,
scalarsA, null, tolerance); // depends on control dependency: [if], data = [none]
if (result == 2) {
double nextScalarA0 = scalarsA[0];
double nextScalarA1 = scalarsA[1];
double lengthPrevious = lengthA
* (nextScalarA1 - nextScalarA0);
if (length + lengthPrevious > tolerance) {
dim = 1; // depends on control dependency: [if], data = [none]
return dim; // depends on control dependency: [if], data = [none]
}
}
segIterB.resetToVertex(vertex_b); // depends on control dependency: [if], data = [none]
segIterB.nextSegment(); // depends on control dependency: [if], data = [none]
}
if (segIterA.hasNextSegment()) {
int vertex_a = segIterA.getStartPointIndex();
segmentA = segIterA.nextSegment(); // depends on control dependency: [if], data = [none]
result = segmentA.intersect(segmentB, null,
scalarsA, null, tolerance); // depends on control dependency: [if], data = [none]
if (result == 2) {
double nextScalarA0 = scalarsA[0];
double nextScalarA1 = scalarsA[1];
double lengthNext = lengthA
* (nextScalarA1 - nextScalarA0);
if (length + lengthNext > tolerance) {
dim = 1; // depends on control dependency: [if], data = [none]
return dim; // depends on control dependency: [if], data = [none]
}
}
segIterA.resetToVertex(vertex_a); // depends on control dependency: [if], data = [none]
segIterA.nextSegment(); // depends on control dependency: [if], data = [none]
}
if (!segIterA.isFirstSegmentInPath()) {
int vertex_a = segIterA.getStartPointIndex();
segIterA.previousSegment(); // depends on control dependency: [if], data = [none]
segmentA = segIterA.previousSegment(); // depends on control dependency: [if], data = [none]
result = segmentA.intersect(segmentB, null,
scalarsA, null, tolerance); // depends on control dependency: [if], data = [none]
if (result == 2) {
double nextScalarA0 = scalarsA[0];
double nextScalarA1 = scalarsA[1];
double lengthPrevious = lengthB
* (nextScalarA1 - nextScalarA0);
if (length + lengthPrevious > tolerance) {
dim = 1; // depends on control dependency: [if], data = [none]
return dim; // depends on control dependency: [if], data = [none]
}
}
segIterA.resetToVertex(vertex_a); // depends on control dependency: [if], data = [none]
segIterA.nextSegment(); // depends on control dependency: [if], data = [none]
}
int ivertex_a = segIterA.getStartPointIndex();
int ipath_a = segIterA.getPathIndex();
int ivertex_b = segIterB.getStartPointIndex();
int ipath_b = segIterB.getPathIndex();
overlapEvent = OverlapEvent.construct(ivertex_a,
ipath_a, scalar_a_0, scalar_a_1, ivertex_b,
ipath_b, scalar_b_0, scalar_b_1); // depends on control dependency: [if], data = [none]
relOps.m_overlap_events.add(overlapEvent); // depends on control dependency: [if], data = [none]
eventIndices.add(eventIndices.size()); // depends on control dependency: [if], data = [none]
}
dim = 0; // depends on control dependency: [if], data = [none]
if (intersections != null) {
segmentA.getCoord2D(scalar_a_0, int_point); // depends on control dependency: [if], data = [none]
intersections.add(int_point.x); // depends on control dependency: [if], data = [none]
intersections.add(int_point.y); // depends on control dependency: [if], data = [none]
}
}
}
if (ievent < relOps.m_overlap_events.size()) {
eventIndices.Sort(ievent, eventIndices.size(),
overlapComparer); // depends on control dependency: [if], data = [none]
double lastScalar = 0.0;
int lastPath = relOps.m_overlap_events.get(eventIndices
.get(ievent)).m_ipath_a;
for (int i = ievent; i < relOps.m_overlap_events.size(); i++) {
overlapEvent = relOps.m_overlap_events.get(eventIndices
.get(i)); // depends on control dependency: [for], data = [none]
if (overlapEvent.m_scalar_a_0 < lastScalar
&& overlapEvent.m_scalar_a_1 < lastScalar) {
continue;
}
if (lengthA * (overlapEvent.m_scalar_a_0 - lastScalar) > tolerance) {
overlapLength = lengthA
* (overlapEvent.m_scalar_a_1 - overlapEvent.m_scalar_a_0); // reset // depends on control dependency: [if], data = [none]
lastScalar = overlapEvent.m_scalar_a_1; // depends on control dependency: [if], data = [none]
lastPath = overlapEvent.m_ipath_a; // depends on control dependency: [if], data = [none]
} else {
if (overlapEvent.m_ipath_a != lastPath) {
overlapLength = lengthA
* (overlapEvent.m_scalar_a_1 - overlapEvent.m_scalar_a_0); // reset // depends on control dependency: [if], data = [none]
lastPath = overlapEvent.m_ipath_a; // depends on control dependency: [if], data = [none]
} else {
overlapLength += lengthA
* (overlapEvent.m_scalar_a_1 - overlapEvent.m_scalar_a_0); // accumulate // depends on control dependency: [if], data = [none]
}
if (overlapLength > tolerance) {
dim = 1; // depends on control dependency: [if], data = [none]
return dim; // depends on control dependency: [if], data = [none]
}
lastScalar = overlapEvent.m_scalar_a_1; // depends on control dependency: [if], data = [none]
if (lastScalar == 1.0) {
break;
}
}
}
if (lengthA * (1.0 - lastScalar) > tolerance) {
overlapLength = 0.0; // reset // depends on control dependency: [if], data = [none]
}
ievent = 0; // depends on control dependency: [if], data = [none]
eventIndices.resize(0); // depends on control dependency: [if], data = [none]
relOps.m_overlap_events.clear(); // depends on control dependency: [if], data = [none]
}
}
}
return dim;
} } |
public class class_name {
private void removeInstalledFeature(String feature, String namespace)
{
// Extensions namespaces by feature
if (namespace == null) {
this.extensionNamespaceByFeature.remove(feature);
} else {
Map<String, InstalledFeature> namespaceInstalledExtension = this.extensionNamespaceByFeature.get(feature);
namespaceInstalledExtension.remove(namespace);
}
} } | public class class_name {
private void removeInstalledFeature(String feature, String namespace)
{
// Extensions namespaces by feature
if (namespace == null) {
this.extensionNamespaceByFeature.remove(feature); // depends on control dependency: [if], data = [none]
} else {
Map<String, InstalledFeature> namespaceInstalledExtension = this.extensionNamespaceByFeature.get(feature);
namespaceInstalledExtension.remove(namespace); // depends on control dependency: [if], data = [(namespace]
}
} } |
public class class_name {
public void resize(int maxCacheSize) {
setMaxCacheSize(maxCacheSize);
for (FeatureCache cache : tableCache.values()) {
cache.resize(maxCacheSize);
}
} } | public class class_name {
public void resize(int maxCacheSize) {
setMaxCacheSize(maxCacheSize);
for (FeatureCache cache : tableCache.values()) {
cache.resize(maxCacheSize); // depends on control dependency: [for], data = [cache]
}
} } |
public class class_name {
private void indexArrayStore(int e, int i)
{
if (valueIndexStore.length < e)
{
int oldLength = valueIndexStore.length;
valueIndexStore = Arrays.copyOf(valueIndexStore, e + 2);
Arrays.fill(valueIndexStore, oldLength, valueIndexStore.length, -1);
}
valueIndexStore[e] = i;
} } | public class class_name {
private void indexArrayStore(int e, int i)
{
if (valueIndexStore.length < e)
{
int oldLength = valueIndexStore.length;
valueIndexStore = Arrays.copyOf(valueIndexStore, e + 2); // depends on control dependency: [if], data = [none]
Arrays.fill(valueIndexStore, oldLength, valueIndexStore.length, -1); // depends on control dependency: [if], data = [none]
}
valueIndexStore[e] = i;
} } |
public class class_name {
public void recalculatePositionOfItemAt(int index) {
// TODO can be improved
final T item = get(index);
removeItemAtIndex(index, false);
int newIndex = add(item, false);
if (index != newIndex) {
mCallback.onMoved(index, newIndex);
}
} } | public class class_name {
public void recalculatePositionOfItemAt(int index) {
// TODO can be improved
final T item = get(index);
removeItemAtIndex(index, false);
int newIndex = add(item, false);
if (index != newIndex) {
mCallback.onMoved(index, newIndex); // depends on control dependency: [if], data = [(index]
}
} } |
public class class_name {
public RuntimeManager build(KnowledgeRuntimeManagerType type, String identifier) {
final RuntimeManager runtimeManager;
final RuntimeEnvironment runtimeEnvironment = _runtimeEnvironmentBuilder.build();
final RemoteManifest remoteManifest = RemoteManifest.removeFromEnvironment(runtimeEnvironment.getEnvironment());
if (remoteManifest != null) {
runtimeManager = new RemoteRuntimeManager(remoteManifest.buildConfiguration(), identifier);
} else {
switch (type) {
case SINGLETON:
runtimeManager = _runtimeManagerFactory.newSingletonRuntimeManager(runtimeEnvironment, identifier);
break;
case PER_REQUEST:
runtimeManager = _runtimeManagerFactory.newPerRequestRuntimeManager(runtimeEnvironment, identifier);
break;
case PER_PROCESS_INSTANCE:
runtimeManager = _runtimeManagerFactory.newPerProcessInstanceRuntimeManager(runtimeEnvironment, identifier);
break;
default:
runtimeManager = null;
break;
}
}
return runtimeManager;
} } | public class class_name {
public RuntimeManager build(KnowledgeRuntimeManagerType type, String identifier) {
final RuntimeManager runtimeManager;
final RuntimeEnvironment runtimeEnvironment = _runtimeEnvironmentBuilder.build();
final RemoteManifest remoteManifest = RemoteManifest.removeFromEnvironment(runtimeEnvironment.getEnvironment());
if (remoteManifest != null) {
runtimeManager = new RemoteRuntimeManager(remoteManifest.buildConfiguration(), identifier); // depends on control dependency: [if], data = [(remoteManifest]
} else {
switch (type) {
case SINGLETON:
runtimeManager = _runtimeManagerFactory.newSingletonRuntimeManager(runtimeEnvironment, identifier);
break;
case PER_REQUEST:
runtimeManager = _runtimeManagerFactory.newPerRequestRuntimeManager(runtimeEnvironment, identifier);
break;
case PER_PROCESS_INSTANCE:
runtimeManager = _runtimeManagerFactory.newPerProcessInstanceRuntimeManager(runtimeEnvironment, identifier);
break;
default:
runtimeManager = null;
break;
}
}
return runtimeManager;
} } |
public class class_name {
private void bigMapInitializer(StorageEngine storageEngine) {
//get all the fields from all the inherited classes
for(Field field : ReflectionMethods.getAllFields(new LinkedList<>(), this.getClass())){
//if the field is annotated with BigMap
if (field.isAnnotationPresent(BigMap.class)) {
initializeBigMapField(storageEngine, field);
}
}
} } | public class class_name {
private void bigMapInitializer(StorageEngine storageEngine) {
//get all the fields from all the inherited classes
for(Field field : ReflectionMethods.getAllFields(new LinkedList<>(), this.getClass())){
//if the field is annotated with BigMap
if (field.isAnnotationPresent(BigMap.class)) {
initializeBigMapField(storageEngine, field); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
@Override
public void prepare( CssFormatter formatter ) {
List<Rule> rules = mixins.get( name );
if( rules != null ) {
for( int i = 0; i < rules.size(); i++ ) {
Rule rule = rules.get( i );
mixins.addAll( rule.getMixins() );
}
}
} } | public class class_name {
@Override
public void prepare( CssFormatter formatter ) {
List<Rule> rules = mixins.get( name );
if( rules != null ) {
for( int i = 0; i < rules.size(); i++ ) {
Rule rule = rules.get( i );
mixins.addAll( rule.getMixins() ); // depends on control dependency: [for], data = [none]
}
}
} } |
public class class_name {
public List<Double> getOffsetsInTimeUnits() {
double start = firstDate.getMillis();
List<Double> result = new ArrayList<>(runtimes.length);
for (int idx=0; idx<runtimes.length; idx++) {
double runtime = (double) getRuntime(idx);
double msecs = (runtime - start);
result.add(msecs / timeUnit.getValueInMillisecs());
}
return result;
} } | public class class_name {
public List<Double> getOffsetsInTimeUnits() {
double start = firstDate.getMillis();
List<Double> result = new ArrayList<>(runtimes.length);
for (int idx=0; idx<runtimes.length; idx++) {
double runtime = (double) getRuntime(idx);
double msecs = (runtime - start);
result.add(msecs / timeUnit.getValueInMillisecs()); // depends on control dependency: [for], data = [none]
}
return result;
} } |
public class class_name {
public void nextState(PHATInterface phatInterface) {
switch (state) {
case NOT_INIT:
//System.out.println(getAgent().getId()+":"+getName()+"(NOT_INIT)");
/*currentState = null;
if(pendingTransitions != null) {
pendingTransitions.clear();
}*/
initState(phatInterface);
currentState = getNextAutomaton();
setState(STATE.STARTED);
break;
case DEFAULT:
//System.out.println(getAgent().getId()+":"+getName()+"(DEFAULT)");
currentState = getDefaultState(phatInterface);
if (currentState == null) {
setState(STATE.FINISHED);
} else {
transmitListeners(currentState);
setState(STATE.DEFAULT_STARTED);
}
break;
case DEFAULT_STARTED:
case STARTED:
//System.out.println(getAgent().getId()+":"+getName()+"(STARTED)");
run(phatInterface);
break;
case FINISHED:
//System.out.println(getAgent().getId()+":"+getName()+"(FINISHED)");
break;
case INTERRUPTED:
//System.out.println(getAgent().getId()+":"+getName()+"(INTERRUPTED)");
if (finishCondition != null) {
finishCondition.automatonInterrupted(this);
}
if (currentState != null) {
currentState.setState(STATE.INTERRUPTED);
currentState.nextState(phatInterface);
}
interrupt(phatInterface);
break;
case RESUMED:
//System.out.println(getAgent().getId()+":"+getName()+"(RESUMED)");
if (finishCondition != null) {
finishCondition.automatonResumed(this);
}
if (currentState != null) {
currentState.setState(STATE.RESUMED);
currentState.nextState(phatInterface);
}
resume(phatInterface);
setState(STATE.STARTED);
break;
}
} } | public class class_name {
public void nextState(PHATInterface phatInterface) {
switch (state) {
case NOT_INIT:
//System.out.println(getAgent().getId()+":"+getName()+"(NOT_INIT)");
/*currentState = null;
if(pendingTransitions != null) {
pendingTransitions.clear();
}*/
initState(phatInterface);
currentState = getNextAutomaton();
setState(STATE.STARTED);
break;
case DEFAULT:
//System.out.println(getAgent().getId()+":"+getName()+"(DEFAULT)");
currentState = getDefaultState(phatInterface);
if (currentState == null) {
setState(STATE.FINISHED); // depends on control dependency: [if], data = [none]
} else {
transmitListeners(currentState); // depends on control dependency: [if], data = [(currentState]
setState(STATE.DEFAULT_STARTED); // depends on control dependency: [if], data = [none]
}
break;
case DEFAULT_STARTED:
case STARTED:
//System.out.println(getAgent().getId()+":"+getName()+"(STARTED)");
run(phatInterface);
break;
case FINISHED:
//System.out.println(getAgent().getId()+":"+getName()+"(FINISHED)");
break;
case INTERRUPTED:
//System.out.println(getAgent().getId()+":"+getName()+"(INTERRUPTED)");
if (finishCondition != null) {
finishCondition.automatonInterrupted(this); // depends on control dependency: [if], data = [none]
}
if (currentState != null) {
currentState.setState(STATE.INTERRUPTED); // depends on control dependency: [if], data = [none]
currentState.nextState(phatInterface); // depends on control dependency: [if], data = [none]
}
interrupt(phatInterface);
break;
case RESUMED:
//System.out.println(getAgent().getId()+":"+getName()+"(RESUMED)");
if (finishCondition != null) {
finishCondition.automatonResumed(this); // depends on control dependency: [if], data = [none]
}
if (currentState != null) {
currentState.setState(STATE.RESUMED); // depends on control dependency: [if], data = [none]
currentState.nextState(phatInterface); // depends on control dependency: [if], data = [none]
}
resume(phatInterface);
setState(STATE.STARTED);
break;
}
} } |
public class class_name {
/**
 * Builds a matrix whose rows enumerate every non-empty binary combination of
 * {@code size} positions: 2^size - 1 rows (the all-zero row is deliberately
 * excluded), each row a vector of 0/1 flags.
 *
 * NOTE(review): rows are produced by mutating {@code combi} in place via the
 * posChang/posRemov cursors; presumably callers only require that all
 * combinations appear exactly once — confirm before relying on the exact
 * row order.
 *
 * @param size number of isotope positions to combine
 * @return a (2^size - 1) x size matrix of 0/1 flags
 */
private int[][] getMatrix(int size) {
    logger.info("Creating matrix for isotopes combination");
    int lengthM = (int) Math.pow(2, size);
    lengthM--;// less 1 because the matrix 00000 we don't need
    int[][] matrix = new int[lengthM][size];
    int[] combi = new int[size];
    // Start from the all-zero working vector.
    for (int j = 0; j < size; j++) {
        combi[j] = 0;
    }
    int posChang = size - 1;
    int posRemov = size - 1;
    for (int i = 0; i < lengthM; i++) {
        // cleaning to zeros
        for (int j = posRemov; j < size; j++) {
            combi[j] = 0;
        }
        combi[posChang] = 1;
        // Snapshot the current combination into the output row.
        for (int j = 0; j < size; j++)
            matrix[i][j] = combi[j];
        if (posChang == size - 1) {
            //find where is zero position, place to change
            for (int j = posChang; j >= 0; j--) {
                if (combi[j] == 0) {
                    posChang = j;
                    posRemov = j + 1;
                    break;
                }
            }
        } else {
            //look for the last zero
            for (int j = posChang; j < size; j++) {
                if (combi[j] == 0) {
                    posChang = j;
                }
            }
        }
    }
    return matrix;
} } | public class class_name {
private int[][] getMatrix(int size) {
logger.info("Creating matrix for isotopes combination");
int lengthM = (int) Math.pow(2, size);
lengthM--;// less 1 because the matrix 00000 we don't need
int[][] matrix = new int[lengthM][size];
int[] combi = new int[size];
for (int j = 0; j < size; j++) {
combi[j] = 0; // depends on control dependency: [for], data = [j]
}
int posChang = size - 1;
int posRemov = size - 1;
for (int i = 0; i < lengthM; i++) {
// cleaning to zeros
for (int j = posRemov; j < size; j++) {
combi[j] = 0; // depends on control dependency: [for], data = [j]
}
combi[posChang] = 1; // depends on control dependency: [for], data = [none]
for (int j = 0; j < size; j++)
matrix[i][j] = combi[j];
if (posChang == size - 1) {
//find where is zero position, place to change
for (int j = posChang; j >= 0; j--) {
if (combi[j] == 0) {
posChang = j; // depends on control dependency: [if], data = [none]
posRemov = j + 1; // depends on control dependency: [if], data = [none]
break;
}
}
} else {
//look for the last zero
for (int j = posChang; j < size; j++) {
if (combi[j] == 0) {
posChang = j; // depends on control dependency: [if], data = [none]
}
}
}
}
return matrix;
} } |
public class class_name {
/**
 * Marshalls the given {@link ClassificationType} into the request protocol
 * using the supplied {@code protocolMarshaller}.
 *
 * @param classificationType the value to marshall; must not be {@code null}
 * @param protocolMarshaller receives each member against its binding
 * @throws SdkClientException if {@code classificationType} is {@code null},
 *         or wrapping any failure raised while marshalling a member
 */
public void marshall(ClassificationType classificationType, ProtocolMarshaller protocolMarshaller) {
    if (classificationType == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        // Emit each member against its pre-built marshalling binding.
        protocolMarshaller.marshall(classificationType.getOneTime(), ONETIME_BINDING);
        protocolMarshaller.marshall(classificationType.getContinuous(), CONTINUOUS_BINDING);
    } catch (Exception e) {
        // Wrap low-level failures, preserving the original cause.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
} } | public class class_name {
public void marshall(ClassificationType classificationType, ProtocolMarshaller protocolMarshaller) {
if (classificationType == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(classificationType.getOneTime(), ONETIME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(classificationType.getContinuous(), CONTINUOUS_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
/**
 * Marshalls the given {@link ErrorInfo} into the request protocol using the
 * supplied {@code protocolMarshaller}.
 *
 * @param errorInfo the value to marshall; must not be {@code null}
 * @param protocolMarshaller receives each member against its binding
 * @throws SdkClientException if {@code errorInfo} is {@code null}, or
 *         wrapping any failure raised while marshalling a member
 */
public void marshall(ErrorInfo errorInfo, ProtocolMarshaller protocolMarshaller) {
    if (errorInfo == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        // Emit each member against its pre-built marshalling binding.
        protocolMarshaller.marshall(errorInfo.getCode(), CODE_BINDING);
        protocolMarshaller.marshall(errorInfo.getMessage(), MESSAGE_BINDING);
    } catch (Exception e) {
        // Wrap low-level failures, preserving the original cause.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
} } | public class class_name {
public void marshall(ErrorInfo errorInfo, ProtocolMarshaller protocolMarshaller) {
if (errorInfo == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(errorInfo.getCode(), CODE_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(errorInfo.getMessage(), MESSAGE_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
/**
 * Invoked when the root-background colour chooser fires an action. Notifies the
 * controller of a change only when the user confirmed with OK and change
 * notifications are currently enabled.
 */
private void colorChooserRootBackgroundActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_colorChooserRootBackgroundActionPerformed
    // Guard clause: bail out unless the dialog was confirmed and notifications are on.
    if (!this.colorChooserRootBackground.isLastOkPressed() || !changeNotificationAllowed) {
        return;
    }
    this.controller.changed();
} } | public class class_name {
private void colorChooserRootBackgroundActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_colorChooserRootBackgroundActionPerformed
if (this.colorChooserRootBackground.isLastOkPressed() && changeNotificationAllowed) {
this.controller.changed(); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
/**
 * Applies this transform's value mapping to every metric, after sanitising its
 * datapoints. Metrics are mutated in place and collected into a new list.
 *
 * @param metrics the metrics to transform; must not be {@code null}
 * @return a new list containing the (mutated) input metrics
 */
private List<Metric> mapping(List<Metric> metrics) {
    SystemAssert.requireArgument(metrics != null, "Cannot transform empty metric/metrics");
    if (metrics.isEmpty()) {
        return metrics;
    }
    List<Metric> transformed = new ArrayList<Metric>(metrics.size());
    for (Metric current : metrics) {
        // Sanitise first, then run the configured value mapping over the datapoints.
        Map<Long, Double> sanitized = cleanDPs(current.getDatapoints());
        current.setDatapoints(this.valueMapping.mapping(sanitized));
        transformed.add(current);
    }
    return transformed;
} } | public class class_name {
private List<Metric> mapping(List<Metric> metrics) {
SystemAssert.requireArgument(metrics != null, "Cannot transform empty metric/metrics");
if (metrics.isEmpty()) {
return metrics; // depends on control dependency: [if], data = [none]
}
List<Metric> newMetricsList = new ArrayList<Metric>();
for (Metric metric : metrics) {
Map<Long, Double> cleanDatapoints = cleanDPs(metric.getDatapoints());
metric.setDatapoints(this.valueMapping.mapping(cleanDatapoints)); // depends on control dependency: [for], data = [metric]
newMetricsList.add(metric); // depends on control dependency: [for], data = [metric]
}
return newMetricsList;
} } |
public class class_name {
/**
 * Returns the documents matching {@code term}, short-circuiting UUID terms
 * through a per-reader document-number cache (a UUID maps to at most one doc).
 * Non-UUID terms, and UUID terms when no cache is configured, fall through to
 * the shared {@code termDocsCache}.
 *
 * NOTE(review): the field comparison uses identity (==) rather than equals();
 * presumably this relies on interned field names — confirm before changing.
 *
 * @param term the term to look up; may be null
 * @return a TermDocs over the matching documents
 * @throws IOException on index access failure
 */
public TermDocs termDocs(Term term) throws IOException
{
    if (term!=null && term.field() == FieldNames.UUID)
    {
        // check cache if we have one
        if (cache != null)
        {
            DocNumberCache.Entry e = cache.get(term.text());
            if (e != null)
            {
                // check if valid
                // the cache may contain entries from a different reader
                // with the same uuid. that happens when a node is updated
                // and is reindexed. the node 'travels' from an older index
                // to a newer one. the cache will still contain a cache
                // entry from the old until it is overwritten by the
                // newer index.
                if (e.creationTick == creationTick && !isDeleted(e.doc))
                {
                    return new SingleTermDocs(e.doc);
                }
            }
            // not in cache or invalid
            TermDocs docs = in.termDocs(term);
            try
            {
                if (docs.next())
                {
                    // put to cache
                    cache.put(term.text(), this, docs.doc());
                    // and return
                    return new SingleTermDocs(docs.doc());
                }
                else
                {
                    return EmptyTermDocs.INSTANCE;
                }
            }
            finally
            {
                // Always release the underlying enumeration.
                docs.close();
            }
        }
    }
    return termDocsCache.termDocs(term);
} } | public class class_name {
public TermDocs termDocs(Term term) throws IOException
{
if (term!=null && term.field() == FieldNames.UUID)
{
// check cache if we have one
if (cache != null)
{
DocNumberCache.Entry e = cache.get(term.text());
if (e != null)
{
// check if valid
// the cache may contain entries from a different reader
// with the same uuid. that happens when a node is updated
// and is reindexed. the node 'travels' from an older index
// to a newer one. the cache will still contain a cache
// entry from the old until it is overwritten by the
// newer index.
if (e.creationTick == creationTick && !isDeleted(e.doc))
{
return new SingleTermDocs(e.doc); // depends on control dependency: [if], data = [none]
}
}
// not in cache or invalid
TermDocs docs = in.termDocs(term);
try
{
if (docs.next())
{
// put to cache
cache.put(term.text(), this, docs.doc()); // depends on control dependency: [if], data = [none]
// and return
return new SingleTermDocs(docs.doc()); // depends on control dependency: [if], data = [none]
}
else
{
return EmptyTermDocs.INSTANCE; // depends on control dependency: [if], data = [none]
}
}
finally
{
docs.close();
}
}
}
return termDocsCache.termDocs(term);
} } |
public class class_name {
/**
 * Rebuilds the list of suggestion matches by scanning {@code messageStr} for
 * legacy backslash-digit references (e.g. {@code \1}). A backslash-digit not
 * preceded by the \u0001 marker becomes a new in-message-only match; one
 * preceded by the marker consumes the next existing match and removes the
 * marker character from the corresponding buffer.
 *
 * @param existingSugMatches matches already parsed from the rule
 * @param messageStr the raw message text to scan
 * @param inMessage true to strip markers from {@code message}, false for
 *        {@code suggestionsOutMsg}
 * @return the rebuilt match list, or {@code existingSugMatches} unchanged when
 *         no legacy references were found
 */
@Nullable
protected List<Match> addLegacyMatches(List<Match> existingSugMatches, String messageStr,
    boolean inMessage) {
    List<Match> sugMatch = new ArrayList<>();
    int pos = 0;
    int ind = 0;
    int matchCounter = 0;
    while (pos != -1) {
        pos = messageStr.indexOf('\\', ind);
        // BUG FIX: the previous guard "messageStr.length() > pos" is always true for a
        // valid index, so charAt(pos + 1) could throw StringIndexOutOfBoundsException
        // when the backslash was the last character. Require a character AFTER it.
        if (pos != -1 && pos + 1 < messageStr.length() && Character.isDigit(messageStr.charAt(pos + 1))) {
            if (pos == 0 || messageStr.charAt(pos - 1) != '\u0001') {
                // Bare \N reference: synthesize a pass-through, in-message-only match.
                Match mWorker = new Match(null, null, false, null,
                    null, Match.CaseConversion.NONE, false, false, Match.IncludeRange.NONE);
                mWorker.setInMessageOnly(true);
                sugMatch.add(mWorker);
            } else if (messageStr.charAt(pos - 1) == '\u0001') { // real suggestion marker
                sugMatch.add(existingSugMatches.get(matchCounter));
                // Remove the marker; earlier deletions shift positions left by matchCounter.
                if (inMessage) {
                    message.deleteCharAt(pos - 1 - matchCounter);
                } else {
                    suggestionsOutMsg.deleteCharAt(pos - 1 - matchCounter);
                }
                matchCounter++;
            }
        }
        ind = pos + 1;
    }
    if (sugMatch.isEmpty()) {
        return existingSugMatches;
    }
    return sugMatch;
} } | public class class_name {
@Nullable
protected List<Match> addLegacyMatches(List <Match> existingSugMatches, String messageStr,
boolean inMessage) {
List<Match> sugMatch = new ArrayList<>();
int pos = 0;
int ind = 0;
int matchCounter = 0;
while (pos != -1) {
pos = messageStr.indexOf('\\', ind); // depends on control dependency: [while], data = [none]
if (pos != -1 && messageStr.length() > pos && Character.isDigit(messageStr.charAt(pos + 1))) {
if (pos == 0 || messageStr.charAt(pos - 1) != '\u0001') {
Match mWorker = new Match(null, null, false, null,
null, Match.CaseConversion.NONE, false, false, Match.IncludeRange.NONE);
mWorker.setInMessageOnly(true); // depends on control dependency: [if], data = [none]
sugMatch.add(mWorker); // depends on control dependency: [if], data = [none]
} else if (messageStr.charAt(pos - 1) == '\u0001') { // real suggestion marker
sugMatch.add(existingSugMatches.get(matchCounter)); // depends on control dependency: [if], data = [none]
if (inMessage) {
message.deleteCharAt(pos - 1 - matchCounter); // depends on control dependency: [if], data = [none]
} else {
suggestionsOutMsg.deleteCharAt(pos - 1 - matchCounter); // depends on control dependency: [if], data = [none]
}
matchCounter++; // depends on control dependency: [if], data = [none]
}
}
ind = pos + 1; // depends on control dependency: [while], data = [none]
}
if (sugMatch.isEmpty()) {
return existingSugMatches; // depends on control dependency: [if], data = [none]
}
return sugMatch;
} } |
public class class_name {
/**
 * Looks up an encoded image for the given key: a hit in the staging area is
 * returned synchronously, otherwise an asynchronous disk lookup is started.
 * Systrace sections bracket the call when tracing is enabled.
 *
 * @param key the cache key to look up
 * @param isCancelled cooperative cancellation flag for the async path
 * @return a task resolving to the encoded image (or the async lookup result)
 */
public Task<EncodedImage> get(CacheKey key, AtomicBoolean isCancelled) {
    try {
        if (FrescoSystrace.isTracing()) {
            FrescoSystrace.beginSection("BufferedDiskCache#get");
        }
        // Fast path: image still pinned in the staging area.
        final EncodedImage stagedImage = mStagingArea.get(key);
        return (stagedImage != null)
            ? foundPinnedImage(key, stagedImage)
            : getAsync(key, isCancelled);
    } finally {
        if (FrescoSystrace.isTracing()) {
            FrescoSystrace.endSection();
        }
    }
} } | public class class_name {
public Task<EncodedImage> get(CacheKey key, AtomicBoolean isCancelled) {
try {
if (FrescoSystrace.isTracing()) {
FrescoSystrace.beginSection("BufferedDiskCache#get"); // depends on control dependency: [if], data = [none]
}
final EncodedImage pinnedImage = mStagingArea.get(key);
if (pinnedImage != null) {
return foundPinnedImage(key, pinnedImage); // depends on control dependency: [if], data = [none]
}
return getAsync(key, isCancelled); // depends on control dependency: [try], data = [none]
} finally {
if (FrescoSystrace.isTracing()) {
FrescoSystrace.endSection(); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
/**
 * Resolves the descriptor for a field from its {@code DatabaseField} annotation
 * (preferred) or its JPA {@code Column} annotation, memoising the result.
 *
 * @param field the reflective field to inspect
 * @return the descriptor, or {@code null} when the field carries neither annotation
 */
public static DatabaseFieldDescriptor getDatabaseFieldDescriptor(final Field field) {
    DatabaseFieldDescriptor descriptor = fieldToDataBaseFieldDescriptorMap.get(field);
    if (descriptor != null) {
        return descriptor;
    }
    // Not cached yet (or a previous lookup stored null): inspect the annotations.
    final DatabaseField databaseField = field.getAnnotation(DatabaseField.class);
    if (databaseField != null) {
        descriptor = new DatabaseFieldDescriptor(databaseField);
    } else {
        // do we have a column definition?
        final Column column = field.getAnnotation(Column.class);
        if (column != null) {
            descriptor = new DatabaseFieldDescriptor(column);
        }
    }
    // A null descriptor is cached too, matching the original behaviour.
    fieldToDataBaseFieldDescriptorMap.put(field, descriptor);
    return descriptor;
} } | public class class_name {
public static DatabaseFieldDescriptor getDatabaseFieldDescriptor(final Field field) {
DatabaseFieldDescriptor databaseFieldDescriptor = fieldToDataBaseFieldDescriptorMap.get(field);
if (databaseFieldDescriptor == null) {
DatabaseField databaseField = field.getAnnotation(DatabaseField.class);
if (databaseField != null) {
databaseFieldDescriptor = new DatabaseFieldDescriptor(databaseField); // depends on control dependency: [if], data = [(databaseField]
} else {
// do we have a column definition?
final Column column = field.getAnnotation(Column.class);
if (column != null) {
databaseFieldDescriptor = new DatabaseFieldDescriptor(column); // depends on control dependency: [if], data = [(column]
}
}
fieldToDataBaseFieldDescriptorMap.put(field, databaseFieldDescriptor); // depends on control dependency: [if], data = [none]
}
return databaseFieldDescriptor;
} } |
public class class_name {
/**
 * Builds a {@link Date} pinned to the start or end of the given month of the
 * given year, clamping out-of-range months into [1, 12].
 *
 * @param year the calendar year
 * @param month 1-based month; values below 1 clamp to January, above 12 to December
 * @param is000 true for the first instant (…000), false for the last (…999)
 * @return the boundary date produced by the corresponding helper
 */
public static Date dateReservedMonth(int year, int month, boolean is000) {
    Calendar calendar = Calendar.getInstance();
    calendar.set(Calendar.YEAR, year);
    // Clamp into [1, 12]; Calendar months are zero-based (JANUARY == 0).
    int clampedMonth = Math.max(1, Math.min(12, month));
    calendar.set(Calendar.MONTH, clampedMonth - 1);
    return is000 ? dateReservedMonth000(calendar) : dateReservedMonth999(calendar);
} } | public class class_name {
public static Date dateReservedMonth(int year, int month, boolean is000) {
Calendar calendar = Calendar.getInstance();
calendar.set(Calendar.YEAR, year);
if (month <= 1) {
calendar.set(Calendar.MONTH, Calendar.JANUARY); // depends on control dependency: [if], data = [none]
} else if (month >= 12) {
calendar.set(Calendar.MONTH, Calendar.DECEMBER); // depends on control dependency: [if], data = [none]
} else {
calendar.set(Calendar.MONTH, month - 1); // depends on control dependency: [if], data = [none]
}
return is000 ? dateReservedMonth000(calendar) : dateReservedMonth999(calendar);
} } |
public class class_name {
/**
 * Strips Forge proxy wrappers from the given object by repeatedly invoking the
 * proxy's reflective {@code getDelegate()} until a non-proxy is reached. If
 * unwrapping fails or yields null, the original object is returned.
 *
 * @param object the possibly-proxied object; may be null
 * @return the unwrapped delegate, or {@code object} itself when no unwrapping applies
 */
@SuppressWarnings("unchecked")
public static <T> T unwrap(Object object)
{
    if (object == null)
    {
        return null;
    }
    T result = (T) object;
    while (isForgeProxy(result))
    {
        try
        {
            Method getDelegate = result.getClass().getMethod("getDelegate");
            getDelegate.setAccessible(true);
            result = (T) getDelegate.invoke(result);
        }
        catch (Exception ignored)
        {
            // Proxy without an accessible delegate: stop unwrapping here.
            break;
        }
    }
    // Never return null for a non-null input: fall back to the original object.
    return result == null ? (T) object : result;
} } | public class class_name {
@SuppressWarnings("unchecked")
public static <T> T unwrap(Object object)
{
T result = (T) object;
if (object != null)
{
while (isForgeProxy(result))
{
try
{
Method method = result.getClass().getMethod("getDelegate");
method.setAccessible(true); // depends on control dependency: [try], data = [none]
result = (T) method.invoke(result); // depends on control dependency: [try], data = [none]
}
catch (Exception e)
{
break;
} // depends on control dependency: [catch], data = [none]
}
if (result == null)
result = (T) object;
}
return result;
} } |
public class class_name {
/**
 * Runs the configured node runner, broadcasting intermediate results through
 * this notifier, and always emits a final "ExecConfigComplete" JMX
 * notification — even when the run throws.
 *
 * @return the root node produced by the runner
 */
public RootNode run() {
    // Register ourselves so intermediate results are broadcast as they occur.
    this.nodeRunner.addNotifier(this);
    final RootNode rootNode;
    try {
        rootNode = this.nodeRunner.run();
    } finally {
        // Announce completion unconditionally.
        this.log.trace("sending complete notification sequence: " + this.notificationSequenceNumber);
        sendNotification(new Notification("ExecConfigComplete",
                SubstepsServerMBean.SUBSTEPS_JMX_MBEAN_NAME, this.notificationSequenceNumber));
    }
    return rootNode;
} } | public class class_name {
public RootNode run() {
// attach a result listener to broadcast
this.nodeRunner.addNotifier(this);
final RootNode rootNode;
try {
rootNode = this.nodeRunner.run(); // depends on control dependency: [try], data = [none]
} finally {
// now send the final notification
final Notification n = new Notification("ExecConfigComplete", SubstepsServerMBean.SUBSTEPS_JMX_MBEAN_NAME, this.notificationSequenceNumber);
this.log.trace("sending complete notification sequence: " + this.notificationSequenceNumber);
sendNotification(n);
}
return rootNode;
} } |
public class class_name {
/**
 * Decodes a GSSUP (username/password) initial-context token from raw CSIv2
 * token bytes and copies its fields into {@code gssup_tok}.
 *
 * Any decoding failure is deliberately swallowed and reported as {@code false};
 * note that {@code gssup_tok} may then be partially populated.
 *
 * @param codec CDR codec used to decode the embedded InitialContextToken
 * @param token_arr raw token bytes (sensitive: may contain credentials)
 * @param gssup_tok out-parameter receiving username, password and target name
 * @return true only when the token was fully decoded
 */
public static boolean decodeGSSUPToken(Codec codec, @Sensitive byte[] token_arr,
        InitialContextToken gssup_tok) {
    boolean result = false;
    if (gssup_tok != null) {
        try {
            // Strip the GSSUP mech-OID framing to obtain the encapsulated CDR data.
            byte[] data = readGSSTokenData(GSSUPMechOID.value.substring(4), token_arr);
            if (data != null) {
                Any a = codec.decode_value(data, InitialContextTokenHelper.type());
                InitialContextToken token = InitialContextTokenHelper.extract(a);
                if (token != null) {
                    gssup_tok.username = token.username;
                    gssup_tok.password = token.password;
                    // Target name arrives as a GSS exported name; decode to UTF-8 bytes.
                    gssup_tok.target_name = decodeGSSExportedName(token.target_name).getName().getBytes("UTF-8");
                    result = true;
                }
            }
        } catch (Exception ex) {
            // do nothing, return false
        }
    }
    return result;
} } | public class class_name {
public static boolean decodeGSSUPToken(Codec codec, @Sensitive byte[] token_arr,
InitialContextToken gssup_tok) {
boolean result = false;
if (gssup_tok != null) {
try {
byte[] data = readGSSTokenData(GSSUPMechOID.value.substring(4), token_arr);
if (data != null) {
Any a = codec.decode_value(data, InitialContextTokenHelper.type());
InitialContextToken token = InitialContextTokenHelper.extract(a);
if (token != null) {
gssup_tok.username = token.username; // depends on control dependency: [if], data = [none]
gssup_tok.password = token.password; // depends on control dependency: [if], data = [none]
gssup_tok.target_name = decodeGSSExportedName(token.target_name).getName().getBytes("UTF-8"); // depends on control dependency: [if], data = [(token]
result = true; // depends on control dependency: [if], data = [none]
}
}
} catch (Exception ex) {
// do nothing, return false
} // depends on control dependency: [catch], data = [none]
}
return result;
} } |
public class class_name {
/**
 * Looks up the file-info record for a URI, trying progressively more expensive
 * strategies: direct key lookup, temp-directory-relative lookup, then a linear
 * scan over source/result URIs.
 *
 * @param file the URI to resolve; may be null
 * @return the matching record, or {@code null} when none is found
 */
public FileInfo getFileInfo(final URI file) {
    if (file == null) {
        return null;
    }
    if (files.containsKey(file)) {
        return files.get(file);
    }
    if (file.isAbsolute() && file.toString().startsWith(tempDirURI.toString())) {
        // Absolute URI inside the temp directory: resolve via its job-relative form.
        return files.get(getRelativePath(jobFile.toURI(), file));
    }
    // Fallback: scan for a record whose source or result URI matches.
    for (final FileInfo candidate : files.values()) {
        if (file.equals(candidate.src) || file.equals(candidate.result)) {
            return candidate;
        }
    }
    return null;
} } | public class class_name {
public FileInfo getFileInfo(final URI file) {
if (file == null) {
return null; // depends on control dependency: [if], data = [none]
} else if (files.containsKey(file)) {
return files.get(file); // depends on control dependency: [if], data = [none]
} else if (file.isAbsolute() && file.toString().startsWith(tempDirURI.toString())) {
final URI relative = getRelativePath(jobFile.toURI(), file);
return files.get(relative); // depends on control dependency: [if], data = [none]
} else {
return files.values().stream()
.filter(fileInfo -> file.equals(fileInfo.src) || file.equals(fileInfo.result))
.findFirst()
.orElse(null); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
/**
 * Orders two candidate target types by how cheaply {@code mFrom} converts to
 * each (negative means {@code toType_a} is preferred). Exact matches win
 * outright; on a tie, boxing (for a primitive source) or unboxing and
 * re-boxing (for a reference source) conversions are tried as tie-breakers.
 *
 * @param toType_a first candidate target type
 * @param toType_b second candidate target type
 * @return negative/zero/positive per the comparator contract
 */
public int compare(Class toType_a, Class toType_b) {
    TypeDesc from = mFrom;
    TypeDesc a = TypeDesc.forClass(toType_a);
    TypeDesc b = TypeDesc.forClass(toType_b);
    // An exact match to the source type always wins.
    if (from == a) {
        if (from == b) {
            return 0;
        }
        return -1;
    } else if (from == b) {
        return 1;
    }
    int result = compare(from, a, b);
    if (result != 0) {
        return result;
    }
    if (from.isPrimitive()) {
        // Try boxing.
        if (from.toObjectType() != null) {
            from = from.toObjectType();
            return compare(from, a, b);
        }
    } else {
        // Try unboxing.
        if (from.toPrimitiveType() != null) {
            from = from.toPrimitiveType();
            result = compare(from, a, b);
            if (result != 0) {
                return result;
            }
            // Try boxing back up. Test by unboxing 'to' types.
            if (!toType_a.isPrimitive() && a.toPrimitiveType() != null) {
                a = a.toPrimitiveType();
            }
            if (!toType_b.isPrimitive() && b.toPrimitiveType() != null) {
                b = b.toPrimitiveType();
            }
            return compare(from, a, b);
        }
    }
    return 0;
} } | public class class_name {
public int compare(Class toType_a, Class toType_b) {
TypeDesc from = mFrom;
TypeDesc a = TypeDesc.forClass(toType_a);
TypeDesc b = TypeDesc.forClass(toType_b);
if (from == a) {
if (from == b) {
return 0;
// depends on control dependency: [if], data = [none]
}
return -1;
// depends on control dependency: [if], data = [none]
} else if (from == b) {
return 1;
// depends on control dependency: [if], data = [none]
}
int result = compare(from, a, b);
if (result != 0) {
return result;
// depends on control dependency: [if], data = [none]
}
if (from.isPrimitive()) {
// Try boxing.
if (from.toObjectType() != null) {
from = from.toObjectType();
// depends on control dependency: [if], data = [none]
return compare(from, a, b);
// depends on control dependency: [if], data = [none]
}
} else {
// Try unboxing.
if (from.toPrimitiveType() != null) {
from = from.toPrimitiveType();
// depends on control dependency: [if], data = [none]
result = compare(from, a, b);
// depends on control dependency: [if], data = [none]
if (result != 0) {
return result;
// depends on control dependency: [if], data = [none]
}
// Try boxing back up. Test by unboxing 'to' types.
if (!toType_a.isPrimitive() && a.toPrimitiveType() != null) {
a = a.toPrimitiveType();
// depends on control dependency: [if], data = [none]
}
if (!toType_b.isPrimitive() && b.toPrimitiveType() != null) {
b = b.toPrimitiveType();
// depends on control dependency: [if], data = [none]
}
return compare(from, a, b);
// depends on control dependency: [if], data = [none]
}
}
return 0;
} } |
public class class_name {
/**
 * Reconciles the on-disk block scan against the in-memory block report, one
 * namespace at a time, populating statsPerNamespace and diffsPerNamespace.
 * The two sorted reports are merge-joined on block id; blocks added/removed
 * concurrently (tracked in {@code delta}) are excluded so in-flight
 * operations are not flagged as inconsistencies. Results are logged at the end.
 */
void checkDifference() {
    resetDiffsAndStats();
    InjectionHandler.processEvent(InjectionEvent.DIRECTORY_SCANNER_BEFORE_FILE_SCAN);
    Map<Integer, DiskScanInfo[]> diskReportPerNamespace = getDiskReportPerNamespace();
    InjectionHandler.processEvent(InjectionEvent.DIRECTORY_SCANNER_AFTER_FILE_SCAN);
    try {
        for (Entry<Integer, DiskScanInfo[]> entry : diskReportPerNamespace.entrySet()) {
            Integer namespaceId = entry.getKey();
            DiskScanInfo[] namespaceReport = entry.getValue();
            Stats statsRecord = new Stats(namespaceId);
            statsPerNamespace.put(namespaceId, statsRecord);
            LinkedList<ScanDifference> diffRecords = new LinkedList<ScanDifference>();
            diffsPerNamespace.put(namespaceId, diffRecords);
            statsRecord.totalBlocks = namespaceReport.length;
            Block[] memReport = dataset.getBlockReport(namespaceId);
            Arrays.sort(memReport); // Sort based on blockId
            int d = 0; // index for blockpoolReport
            int m = 0; // index for memReprot
            // Merge-join both sorted reports on block id.
            while (m < memReport.length && d < namespaceReport.length) {
                Block memBlock = memReport[Math.min(m, memReport.length - 1)];
                DiskScanInfo scanInfo = namespaceReport[Math.min(
                    d, namespaceReport.length - 1)];
                if (scanInfo.getBlockId() < memBlock.getBlockId()) {
                    // Block is missing in memory
                    // If this block was removed during scan, then do not add it to diff
                    if (delta.get(namespaceId, scanInfo.getBlockId()) != BlockOperation.REMOVE) {
                        // Otherwise this is the case for reconciliation
                        statsRecord.missingMemoryBlocks++;
                        statsRecord.missingMetaFile +=
                            scanInfo.getMetaFile() == null ? 1 : 0;
                        statsRecord.missingBlockFile +=
                            scanInfo.getBlockFile() == null ? 1 : 0;
                        diffRecords.add(ScanDifference.createDiffForMemoryBlockMissing(scanInfo));
                    }
                    d++;
                    continue;
                }
                if (scanInfo.getBlockId() > memBlock.getBlockId()) {
                    // Block is missing on the disk
                    // If this block was added during scan, then do not add it to diff
                    if (delta.get(namespaceId, memBlock) != BlockOperation.ADD) {
                        statsRecord.missingBlockFile++;
                        statsRecord.missingMetaFile++;
                        diffRecords.add(ScanDifference.createDiffForDiskFilesMissing(memBlock.getBlockId()));
                    }
                    m++;
                    continue;
                }
                // Block file and/or metadata file exists on the disk
                // Block exists in memory
                if (scanInfo.getBlockFile() == null) {
                    // Block metadata file exits and block file is missing
                    // this is not the case for reconciliation if the block was added or removed
                    // during scanning process
                    BlockOperation deltaOp = delta.get(namespaceId, memBlock);
                    if (deltaOp != BlockOperation.ADD && deltaOp != BlockOperation.REMOVE) {
                        statsRecord.missingBlockFile += scanInfo.getBlockFile() == null ? 1 : 0;
                        diffRecords.add(ScanDifference.createDiffOutOfSync(scanInfo));
                    }
                } else if (scanInfo.getGenStamp() != memBlock.getGenerationStamp()
                    || scanInfo.getLength() != memBlock.getNumBytes()) {
                    // Block metadata file is missing or has wrong generation stamp,
                    // or block file length is different than expected. It could happen
                    // in both add, remove and update operations
                    if (delta.get(namespaceId, memBlock) == null) {
                        statsRecord.mismatchBlocks++;
                        statsRecord.missingMetaFile += scanInfo.getMetaFile() == null ? 1 : 0;
                        diffRecords.add(ScanDifference.createDiffOutOfSync(scanInfo));
                    }
                }
                d++;
                m++;
            }
            // Drain memory-only remainder: files missing on disk.
            while (m < memReport.length) {
                if (delta.get(namespaceId, memReport[m].getBlockId()) != BlockOperation.ADD) {
                    statsRecord.missingBlockFile++;
                    statsRecord.missingMetaFile++;
                    diffRecords
                        .add(ScanDifference.createDiffForDiskFilesMissing(memReport[m].getBlockId()));
                }
                m++;
            }
            // Drain disk-only remainder: blocks missing in memory.
            while (d < namespaceReport.length) {
                DiskScanInfo info = namespaceReport[d];
                if (delta.get(namespaceId, info.getBlockId()) != BlockOperation.REMOVE) {
                    statsRecord.missingMemoryBlocks++;
                    statsRecord.missingMetaFile += info.getMetaFile() == null ? 1 : 0;
                    statsRecord.missingBlockFile += info.getBlockFile() == null ? 1 : 0;
                    diffRecords.add(ScanDifference.createDiffForMemoryBlockMissing(info));
                }
                d++;
            }
        } //end for
    } catch (IOException e) {
        LOG.warn("Scanning failed beause of IOException", e);
    }
    InjectionHandler.processEvent(InjectionEvent.DIRECTORY_SCANNER_AFTER_DIFF);
    StringBuilder sb = new StringBuilder();
    for (Entry<Integer, Stats> entry : statsPerNamespace.entrySet()) {
        sb.append("Namespace ID: " + entry.getKey() + " results: " + entry.getValue().toString());
    }
    LOG.info(sb.toString());
} } | public class class_name {
void checkDifference() {
resetDiffsAndStats();
InjectionHandler.processEvent(InjectionEvent.DIRECTORY_SCANNER_BEFORE_FILE_SCAN);
Map<Integer, DiskScanInfo[]> diskReportPerNamespace = getDiskReportPerNamespace();
InjectionHandler.processEvent(InjectionEvent.DIRECTORY_SCANNER_AFTER_FILE_SCAN);
try {
for (Entry<Integer, DiskScanInfo[]> entry : diskReportPerNamespace.entrySet()) {
Integer namespaceId = entry.getKey();
DiskScanInfo[] namespaceReport = entry.getValue();
Stats statsRecord = new Stats(namespaceId);
statsPerNamespace.put(namespaceId, statsRecord); // depends on control dependency: [for], data = [none]
LinkedList<ScanDifference> diffRecords = new LinkedList<ScanDifference>();
diffsPerNamespace.put(namespaceId, diffRecords); // depends on control dependency: [for], data = [none]
statsRecord.totalBlocks = namespaceReport.length; // depends on control dependency: [for], data = [none]
Block[] memReport = dataset.getBlockReport(namespaceId);
Arrays.sort(memReport); // Sort based on blockId // depends on control dependency: [for], data = [none]
int d = 0; // index for blockpoolReport
int m = 0; // index for memReprot
while (m < memReport.length && d < namespaceReport.length) {
Block memBlock = memReport[Math.min(m, memReport.length - 1)];
DiskScanInfo scanInfo = namespaceReport[Math.min(
d, namespaceReport.length - 1)];
if (scanInfo.getBlockId() < memBlock.getBlockId()) {
// Block is missing in memory
// If this block was removed during scan, then do not add it to diff
if (delta.get(namespaceId, scanInfo.getBlockId()) != BlockOperation.REMOVE) {
// Otherwise this is the case for reconciliation
statsRecord.missingMemoryBlocks++; // depends on control dependency: [if], data = [none]
statsRecord.missingMetaFile +=
scanInfo.getMetaFile() == null ? 1 : 0; // depends on control dependency: [if], data = [none]
statsRecord.missingBlockFile +=
scanInfo.getBlockFile() == null ? 1 : 0; // depends on control dependency: [if], data = [none]
diffRecords.add(ScanDifference.createDiffForMemoryBlockMissing(scanInfo)); // depends on control dependency: [if], data = [none]
}
d++; // depends on control dependency: [if], data = [none]
continue;
}
if (scanInfo.getBlockId() > memBlock.getBlockId()) {
// Block is missing on the disk
// If this block was added during scan, then do not add it to diff
if (delta.get(namespaceId, memBlock) != BlockOperation.ADD) {
statsRecord.missingBlockFile++; // depends on control dependency: [if], data = [none]
statsRecord.missingMetaFile++; // depends on control dependency: [if], data = [none]
diffRecords.add(ScanDifference.createDiffForDiskFilesMissing(memBlock.getBlockId())); // depends on control dependency: [if], data = [none]
}
m++; // depends on control dependency: [if], data = [none]
continue;
}
// Block file and/or metadata file exists on the disk
// Block exists in memory
if (scanInfo.getBlockFile() == null) {
// Block metadata file exits and block file is missing
// this is not the case for reconciliation if the block was added or removed
// during scanning process
BlockOperation deltaOp = delta.get(namespaceId, memBlock);
if (deltaOp != BlockOperation.ADD && deltaOp != BlockOperation.REMOVE) {
statsRecord.missingBlockFile += scanInfo.getBlockFile() == null ? 1 : 0; // depends on control dependency: [if], data = [none]
diffRecords.add(ScanDifference.createDiffOutOfSync(scanInfo)); // depends on control dependency: [if], data = [none]
}
} else if (scanInfo.getGenStamp() != memBlock.getGenerationStamp()
|| scanInfo.getLength() != memBlock.getNumBytes()) {
// Block metadata file is missing or has wrong generation stamp,
// or block file length is different than expected. It could happen
// in both add, remove and update operations
if (delta.get(namespaceId, memBlock) == null) {
statsRecord.mismatchBlocks++; // depends on control dependency: [if], data = [none]
statsRecord.missingMetaFile += scanInfo.getMetaFile() == null ? 1 : 0; // depends on control dependency: [if], data = [none]
diffRecords.add(ScanDifference.createDiffOutOfSync(scanInfo)); // depends on control dependency: [if], data = [none]
}
}
d++; // depends on control dependency: [while], data = [none]
m++; // depends on control dependency: [while], data = [none]
}
while (m < memReport.length) {
if (delta.get(namespaceId, memReport[m].getBlockId()) != BlockOperation.ADD) {
statsRecord.missingBlockFile++; // depends on control dependency: [if], data = [none]
statsRecord.missingMetaFile++; // depends on control dependency: [if], data = [none]
diffRecords
.add(ScanDifference.createDiffForDiskFilesMissing(memReport[m].getBlockId())); // depends on control dependency: [if], data = [none]
}
m++; // depends on control dependency: [while], data = [none]
}
while (d < namespaceReport.length) {
DiskScanInfo info = namespaceReport[d];
if (delta.get(namespaceId, info.getBlockId()) != BlockOperation.REMOVE) {
statsRecord.missingMemoryBlocks++; // depends on control dependency: [if], data = [none]
statsRecord.missingMetaFile += info.getMetaFile() == null ? 1 : 0; // depends on control dependency: [if], data = [none]
statsRecord.missingBlockFile += info.getBlockFile() == null ? 1 : 0; // depends on control dependency: [if], data = [none]
diffRecords.add(ScanDifference.createDiffForMemoryBlockMissing(info)); // depends on control dependency: [if], data = [none]
}
d++; // depends on control dependency: [while], data = [none]
}
} //end for
} catch (IOException e) {
LOG.warn("Scanning failed beause of IOException", e);
} // depends on control dependency: [catch], data = [none]
InjectionHandler.processEvent(InjectionEvent.DIRECTORY_SCANNER_AFTER_DIFF);
StringBuilder sb = new StringBuilder();
for (Entry<Integer, Stats> entry : statsPerNamespace.entrySet()) {
sb.append("Namespace ID: " + entry.getKey() + " results: " + entry.getValue().toString()); // depends on control dependency: [for], data = [entry]
}
LOG.info(sb.toString());
} } |
public class class_name {
public static <Item extends IItem> void addAllSubItems(Item item, List<Item> items) {
if (item instanceof IExpandable && !((IExpandable) item).isExpanded() && ((IExpandable) item).getSubItems() != null) {
List<Item> subItems = (List<Item>) ((IExpandable<Item, ?>) item).getSubItems();
Item subItem;
for (int i = 0, size = subItems.size(); i < size; i++) {
subItem = subItems.get(i);
items.add(subItem);
addAllSubItems(subItem, items);
}
}
} } | public class class_name {
public static <Item extends IItem> void addAllSubItems(Item item, List<Item> items) {
if (item instanceof IExpandable && !((IExpandable) item).isExpanded() && ((IExpandable) item).getSubItems() != null) {
List<Item> subItems = (List<Item>) ((IExpandable<Item, ?>) item).getSubItems();
Item subItem;
for (int i = 0, size = subItems.size(); i < size; i++) {
subItem = subItems.get(i); // depends on control dependency: [for], data = [i]
items.add(subItem); // depends on control dependency: [for], data = [none]
addAllSubItems(subItem, items); // depends on control dependency: [for], data = [none]
}
}
} } |
public class class_name {
@Override
protected void onMethodExit(int opcode) {
if ((withPreconditions || withPostconditions || withInvariants)
&& opcode != ATHROW) {
if (withPostconditions || withInvariants) {
Label skip = enterBusySection();
if (withPostconditions) {
Type returnType = Type.getReturnType(methodDesc);
int returnIndex = -1;
if (returnType.getSort() != Type.VOID) {
if (returnType.getSize() == 2) {
dup2();
} else {
dup();
}
returnIndex = newLocal(returnType);
storeLocal(returnIndex);
}
invokeCommonPostconditions(ContractKind.POST, oldValueLocals,
returnIndex);
}
if (withInvariants && !statik) {
invokeInvariants();
}
leaveBusySection(skip);
}
leaveContractedMethod();
}
} } | public class class_name {
@Override
protected void onMethodExit(int opcode) {
if ((withPreconditions || withPostconditions || withInvariants)
&& opcode != ATHROW) {
if (withPostconditions || withInvariants) {
Label skip = enterBusySection();
if (withPostconditions) {
Type returnType = Type.getReturnType(methodDesc);
int returnIndex = -1;
if (returnType.getSort() != Type.VOID) {
if (returnType.getSize() == 2) {
dup2(); // depends on control dependency: [if], data = [none]
} else {
dup(); // depends on control dependency: [if], data = [none]
}
returnIndex = newLocal(returnType); // depends on control dependency: [if], data = [none]
storeLocal(returnIndex); // depends on control dependency: [if], data = [none]
}
invokeCommonPostconditions(ContractKind.POST, oldValueLocals,
returnIndex); // depends on control dependency: [if], data = [none]
}
if (withInvariants && !statik) {
invokeInvariants(); // depends on control dependency: [if], data = [none]
}
leaveBusySection(skip); // depends on control dependency: [if], data = [none]
}
leaveContractedMethod(); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
private Bbox getOverviewMaxBounds() {
Bbox targetMaxBounds;
if (!useTargetMaxExtent) {
targetMaxBounds = new Bbox(getMapModel().getMapInfo().getInitialBounds());
} else {
// maxBounds was not configured, or need to use maxBounds from target
if (targetMap.getMapModel().isInitialized()) {
// rely on target map bounds
targetMaxBounds = targetMap.getMapModel().getMapView().getMaxBounds();
} else {
// fall back to configured bounds (should be temporary)
targetMaxBounds = new Bbox(targetMap.getMapModel().getMapInfo().getMaxBounds());
}
}
return targetMaxBounds;
} } | public class class_name {
private Bbox getOverviewMaxBounds() {
Bbox targetMaxBounds;
if (!useTargetMaxExtent) {
targetMaxBounds = new Bbox(getMapModel().getMapInfo().getInitialBounds()); // depends on control dependency: [if], data = [none]
} else {
// maxBounds was not configured, or need to use maxBounds from target
if (targetMap.getMapModel().isInitialized()) {
// rely on target map bounds
targetMaxBounds = targetMap.getMapModel().getMapView().getMaxBounds(); // depends on control dependency: [if], data = [none]
} else {
// fall back to configured bounds (should be temporary)
targetMaxBounds = new Bbox(targetMap.getMapModel().getMapInfo().getMaxBounds()); // depends on control dependency: [if], data = [none]
}
}
return targetMaxBounds;
} } |
public class class_name {
public static Predicate<Imp> addFilters(
Predicate<Imp> baseFilter, boolean banner, boolean video, boolean nativ) {
int orCount = (banner ? 1 : 0) + (video ? 1 : 0) + (nativ ? 1 : 0);
if (baseFilter == IMP_NONE || orCount == 0) {
return baseFilter;
}
Predicate<Imp> typeFilter = null;
if (banner) {
typeFilter = Imp::hasBanner;
}
if (video) {
typeFilter = typeFilter == null ? Imp::hasVideo : typeFilter.or(Imp::hasVideo);
}
if (nativ) {
typeFilter = typeFilter == null ? Imp::hasNative : typeFilter.or(Imp::hasNative);
}
return baseFilter == IMP_ALL ? typeFilter : baseFilter.and(typeFilter);
} } | public class class_name {
public static Predicate<Imp> addFilters(
Predicate<Imp> baseFilter, boolean banner, boolean video, boolean nativ) {
int orCount = (banner ? 1 : 0) + (video ? 1 : 0) + (nativ ? 1 : 0);
if (baseFilter == IMP_NONE || orCount == 0) {
return baseFilter; // depends on control dependency: [if], data = [none]
}
Predicate<Imp> typeFilter = null;
if (banner) {
typeFilter = Imp::hasBanner; // depends on control dependency: [if], data = [none]
}
if (video) {
typeFilter = typeFilter == null ? Imp::hasVideo : typeFilter.or(Imp::hasVideo); // depends on control dependency: [if], data = [none]
}
if (nativ) {
typeFilter = typeFilter == null ? Imp::hasNative : typeFilter.or(Imp::hasNative); // depends on control dependency: [if], data = [none]
}
return baseFilter == IMP_ALL ? typeFilter : baseFilter.and(typeFilter);
} } |
public class class_name {
protected void removeTab(GalleryTabId tabId) {
if (m_tabIds != null) {
List<GalleryTabId> tabs = new ArrayList<GalleryTabId>(Arrays.asList(m_tabIds));
if (tabs.contains(tabId)) {
m_tabIds = new GalleryTabId[tabs.size() - 1];
tabs.remove(tabId);
m_tabIds = tabs.toArray(new GalleryTabId[tabs.size()]);
}
}
} } | public class class_name {
protected void removeTab(GalleryTabId tabId) {
if (m_tabIds != null) {
List<GalleryTabId> tabs = new ArrayList<GalleryTabId>(Arrays.asList(m_tabIds));
if (tabs.contains(tabId)) {
m_tabIds = new GalleryTabId[tabs.size() - 1]; // depends on control dependency: [if], data = [none]
tabs.remove(tabId); // depends on control dependency: [if], data = [none]
m_tabIds = tabs.toArray(new GalleryTabId[tabs.size()]); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
public void marshall(DeletePresetRequest deletePresetRequest, ProtocolMarshaller protocolMarshaller) {
if (deletePresetRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(deletePresetRequest.getName(), NAME_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(DeletePresetRequest deletePresetRequest, ProtocolMarshaller protocolMarshaller) {
if (deletePresetRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(deletePresetRequest.getName(), NAME_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public void remove(Identity oid)
{
try
{
jcsCache.remove(oid.toString());
}
catch (CacheException e)
{
throw new RuntimeCacheException(e.getMessage());
}
} } | public class class_name {
public void remove(Identity oid)
{
try
{
jcsCache.remove(oid.toString());
// depends on control dependency: [try], data = [none]
}
catch (CacheException e)
{
throw new RuntimeCacheException(e.getMessage());
}
// depends on control dependency: [catch], data = [none]
} } |
public class class_name {
protected TablePopulationRequirements checkAddDropIfTableIsEmptyWhitelist(final CatalogType suspect,
final ChangeType changeType) {
TablePopulationRequirements retval = null;
// handle adding an index - presumably unique
if (suspect instanceof Index) {
Index idx = (Index) suspect;
String indexName = idx.getTypeName();
retval = new TablePopulationRequirements(indexName);
String tableName = idx.getParent().getTypeName();
retval.addTableName(tableName);
if (! idx.getIssafewithnonemptysources()) {
retval.setErrorMessage("Unable to create index " + indexName +
" while the table contains data." +
" The index definition uses operations that cannot be applied " +
"if table " + tableName + " is not empty.");
}
else if (idx.getUnique()) {
retval.setErrorMessage(
String.format(
"Unable to add unique index %s because table %s is not empty.",
indexName,
tableName));
}
return retval;
}
CatalogType parent = suspect.getParent();
// handle changes to columns in an index - presumably drops and presumably unique
if ((suspect instanceof ColumnRef) && (parent instanceof Index)) {
Index idx = (Index) parent;
assert(idx.getUnique());
assert(changeType == ChangeType.DELETION);
Table table = (Table) idx.getParent();
String indexName = idx.getTypeName();
String tableName = table.getTypeName();
String errorMessage =
String.format(
"Unable to remove column %s from unique index %s because table %s is not empty.",
suspect.getTypeName(),
indexName,
tableName);
retval = new TablePopulationRequirements(indexName, tableName, errorMessage);
retval.addTableName(tableName);
return retval;
}
if ((suspect instanceof Column) && (parent instanceof Table) && (changeType == ChangeType.ADDITION)) {
String tableName = parent.getTypeName();
retval = new TablePopulationRequirements(tableName);
retval.addTableName(tableName);
retval.setErrorMessage(
String.format(
"Unable to add NOT NULL column %s because table %s is not empty and no default value was specified.",
suspect.getTypeName(), tableName));
return retval;
}
// Check to see if a table is a materialized view. If
// so, we want to check if the table is safe for non-empty
// source tables, and leave the correct error message if so.
if (suspect instanceof Table) {
Table tbl = (Table)suspect;
if (tbl.getMvhandlerinfo().size() > 0) {
MaterializedViewHandlerInfo mvhInfo = tbl.getMvhandlerinfo().get("mvhandlerinfo");
if ( mvhInfo != null && ( ! mvhInfo.getIssafewithnonemptysources()) ) {
retval = getMVHandlerInfoMessage(mvhInfo);
if (retval != null) {
return retval;
}
}
} else {
MaterializedViewInfo mvInfo = MaterializedViewProcessor.getMaterializedViewInfo(tbl);
if (mvInfo != null && ( ! mvInfo.getIssafewithnonemptysources())) {
retval = getMVInfoMessage(tbl, mvInfo);
if (retval != null) {
return retval;
}
}
}
if (TableType.isPersistentMigrate(tbl.getTabletype())) {
m_requiresNewExportGeneration = true;
}
}
return null;
} } | public class class_name {
protected TablePopulationRequirements checkAddDropIfTableIsEmptyWhitelist(final CatalogType suspect,
final ChangeType changeType) {
TablePopulationRequirements retval = null;
// handle adding an index - presumably unique
if (suspect instanceof Index) {
Index idx = (Index) suspect;
String indexName = idx.getTypeName();
retval = new TablePopulationRequirements(indexName); // depends on control dependency: [if], data = [none]
String tableName = idx.getParent().getTypeName();
retval.addTableName(tableName); // depends on control dependency: [if], data = [none]
if (! idx.getIssafewithnonemptysources()) {
retval.setErrorMessage("Unable to create index " + indexName +
" while the table contains data." +
" The index definition uses operations that cannot be applied " +
"if table " + tableName + " is not empty."); // depends on control dependency: [if], data = [none]
}
else if (idx.getUnique()) {
retval.setErrorMessage(
String.format(
"Unable to add unique index %s because table %s is not empty.",
indexName,
tableName)); // depends on control dependency: [if], data = [none]
}
return retval; // depends on control dependency: [if], data = [none]
}
CatalogType parent = suspect.getParent();
// handle changes to columns in an index - presumably drops and presumably unique
if ((suspect instanceof ColumnRef) && (parent instanceof Index)) {
Index idx = (Index) parent;
assert(idx.getUnique()); // depends on control dependency: [if], data = [none]
assert(changeType == ChangeType.DELETION); // depends on control dependency: [if], data = [none]
Table table = (Table) idx.getParent();
String indexName = idx.getTypeName();
String tableName = table.getTypeName();
String errorMessage =
String.format(
"Unable to remove column %s from unique index %s because table %s is not empty.",
suspect.getTypeName(),
indexName,
tableName);
retval = new TablePopulationRequirements(indexName, tableName, errorMessage); // depends on control dependency: [if], data = [none]
retval.addTableName(tableName); // depends on control dependency: [if], data = [none]
return retval; // depends on control dependency: [if], data = [none]
}
if ((suspect instanceof Column) && (parent instanceof Table) && (changeType == ChangeType.ADDITION)) {
String tableName = parent.getTypeName();
retval = new TablePopulationRequirements(tableName); // depends on control dependency: [if], data = [none]
retval.addTableName(tableName); // depends on control dependency: [if], data = [none]
retval.setErrorMessage(
String.format(
"Unable to add NOT NULL column %s because table %s is not empty and no default value was specified.",
suspect.getTypeName(), tableName)); // depends on control dependency: [if], data = [none]
return retval; // depends on control dependency: [if], data = [none]
}
// Check to see if a table is a materialized view. If
// so, we want to check if the table is safe for non-empty
// source tables, and leave the correct error message if so.
if (suspect instanceof Table) {
Table tbl = (Table)suspect;
if (tbl.getMvhandlerinfo().size() > 0) {
MaterializedViewHandlerInfo mvhInfo = tbl.getMvhandlerinfo().get("mvhandlerinfo");
if ( mvhInfo != null && ( ! mvhInfo.getIssafewithnonemptysources()) ) {
retval = getMVHandlerInfoMessage(mvhInfo); // depends on control dependency: [if], data = [none]
if (retval != null) {
return retval; // depends on control dependency: [if], data = [none]
}
}
} else {
MaterializedViewInfo mvInfo = MaterializedViewProcessor.getMaterializedViewInfo(tbl);
if (mvInfo != null && ( ! mvInfo.getIssafewithnonemptysources())) {
retval = getMVInfoMessage(tbl, mvInfo); // depends on control dependency: [if], data = [none]
if (retval != null) {
return retval; // depends on control dependency: [if], data = [none]
}
}
}
if (TableType.isPersistentMigrate(tbl.getTabletype())) {
m_requiresNewExportGeneration = true; // depends on control dependency: [if], data = [none]
}
}
return null;
} } |
public class class_name {
@Override
protected LofDataSet mergeState(LofDataSet baseDataSet, LofDataSet targetDataSet,
Map<String, Object> mergeConfig)
{
int kn = Integer.parseInt(mergeConfig.get(LofConfKey.KN).toString());
int maxDataCount = Integer.parseInt(mergeConfig.get(LofConfKey.MAX_DATA_COUNT).toString());
// 学習データの保持する対象点を新しい方から最大保持数個だけ取得
LofDataSet mergedDataSet = LofCalculator.mergeDataSet(baseDataSet, targetDataSet,
maxDataCount);
// 中間データ保持設定が存在しない場合は中間データの生成は行わない。
if (mergeConfig.containsKey(LofConfKey.HAS_INTERMEDIATE) == false)
{
return mergedDataSet;
}
// 中間データ保持設定が存在し、かつ「true」の場合のみ中間データの生成を行う。
if (Boolean.getBoolean(mergeConfig.get(LofConfKey.HAS_INTERMEDIATE).toString()) == true)
{
LofCalculator.initDataSet(kn, mergedDataSet);
}
return mergedDataSet;
} } | public class class_name {
@Override
protected LofDataSet mergeState(LofDataSet baseDataSet, LofDataSet targetDataSet,
Map<String, Object> mergeConfig)
{
int kn = Integer.parseInt(mergeConfig.get(LofConfKey.KN).toString());
int maxDataCount = Integer.parseInt(mergeConfig.get(LofConfKey.MAX_DATA_COUNT).toString());
// 学習データの保持する対象点を新しい方から最大保持数個だけ取得
LofDataSet mergedDataSet = LofCalculator.mergeDataSet(baseDataSet, targetDataSet,
maxDataCount);
// 中間データ保持設定が存在しない場合は中間データの生成は行わない。
if (mergeConfig.containsKey(LofConfKey.HAS_INTERMEDIATE) == false)
{
return mergedDataSet; // depends on control dependency: [if], data = [none]
}
// 中間データ保持設定が存在し、かつ「true」の場合のみ中間データの生成を行う。
if (Boolean.getBoolean(mergeConfig.get(LofConfKey.HAS_INTERMEDIATE).toString()) == true)
{
LofCalculator.initDataSet(kn, mergedDataSet); // depends on control dependency: [if], data = [none]
}
return mergedDataSet;
} } |
public class class_name {
public void uncheckTypes(Collection<String> types) {
for (String type : types) {
CmsListItem item = (CmsListItem)m_scrollList.getItem(type);
if (item != null) {
item.getCheckBox().setChecked(false);
}
}
} } | public class class_name {
public void uncheckTypes(Collection<String> types) {
for (String type : types) {
CmsListItem item = (CmsListItem)m_scrollList.getItem(type);
if (item != null) {
item.getCheckBox().setChecked(false); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
public void report(PrintWriter w) {
w.println("You are authenticated as: "+authentication.getName());
w.println("Groups that you are in:");
for (GrantedAuthority auth : authentication.getAuthorities()) {
w.println(" "+auth.getAuthority());
}
w.println("Permission you need to have (but didn't): "+permission.getId());
for (Permission p=permission.impliedBy; p!=null; p=p.impliedBy) {
w.println(" ... which is implied by: "+p.getId());
}
} } | public class class_name {
public void report(PrintWriter w) {
w.println("You are authenticated as: "+authentication.getName());
w.println("Groups that you are in:");
for (GrantedAuthority auth : authentication.getAuthorities()) {
w.println(" "+auth.getAuthority()); // depends on control dependency: [for], data = [auth]
}
w.println("Permission you need to have (but didn't): "+permission.getId());
for (Permission p=permission.impliedBy; p!=null; p=p.impliedBy) {
w.println(" ... which is implied by: "+p.getId()); // depends on control dependency: [for], data = [none]
}
} } |
public class class_name {
@Override
public int getInt(final String key, final int defolt) {
try {
String value = get(key);
if (value == null) {
return defolt;
}
return Integer.parseInt(value);
} catch (NumberFormatException ex) {
throw new ConversionException(ex);
}
} } | public class class_name {
@Override
public int getInt(final String key, final int defolt) {
try {
String value = get(key);
if (value == null) {
return defolt; // depends on control dependency: [if], data = [none]
}
return Integer.parseInt(value); // depends on control dependency: [try], data = [none]
} catch (NumberFormatException ex) {
throw new ConversionException(ex);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public InstanceResizePolicy withInstancesToTerminate(String... instancesToTerminate) {
if (this.instancesToTerminate == null) {
setInstancesToTerminate(new com.amazonaws.internal.SdkInternalList<String>(instancesToTerminate.length));
}
for (String ele : instancesToTerminate) {
this.instancesToTerminate.add(ele);
}
return this;
} } | public class class_name {
public InstanceResizePolicy withInstancesToTerminate(String... instancesToTerminate) {
if (this.instancesToTerminate == null) {
setInstancesToTerminate(new com.amazonaws.internal.SdkInternalList<String>(instancesToTerminate.length)); // depends on control dependency: [if], data = [none]
}
for (String ele : instancesToTerminate) {
this.instancesToTerminate.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} } |
public class class_name {
public Long asLongObj() {
if (current == null)
return null;
if (current instanceof Number) {
if (current instanceof Long)
return (Long) current;
if (current instanceof Integer)
return Long.valueOf(((Number) current).longValue());
return null;
}
return null;
} } | public class class_name {
public Long asLongObj() {
if (current == null)
return null;
if (current instanceof Number) {
if (current instanceof Long)
return (Long) current;
if (current instanceof Integer)
return Long.valueOf(((Number) current).longValue());
return null; // depends on control dependency: [if], data = [none]
}
return null;
} } |
public class class_name {
public static int countMatches(String str, String sub) {
if (isEmpty(str) || isEmpty(sub)) {
return 0;
}
int count = 0;
int idx = 0;
while ((idx = str.indexOf(sub, idx)) != -1) {
count++;
idx += sub.length();
}
return count;
} } | public class class_name {
public static int countMatches(String str, String sub) {
if (isEmpty(str) || isEmpty(sub)) {
return 0; // depends on control dependency: [if], data = [none]
}
int count = 0;
int idx = 0;
while ((idx = str.indexOf(sub, idx)) != -1) {
count++; // depends on control dependency: [while], data = [none]
idx += sub.length(); // depends on control dependency: [while], data = [none]
}
return count;
} } |
public class class_name {
public Reference parse(String sig) throws ParseException {
// Break sig apart into qualifiedExpr member paramTypes.
JCTree qualExpr;
Name member;
List<JCTree> paramTypes;
Log.DeferredDiagnosticHandler deferredDiagnosticHandler
= new Log.DeferredDiagnosticHandler(fac.log);
try {
int hash = sig.indexOf("#");
int lparen = sig.indexOf("(", hash + 1);
if (hash == -1) {
if (lparen == -1) {
qualExpr = parseType(sig);
member = null;
} else {
qualExpr = null;
member = parseMember(sig.substring(0, lparen));
}
} else {
qualExpr = (hash == 0) ? null : parseType(sig.substring(0, hash));
if (lparen == -1)
member = parseMember(sig.substring(hash + 1));
else
member = parseMember(sig.substring(hash + 1, lparen));
}
if (lparen < 0) {
paramTypes = null;
} else {
int rparen = sig.indexOf(")", lparen);
if (rparen != sig.length() - 1)
throw new ParseException("dc.ref.bad.parens");
paramTypes = parseParams(sig.substring(lparen + 1, rparen));
}
if (!deferredDiagnosticHandler.getDiagnostics().isEmpty())
throw new ParseException("dc.ref.syntax.error");
} finally {
fac.log.popDiagnosticHandler(deferredDiagnosticHandler);
}
return new Reference(qualExpr, member, paramTypes);
} } | public class class_name {
public Reference parse(String sig) throws ParseException {
// Break sig apart into qualifiedExpr member paramTypes.
JCTree qualExpr;
Name member;
List<JCTree> paramTypes;
Log.DeferredDiagnosticHandler deferredDiagnosticHandler
= new Log.DeferredDiagnosticHandler(fac.log);
try {
int hash = sig.indexOf("#");
int lparen = sig.indexOf("(", hash + 1);
if (hash == -1) {
if (lparen == -1) {
qualExpr = parseType(sig); // depends on control dependency: [if], data = [none]
member = null; // depends on control dependency: [if], data = [none]
} else {
qualExpr = null; // depends on control dependency: [if], data = [none]
member = parseMember(sig.substring(0, lparen)); // depends on control dependency: [if], data = [none]
}
} else {
qualExpr = (hash == 0) ? null : parseType(sig.substring(0, hash)); // depends on control dependency: [if], data = [(hash]
if (lparen == -1)
member = parseMember(sig.substring(hash + 1));
else
member = parseMember(sig.substring(hash + 1, lparen));
}
if (lparen < 0) {
paramTypes = null; // depends on control dependency: [if], data = [none]
} else {
int rparen = sig.indexOf(")", lparen);
if (rparen != sig.length() - 1)
throw new ParseException("dc.ref.bad.parens");
paramTypes = parseParams(sig.substring(lparen + 1, rparen)); // depends on control dependency: [if], data = [(lparen]
}
if (!deferredDiagnosticHandler.getDiagnostics().isEmpty())
throw new ParseException("dc.ref.syntax.error");
} finally {
fac.log.popDiagnosticHandler(deferredDiagnosticHandler);
}
return new Reference(qualExpr, member, paramTypes);
} } |
public class class_name {
public static boolean isAnyNull(Object... _objects) {
if (_objects == null) {
return true;
}
for (Object obj : _objects) {
if (obj == null) {
return true;
}
}
return false;
} } | public class class_name {
public static boolean isAnyNull(Object... _objects) {
if (_objects == null) {
return true; // depends on control dependency: [if], data = [none]
}
for (Object obj : _objects) {
if (obj == null) {
return true; // depends on control dependency: [if], data = [none]
}
}
return false;
} } |
public class class_name {
public static String nameHash(X500Principal subjectDN) {
try {
return hash(encodePrincipal(subjectDN));
} catch (Exception e) {
logger.error("", e);
return null;
}
} } | public class class_name {
public static String nameHash(X500Principal subjectDN) {
try {
return hash(encodePrincipal(subjectDN)); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
logger.error("", e);
return null;
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public static Date parseFromIso8601(String s) {
synchronized (MONITOR) {
try {
return ISO8601.parse(s);
} catch (ParseException e) {
throw new IllegalArgumentException("Unable to parse date", e);
}
}
} } | public class class_name {
public static Date parseFromIso8601(String s) {
synchronized (MONITOR) {
try {
return ISO8601.parse(s); // depends on control dependency: [try], data = [none]
} catch (ParseException e) {
throw new IllegalArgumentException("Unable to parse date", e);
} // depends on control dependency: [catch], data = [none]
}
} } |
public class class_name {
private String getTaskType(TaskInProgress tip) {
if (tip.isJobCleanupTask()) {
return Values.CLEANUP.name();
} else if (tip.isJobSetupTask()) {
return Values.SETUP.name();
} else if (tip.isMapTask()) {
return Values.MAP.name();
} else {
return Values.REDUCE.name();
}
} } | public class class_name {
private String getTaskType(TaskInProgress tip) {
if (tip.isJobCleanupTask()) {
return Values.CLEANUP.name(); // depends on control dependency: [if], data = [none]
} else if (tip.isJobSetupTask()) {
return Values.SETUP.name(); // depends on control dependency: [if], data = [none]
} else if (tip.isMapTask()) {
return Values.MAP.name(); // depends on control dependency: [if], data = [none]
} else {
return Values.REDUCE.name(); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static String generatePassword(int length)
{
Params.strictPositive(length, "Password length");
StringBuilder password = new StringBuilder(length);
Random random = new Random();
for(int i = 0; i < length; i++) {
String charCategory = PASSWORD_DICTIONARY[random.nextInt(PASSWORD_DICTIONARY.length)];
int position = random.nextInt(charCategory.length());
password.append(charCategory.charAt(position));
}
return new String(password);
} } | public class class_name {
public static String generatePassword(int length)
{
Params.strictPositive(length, "Password length");
StringBuilder password = new StringBuilder(length);
Random random = new Random();
for(int i = 0; i < length; i++) {
String charCategory = PASSWORD_DICTIONARY[random.nextInt(PASSWORD_DICTIONARY.length)];
int position = random.nextInt(charCategory.length());
password.append(charCategory.charAt(position));
// depends on control dependency: [for], data = [none]
}
return new String(password);
} } |
public class class_name {
public Callable<?> get(String id, Class<?> type) {
Resolver callable = this.id2callableMap.get(id);
if (callable == null) {
callable = new Resolver(id, type);
this.id2callableMap.put(id, callable);
}
return callable;
} } | public class class_name {
public Callable<?> get(String id, Class<?> type) {
Resolver callable = this.id2callableMap.get(id);
if (callable == null) {
callable = new Resolver(id, type); // depends on control dependency: [if], data = [none]
this.id2callableMap.put(id, callable); // depends on control dependency: [if], data = [none]
}
return callable;
} } |
public class class_name {
private static DSFactory doCreate(Setting setting) {
try {
return new HikariDSFactory(setting);
} catch (NoClassDefFoundError e) {
// ignore
}
try {
return new DruidDSFactory(setting);
} catch (NoClassDefFoundError e) {
// ignore
}
try {
return new TomcatDSFactory(setting);
} catch (NoClassDefFoundError e) {
//如果未引入包,此处会报org.apache.tomcat.jdbc.pool.PoolConfiguration未找到错误
//因为org.apache.tomcat.jdbc.pool.DataSource实现了此接口,会首先检查接口的存在与否
// ignore
}
try {
return new DbcpDSFactory(setting);
} catch (NoClassDefFoundError e) {
// ignore
}
try {
return new C3p0DSFactory(setting);
} catch (NoClassDefFoundError e) {
// ignore
}
return new PooledDSFactory(setting);
} } | public class class_name {
private static DSFactory doCreate(Setting setting) {
try {
return new HikariDSFactory(setting);
// depends on control dependency: [try], data = [none]
} catch (NoClassDefFoundError e) {
// ignore
}
// depends on control dependency: [catch], data = [none]
try {
return new DruidDSFactory(setting);
// depends on control dependency: [try], data = [none]
} catch (NoClassDefFoundError e) {
// ignore
}
// depends on control dependency: [catch], data = [none]
try {
return new TomcatDSFactory(setting);
// depends on control dependency: [try], data = [none]
} catch (NoClassDefFoundError e) {
//如果未引入包,此处会报org.apache.tomcat.jdbc.pool.PoolConfiguration未找到错误
//因为org.apache.tomcat.jdbc.pool.DataSource实现了此接口,会首先检查接口的存在与否
// ignore
}
// depends on control dependency: [catch], data = [none]
try {
return new DbcpDSFactory(setting);
// depends on control dependency: [try], data = [none]
} catch (NoClassDefFoundError e) {
// ignore
}
// depends on control dependency: [catch], data = [none]
try {
return new C3p0DSFactory(setting);
// depends on control dependency: [try], data = [none]
} catch (NoClassDefFoundError e) {
// ignore
}
// depends on control dependency: [catch], data = [none]
return new PooledDSFactory(setting);
} } |
public class class_name {
    /**
     * Creates an {@code IntStreamEx} whose elements are the bytes of the
     * given input stream, read lazily one at a time. The stream is ordered,
     * non-null and of unknown length; closing it closes the underlying
     * stream. Any {@link IOException} surfaces as {@link UncheckedIOException}.
     */
    public static IntStreamEx of(InputStream is) {
        Spliterator.OfInt byteSource = new AbstractIntSpliterator(Long.MAX_VALUE, Spliterator.ORDERED
            | Spliterator.NONNULL) {
            @Override
            public boolean tryAdvance(IntConsumer action) {
                final int next;
                try {
                    next = is.read();
                } catch (IOException ioe) {
                    throw new UncheckedIOException(ioe);
                }
                if (next == -1) {
                    return false;
                }
                action.accept(next);
                return true;
            }
        };
        return of(byteSource).onClose(() -> {
            try {
                is.close();
            } catch (IOException ioe) {
                throw new UncheckedIOException(ioe);
            }
        });
} } | public class class_name {
    // NOTE(review): control-dependency-annotated copy of of(InputStream);
    // the "depends on control dependency" comments are dataset labels.
    public static IntStreamEx of(InputStream is) {
        Spliterator.OfInt spliterator = new AbstractIntSpliterator(Long.MAX_VALUE, Spliterator.ORDERED
            | Spliterator.NONNULL) {
            @Override
            public boolean tryAdvance(IntConsumer action) {
                try {
                    int next = is.read();
                    if (next == -1)
                        return false;
                    action.accept(next); // depends on control dependency: [try], data = [none]
                    return true; // depends on control dependency: [try], data = [none]
                } catch (IOException e) {
                    throw new UncheckedIOException(e);
                } // depends on control dependency: [catch], data = [none]
            }
        };
        return of(spliterator).onClose(() -> {
            try {
                is.close(); // depends on control dependency: [try], data = [none]
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            } // depends on control dependency: [catch], data = [none]
        });
} } |
public class class_name {
    /**
     * Case-insensitive content comparison of two {@link CharSequence}s.
     * Two {@code null} references compare equal; {@code null} never equals a
     * non-null sequence. {@code AsciiString} operands delegate to their
     * optimized comparison; otherwise the sequences are compared char by
     * char via {@code equalsIgnoreCase}.
     */
    public static boolean contentEqualsIgnoreCase(CharSequence a, CharSequence b) {
        if (a == null || b == null) {
            // Equal only when both references are null.
            return a == b;
        }
        if (a.getClass() == AsciiString.class) {
            return ((AsciiString) a).contentEqualsIgnoreCase(b);
        }
        if (b.getClass() == AsciiString.class) {
            return ((AsciiString) b).contentEqualsIgnoreCase(a);
        }
        final int length = a.length();
        if (length != b.length()) {
            return false;
        }
        for (int i = 0; i < length; ++i) {
            if (!equalsIgnoreCase(a.charAt(i), b.charAt(i))) {
                return false;
            }
        }
        return true;
} } | public class class_name {
    // NOTE(review): control-dependency-annotated copy of
    // contentEqualsIgnoreCase; label comments are dataset artifacts.
    public static boolean contentEqualsIgnoreCase(CharSequence a, CharSequence b) {
        if (a == null || b == null) {
            return a == b; // depends on control dependency: [if], data = [none]
        }
        if (a.getClass() == AsciiString.class) {
            return ((AsciiString) a).contentEqualsIgnoreCase(b); // depends on control dependency: [if], data = [none]
        }
        if (b.getClass() == AsciiString.class) {
            return ((AsciiString) b).contentEqualsIgnoreCase(a); // depends on control dependency: [if], data = [none]
        }
        if (a.length() != b.length()) {
            return false; // depends on control dependency: [if], data = [none]
        }
        for (int i = 0; i < a.length(); ++i) {
            if (!equalsIgnoreCase(a.charAt(i), b.charAt(i))) {
                return false; // depends on control dependency: [if], data = [none]
            }
        }
        return true;
} } |
public class class_name {
    /**
     * Registers an attribute action set for the namespace specification
     * built from {@code ns} and {@code wildcard}. Returns {@code false}
     * (without modifying the map) when that specification is already bound
     * or competes with an existing binding; otherwise stores the actions
     * and returns {@code true}.
     */
    boolean bindAttribute(String ns, String wildcard, AttributeActionSet actions) {
        NamespaceSpecification candidate = new NamespaceSpecification(ns, wildcard);
        if (nssAttributeMap.get(candidate) != null) {
            return false;
        }
        for (Enumeration e = nssAttributeMap.keys(); e.hasMoreElements();) {
            NamespaceSpecification existing = (NamespaceSpecification) e.nextElement();
            if (candidate.compete(existing)) {
                return false;
            }
        }
        nssAttributeMap.put(candidate, actions);
        return true;
} } | public class class_name {
    // NOTE(review): control-dependency-annotated copy of bindAttribute;
    // label comments are dataset artifacts.
    boolean bindAttribute(String ns, String wildcard, AttributeActionSet actions) {
        NamespaceSpecification nss = new NamespaceSpecification(ns, wildcard);
        if (nssAttributeMap.get(nss) != null)
            return false;
        for (Enumeration e = nssAttributeMap.keys(); e.hasMoreElements();) {
            NamespaceSpecification nssI = (NamespaceSpecification)e.nextElement();
            if (nss.compete(nssI)) {
                return false; // depends on control dependency: [if], data = [none]
            }
        }
        nssAttributeMap.put(nss, actions);
        return true;
} } |
public class class_name {
    /**
     * Checks whether the given command-line argument is present, either as
     * an exact token or as an {@code arg=value} assignment.
     *
     * Matching is case-insensitive for both forms. (Bug fix: previously the
     * exact match used {@code equalsIgnoreCase} while the {@code arg=}
     * prefix was compared case-sensitively via {@code startsWith}, so
     * {@code --FOO=x} did not match {@code --foo} even though {@code --FOO}
     * did.)
     *
     * @param args the argument array to search; may be {@code null} or empty
     * @param arg  the argument name to look for; may be {@code null} or empty
     * @return {@code true} if {@code arg} or {@code arg=...} occurs in
     *         {@code args} (ignoring case), {@code false} otherwise
     */
    public static boolean containsArgument(String[] args, String arg) {
        if (args == null || arg == null || arg.length() == 0 || args.length == 0) {
            return false;
        }
        final String prefix = arg + "=";
        for (String a : args) {
            if (a.equalsIgnoreCase(arg)) {
                return true;
            }
            // Case-insensitive prefix test; equivalent to lower-casing both
            // sides before startsWith, without allocating copies.
            if (a.regionMatches(true, 0, prefix, 0, prefix.length())) {
                return true;
            }
        }
        return false;
} } | public class class_name {
    // NOTE(review): control-dependency-annotated copy of containsArgument;
    // label comments are dataset artifacts. Note the exact match is
    // case-insensitive while the "arg=" prefix match is case-sensitive -
    // TODO confirm that asymmetry is intended.
    public static boolean containsArgument(String[] args, String arg) {
        if (args == null || arg == null || arg.length() == 0 || args.length == 0) {
            return false; // depends on control dependency: [if], data = [none]
        } else {
            for (String a : args) {
                if (a.equalsIgnoreCase(arg)) {
                    return true; // depends on control dependency: [if], data = [none]
                } else if (a.startsWith(arg + "=")) {
                    return true; // depends on control dependency: [if], data = [none]
                }
            }
            return false; // depends on control dependency: [if], data = [none]
        }
} } |
public class class_name {
    /**
     * Evaluates {@code operation} if it is supported, reporting whether it
     * completed successfully; unsupported operations yield {@code false}.
     * When invoked with an empty stack, a sentinel ROOT frame is pushed
     * (bypassing {@code #push()}) for the duration of the call so the
     * internal cache survives the supports/eval lifecycle, and is popped on
     * exit even if evaluation throws.
     */
    public final synchronized boolean evalSuccess(Operation<?> operation, Hint... hints) {
        final boolean pushedRoot = stack.isEmpty();
        if (pushedRoot) {
            stack.push(Frame.ROOT);
        }
        try {
            if (!supports(operation, hints)) {
                return false;
            }
            eval(operation, hints);
            return operation.isSuccessful();
        } finally {
            if (pushedRoot) {
                pop(Frame.ROOT);
            }
        }
} } | public class class_name {
    // NOTE(review): control-dependency-annotated copy of evalSuccess;
    // label comments are dataset artifacts.
    public final synchronized boolean evalSuccess(Operation<?> operation, Hint... hints) {
        final boolean dummyRoot = stack.isEmpty();
        if (dummyRoot) {
            // add a root frame to preserve our cache "around" the supports/eval lifecycle, bypassing #push():
            stack.push(Frame.ROOT); // depends on control dependency: [if], data = [none]
        }
        try {
            if (supports(operation, hints)) {
                eval(operation, hints); // depends on control dependency: [if], data = [none]
                return operation.isSuccessful(); // depends on control dependency: [if], data = [none]
            }
        } finally {
            if (dummyRoot) {
                pop(Frame.ROOT); // depends on control dependency: [if], data = [none]
            }
        }
        return false;
} } |
public class class_name {
    /**
     * Maps a Mongo {@code DBObject} onto the given entity instance and
     * returns it, using {@code cache} to short-circuit entities already
     * materialized (and to break cycles in recursive references).
     *
     * Four entity shapes are handled: a bare {@code MappedField} (single
     * value read, early return), a {@code Map} (all keys copied, embedded
     * documents recursed), a {@code Collection} (elements copied likewise),
     * and a mapped POJO (per-field read wrapped in PreLoad/PostLoad
     * lifecycle callbacks).
     *
     * @deprecated retained for compatibility - TODO(review): point callers
     *             at the current mapping API
     */
    @Deprecated
    public <T> T fromDb(final Datastore datastore, final DBObject dbObject, final T entity, final EntityCache cache) {
        //hack to bypass things and just read the value.
        if (entity instanceof MappedField) {
            readMappedField(datastore, (MappedField) entity, entity, cache, dbObject);
            return entity;
        }
        // check the history key (a key is the namespace + id)
        if (dbObject.containsField("_id") && getMappedClass(entity).getIdField() != null
            && getMappedClass(entity).getEntityAnnotation() != null) {
            final Key<T> key = new Key(entity.getClass(), getCollectionName(entity.getClass()), dbObject.get("_id"));
            final T cachedInstance = cache.getEntity(key);
            if (cachedInstance != null) {
                return cachedInstance;
            } else {
                cache.putEntity(key, entity); // to avoid stackOverflow in recursive refs
            }
        }
        if (entity instanceof Map) {
            Map<String, Object> map = (Map<String, Object>) entity;
            for (String key : dbObject.keySet()) {
                Object o = dbObject.get(key);
                // Embedded documents are recursively mapped; scalars copied as-is.
                map.put(key, (o instanceof DBObject) ? fromDBObject(datastore, (DBObject) o) : o);
            }
        } else if (entity instanceof Collection) {
            Collection<Object> collection = (Collection<Object>) entity;
            for (Object o : ((List) dbObject)) {
                collection.add((o instanceof DBObject) ? fromDBObject(datastore, (DBObject) o) : o);
            }
        } else {
            final MappedClass mc = getMappedClass(entity);
            final DBObject updated = mc.callLifecycleMethods(PreLoad.class, entity, dbObject, this);
            try {
                for (final MappedField mf : mc.getPersistenceFields()) {
                    readMappedField(datastore, mf, entity, cache, updated);
                }
            } catch (final MappingException e) {
                Object id = dbObject.get("_id");
                String entityName = entity.getClass().getName();
                throw new MappingException(format("Could not map %s with ID: %s in database '%s'", entityName, id,
                    datastore.getDB().getName()), e);
            }
            if (updated.containsField("_id") && getMappedClass(entity).getIdField() != null) {
                final Key key = new Key(entity.getClass(), getCollectionName(entity.getClass()), updated.get("_id"));
                cache.putEntity(key, entity);
            }
            mc.callLifecycleMethods(PostLoad.class, entity, updated, this);
        }
        return entity;
} } | public class class_name {
    // NOTE(review): control-dependency-annotated copy of fromDb; label
    // comments are dataset artifacts.
    @Deprecated
    public <T> T fromDb(final Datastore datastore, final DBObject dbObject, final T entity, final EntityCache cache) {
        //hack to bypass things and just read the value.
        if (entity instanceof MappedField) {
            readMappedField(datastore, (MappedField) entity, entity, cache, dbObject); // depends on control dependency: [if], data = [none]
            return entity; // depends on control dependency: [if], data = [none]
        }
        // check the history key (a key is the namespace + id)
        if (dbObject.containsField("_id") && getMappedClass(entity).getIdField() != null
            && getMappedClass(entity).getEntityAnnotation() != null) {
            final Key<T> key = new Key(entity.getClass(), getCollectionName(entity.getClass()), dbObject.get("_id"));
            final T cachedInstance = cache.getEntity(key);
            if (cachedInstance != null) {
                return cachedInstance; // depends on control dependency: [if], data = [none]
            } else {
                cache.putEntity(key, entity); // to avoid stackOverflow in recursive refs // depends on control dependency: [if], data = [none]
            }
        }
        if (entity instanceof Map) {
            Map<String, Object> map = (Map<String, Object>) entity;
            for (String key : dbObject.keySet()) {
                Object o = dbObject.get(key);
                map.put(key, (o instanceof DBObject) ? fromDBObject(datastore, (DBObject) o) : o); // depends on control dependency: [for], data = [key]
            }
        } else if (entity instanceof Collection) {
            Collection<Object> collection = (Collection<Object>) entity;
            for (Object o : ((List) dbObject)) {
                collection.add((o instanceof DBObject) ? fromDBObject(datastore, (DBObject) o) : o); // depends on control dependency: [for], data = [o]
            }
        } else {
            final MappedClass mc = getMappedClass(entity);
            final DBObject updated = mc.callLifecycleMethods(PreLoad.class, entity, dbObject, this);
            try {
                for (final MappedField mf : mc.getPersistenceFields()) {
                    readMappedField(datastore, mf, entity, cache, updated); // depends on control dependency: [for], data = [mf]
                }
            } catch (final MappingException e) {
                Object id = dbObject.get("_id");
                String entityName = entity.getClass().getName();
                throw new MappingException(format("Could not map %s with ID: %s in database '%s'", entityName, id,
                    datastore.getDB().getName()), e);
            } // depends on control dependency: [catch], data = [none]
            if (updated.containsField("_id") && getMappedClass(entity).getIdField() != null) {
                final Key key = new Key(entity.getClass(), getCollectionName(entity.getClass()), updated.get("_id"));
                cache.putEntity(key, entity); // depends on control dependency: [if], data = [none]
            }
            mc.callLifecycleMethods(PostLoad.class, entity, updated, this); // depends on control dependency: [if], data = [none]
        }
        return entity;
} } |
public class class_name {
    /**
     * Checks name availability via the service's REST operation, returning
     * the wrapped response asynchronously. Required client settings
     * (subscription id, api version) and the request body are validated up
     * front; delegate/deserialization failures inside the callback are
     * surfaced as an error Observable rather than thrown.
     */
    public Observable<ServiceResponse<NameAvailabilityInner>> executeWithServiceResponseAsync(NameAvailabilityRequest nameAvailabilityRequest) {
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        if (nameAvailabilityRequest == null) {
            throw new IllegalArgumentException("Parameter nameAvailabilityRequest is required and cannot be null.");
        }
        Validator.validate(nameAvailabilityRequest);
        return service.execute(this.client.subscriptionId(), this.client.apiVersion(), nameAvailabilityRequest, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<NameAvailabilityInner>>>() {
                @Override
                public Observable<ServiceResponse<NameAvailabilityInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<NameAvailabilityInner> clientResponse = executeDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
} } | public class class_name {
    // NOTE(review): control-dependency-annotated copy of
    // executeWithServiceResponseAsync; label comments are dataset artifacts.
    public Observable<ServiceResponse<NameAvailabilityInner>> executeWithServiceResponseAsync(NameAvailabilityRequest nameAvailabilityRequest) {
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        if (nameAvailabilityRequest == null) {
            throw new IllegalArgumentException("Parameter nameAvailabilityRequest is required and cannot be null.");
        }
        Validator.validate(nameAvailabilityRequest);
        return service.execute(this.client.subscriptionId(), this.client.apiVersion(), nameAvailabilityRequest, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<NameAvailabilityInner>>>() {
                @Override
                public Observable<ServiceResponse<NameAvailabilityInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<NameAvailabilityInner> clientResponse = executeDelegate(response);
                        return Observable.just(clientResponse); // depends on control dependency: [try], data = [none]
                    } catch (Throwable t) {
                        return Observable.error(t);
                    } // depends on control dependency: [catch], data = [none]
                }
            });
} } |
public class class_name {
    /**
     * Writes the workbook to {@code file}, replacing any existing content.
     *
     * <p>Bug fix: the previous version built {@code new RuntimeException(e)}
     * for the write-path {@link IOException} and for close failures but
     * never threw it, silently swallowing those errors; it also closed the
     * stream by hand. Both paths now propagate, and try-with-resources
     * guarantees the stream is closed even when writing fails.
     *
     * @param file destination file (truncated, not appended)
     * @throws RuntimeException wrapping any {@link FileNotFoundException}
     *         or other {@link IOException} raised while opening, writing or
     *         closing the stream
     */
    public void write(File file) {
        try (FileOutputStream out = new FileOutputStream(file, false)) {
            workbook.write(out);
        } catch (FileNotFoundException e) {
            throw new RuntimeException(e);
        } catch (IOException e) {
            // Previously constructed but never thrown - now propagated.
            throw new RuntimeException(e);
        }
} } | public class class_name {
    // NOTE(review): control-dependency-annotated copy of write(File); label
    // comments are dataset artifacts. BUG preserved verbatim below: the
    // RuntimeException instances wrapping IOExceptions are constructed but
    // never thrown, so write/close failures are silently swallowed.
    public void write(File file) {
        FileOutputStream out = null;
        try {
            out = new FileOutputStream(file, false);
            // depends on control dependency: [try], data = [none]
            workbook.write(out);
            // depends on control dependency: [try], data = [none]
        } catch (FileNotFoundException e) {
            throw new RuntimeException(e);
        } catch (IOException e) {
            // depends on control dependency: [catch], data = [none]
            new RuntimeException(e);
        } finally {
            // depends on control dependency: [catch], data = [none]
            if (out != null)
                try {
                    out.close();
                    // depends on control dependency: [try], data = [none]
                } catch (IOException e) {
                    new RuntimeException(e);
                }
            // depends on control dependency: [catch], data = [none]
        }
} } |
public class class_name {
    /**
     * Returns the cardinality (member count) of the sorted set stored at
     * {@code key}. In pipelined mode the command is only queued and
     * {@code null} is returned; otherwise the value is fetched
     * synchronously. Driver exceptions are translated via
     * {@code convertException}.
     */
    @Override
    public Long zCard(byte[] key) {
        try {
            if (!isPipelined()) {
                return client.zcard(key);
            }
            pipeline(new JedisResult(pipeline.zcard(key)));
            return null;
        } catch (Exception ex) {
            throw convertException(ex);
        }
} } | public class class_name {
    // NOTE(review): control-dependency-annotated copy of zCard; label
    // comments are dataset artifacts.
    @Override
    public Long zCard(byte[] key) {
        try {
            if (isPipelined()) {
                pipeline(new JedisResult(pipeline.zcard(key))); // depends on control dependency: [if], data = [none]
                return null; // depends on control dependency: [if], data = [none]
            }
            return client.zcard(key); // depends on control dependency: [try], data = [none]
        } catch (Exception ex) {
            throw convertException(ex);
        } // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
    /**
     * Resolves the integer value for the given keyframe at the given
     * progress. A registered value callback takes precedence; when it is
     * absent or yields {@code null}, the result is a linear interpolation
     * between the keyframe's start and end values.
     *
     * @throws IllegalStateException if the keyframe lacks a start or end value
     */
    int getIntValue(Keyframe<Integer> keyframe, float keyframeProgress) {
        if (keyframe.startValue == null || keyframe.endValue == null) {
            throw new IllegalStateException("Missing values for keyframe.");
        }
        if (valueCallback != null) {
            //noinspection ConstantConditions
            Integer overridden = valueCallback.getValueInternal(keyframe.startFrame, keyframe.endFrame,
                keyframe.startValue, keyframe.endValue,
                keyframeProgress, getLinearCurrentKeyframeProgress(), getProgress());
            if (overridden != null) {
                return overridden;
            }
        }
        return MiscUtils.lerp(keyframe.getStartValueInt(), keyframe.getEndValueInt(), keyframeProgress);
} } | public class class_name {
    // NOTE(review): control-dependency-annotated copy of getIntValue; label
    // comments are dataset artifacts.
    int getIntValue(Keyframe<Integer> keyframe, float keyframeProgress) {
        if (keyframe.startValue == null || keyframe.endValue == null) {
            throw new IllegalStateException("Missing values for keyframe.");
        }
        if (valueCallback != null) {
            //noinspection ConstantConditions
            Integer value = valueCallback.getValueInternal(keyframe.startFrame, keyframe.endFrame,
                keyframe.startValue, keyframe.endValue,
                keyframeProgress, getLinearCurrentKeyframeProgress(), getProgress());
            if (value != null) {
                return value; // depends on control dependency: [if], data = [none]
            }
        }
        return MiscUtils.lerp(keyframe.getStartValueInt(), keyframe.getEndValueInt(), keyframeProgress);
} } |
public class class_name {
    /**
     * Appends an item to the ring buffer and returns its sequence number.
     * When the buffer is full, the oldest entry is overwritten and the head
     * sequence advances by one.
     */
    public long add(Object item) {
        long sequence = ++tailSequence;
        // Buffer full: drop the oldest element by advancing the head.
        if (sequence - items.length == headSequence) {
            headSequence++;
        }
        items[toIndex(sequence)] = item;
        return sequence;
} } | public class class_name {
    // NOTE(review): control-dependency-annotated copy of add(Object); label
    // comments are dataset artifacts.
    public long add(Object item) {
        tailSequence++;
        if (tailSequence - items.length == headSequence) {
            headSequence++; // depends on control dependency: [if], data = [none]
        }
        int index = toIndex(tailSequence);
        items[index] = item;
        return tailSequence;
} } |
public class class_name {
    /**
     * Reports whether the entire text matches the expected pattern; always
     * {@code false} when no pattern has been configured.
     */
    protected boolean verifyPatternMatch(String text) {
        return this.expectedPattern != null
            && this.expectedPattern.matcher(text).matches();
} } | public class class_name {
    // NOTE(review): control-dependency-annotated copy of verifyPatternMatch;
    // label comments are dataset artifacts.
    protected boolean verifyPatternMatch(String text) {
        if (this.expectedPattern == null) {
            return false; // depends on control dependency: [if], data = [none]
        }
        return this.expectedPattern.matcher(text).matches();
} } |
public class class_name {
    /**
     * Emits either the start ({@code bStart == true}) or the end
     * ({@code bStart == false}) of the given DOM element through the
     * underlying serializer.
     *
     * Start path: optional well-formedness check (WELLFORMED feature),
     * LSSerializer element filter (a rejected element is skipped entirely),
     * namespace recording/fixup when the NAMESPACES feature is on, then the
     * start tag plus its attribute list. End path: same filter, the end
     * tag, and a namespace-context pop when NAMESPACES is on.
     */
    protected void serializeElement(Element node, boolean bStart)
        throws SAXException {
        if (bStart) {
            fElementDepth++;
            // We use the Xalan specific startElement and starPrefixMapping calls
            // (and addAttribute and namespaceAfterStartElement) as opposed to
            // SAX specific, for performance reasons as they reduce the overhead
            // of creating an AttList object upfront.
            // well-formed=true
            if ((fFeatures & WELLFORMED) != 0) {
                isElementWellFormed(node);
            }
            // REVISIT: We apply the LSSerializer filter for elements before
            // namesapce fixup
            if (!applyFilter(node, NodeFilter.SHOW_ELEMENT)) {
                return;
            }
            // namespaces=true, record and fixup namspaced element
            if ((fFeatures & NAMESPACES) != 0) {
                fNSBinder.pushContext();
                fLocalNSBinder.reset();
                recordLocalNSDecl(node);
                fixupElementNS(node);
            }
            // Namespace normalization
            fSerializer.startElement(
                node.getNamespaceURI(),
                node.getLocalName(),
                node.getNodeName());
            serializeAttList(node);
        } else {
            fElementDepth--;
            // apply the LSSerializer filter
            if (!applyFilter(node, NodeFilter.SHOW_ELEMENT)) {
                return;
            }
            this.fSerializer.endElement(
                node.getNamespaceURI(),
                node.getLocalName(),
                node.getNodeName());
            // since endPrefixMapping was not used by SerializationHandler it was removed
            // for performance reasons.
            if ((fFeatures & NAMESPACES) != 0 ) {
                fNSBinder.popContext();
            }
        }
} } | public class class_name {
    // NOTE(review): control-dependency-annotated copy of serializeElement;
    // label comments are dataset artifacts.
    protected void serializeElement(Element node, boolean bStart)
        throws SAXException {
        if (bStart) {
            fElementDepth++;
            // We use the Xalan specific startElement and starPrefixMapping calls
            // (and addAttribute and namespaceAfterStartElement) as opposed to
            // SAX specific, for performance reasons as they reduce the overhead
            // of creating an AttList object upfront.
            // well-formed=true
            if ((fFeatures & WELLFORMED) != 0) {
                isElementWellFormed(node); // depends on control dependency: [if], data = [none]
            }
            // REVISIT: We apply the LSSerializer filter for elements before
            // namesapce fixup
            if (!applyFilter(node, NodeFilter.SHOW_ELEMENT)) {
                return; // depends on control dependency: [if], data = [none]
            }
            // namespaces=true, record and fixup namspaced element
            if ((fFeatures & NAMESPACES) != 0) {
                fNSBinder.pushContext(); // depends on control dependency: [if], data = [none]
                fLocalNSBinder.reset(); // depends on control dependency: [if], data = [none]
                recordLocalNSDecl(node); // depends on control dependency: [if], data = [none]
                fixupElementNS(node); // depends on control dependency: [if], data = [none]
            }
            // Namespace normalization
            fSerializer.startElement(
                node.getNamespaceURI(),
                node.getLocalName(),
                node.getNodeName());
            serializeAttList(node);
        } else {
            fElementDepth--;
            // apply the LSSerializer filter
            if (!applyFilter(node, NodeFilter.SHOW_ELEMENT)) {
                return; // depends on control dependency: [if], data = [none]
            }
            this.fSerializer.endElement(
                node.getNamespaceURI(),
                node.getLocalName(),
                node.getNodeName());
            // since endPrefixMapping was not used by SerializationHandler it was removed
            // for performance reasons.
            if ((fFeatures & NAMESPACES) != 0 ) {
                fNSBinder.popContext(); // depends on control dependency: [if], data = [none]
            }
        }
} } |
public class class_name {
    /**
     * Connects to MongoDB and returns an iterator over the records matched
     * by the configured query, optionally narrowed by a projection document.
     *
     * Credentials are applied per the {@code auth} mode: against the target
     * database, against the admin database, or none (legacy MONGODB-CR
     * mechanism; NOTE(review): MONGODB-CR is deprecated upstream - confirm
     * the target server still supports it). With {@code noTimeOut} set the
     * server-side cursor idle timeout is disabled so long-running passes do
     * not lose the cursor.
     *
     * @throws RuntimeException if the Mongo host cannot be resolved
     * @throws DukeException    for any other connection or query failure
     */
    public RecordIterator getRecords() {
        verifyProperty(dbname, "database");
        verifyProperty(collectionName, "collection");
        try {
            final MongoClient mongo;
            final DB database;
            final DBCollection collection;
            final DBCursor result;
            final DBObject queryDocument;
            final DBObject projectionDocument;
            // authentication mecanism via MONGODB-CR authentication http://docs.mongodb.org/manual/core/authentication/#authentication-mongodb-cr
            if(auth.equals(AUTH_ON_DB)){
                verifyProperty(username, "user-name");
                verifyProperty(password, "password");
                mongo = new MongoClient(
                    new ServerAddress(mongouri, port),
                    Arrays.asList(MongoCredential.createMongoCRCredential(username, dbname, password.toCharArray()))
                );
            }
            else if(auth.equals(AUTH_ON_ADMIN)){
                verifyProperty(username, "user-name");
                verifyProperty(password, "password");
                mongo = new MongoClient(
                    new ServerAddress(mongouri, port),
                    Arrays.asList(MongoCredential.createMongoCRCredential(username, AUTH_ON_ADMIN, password.toCharArray()))
                );
            }
            else{
                mongo = new MongoClient(new ServerAddress(mongouri, port));
            }
            // get db, collection
            database = mongo.getDB(dbname);
            collection = database.getCollection(collectionName);
            // execute query
            queryDocument = (DBObject)JSON.parse(query);
            if(projection==null){
                result = collection.find(queryDocument);
            }
            else{
                projectionDocument = (DBObject)JSON.parse(projection);
                result = collection.find(queryDocument, projectionDocument);
            }
            // See: http://api.mongodb.org/java/current/com/mongodb/DBCursor.html#addOption(int)
            // and http://api.mongodb.org/java/current/com/mongodb/Bytes.html#QUERYOPTION_NOTIMEOUT
            if(noTimeOut){
                result.addOption(Bytes.QUERYOPTION_NOTIMEOUT);
            }
            return new MongoDBIterator(result, mongo);
        } catch (UnknownHostException e) {
            throw new RuntimeException(e);
        } catch (Exception ex){
            throw new DukeException(ex);
        }
} } | public class class_name {
    // NOTE(review): control-dependency-annotated copy of getRecords; label
    // comments are dataset artifacts.
    public RecordIterator getRecords() {
        verifyProperty(dbname, "database");
        verifyProperty(collectionName, "collection");
        try {
            final MongoClient mongo;
            final DB database;
            final DBCollection collection;
            final DBCursor result;
            final DBObject queryDocument;
            final DBObject projectionDocument;
            // authentication mecanism via MONGODB-CR authentication http://docs.mongodb.org/manual/core/authentication/#authentication-mongodb-cr
            if(auth.equals(AUTH_ON_DB)){
                verifyProperty(username, "user-name"); // depends on control dependency: [if], data = [none]
                verifyProperty(password, "password"); // depends on control dependency: [if], data = [none]
                mongo = new MongoClient(
                    new ServerAddress(mongouri, port),
                    Arrays.asList(MongoCredential.createMongoCRCredential(username, dbname, password.toCharArray()))
                ); // depends on control dependency: [if], data = [none]
            }
            else if(auth.equals(AUTH_ON_ADMIN)){
                verifyProperty(username, "user-name"); // depends on control dependency: [if], data = [none]
                verifyProperty(password, "password"); // depends on control dependency: [if], data = [none]
                mongo = new MongoClient(
                    new ServerAddress(mongouri, port),
                    Arrays.asList(MongoCredential.createMongoCRCredential(username, AUTH_ON_ADMIN, password.toCharArray()))
                ); // depends on control dependency: [if], data = [none]
            }
            else{
                mongo = new MongoClient(new ServerAddress(mongouri, port)); // depends on control dependency: [if], data = [none]
            }
            // get db, collection
            database = mongo.getDB(dbname); // depends on control dependency: [try], data = [none]
            collection = database.getCollection(collectionName); // depends on control dependency: [try], data = [none]
            // execute query
            queryDocument = (DBObject)JSON.parse(query); // depends on control dependency: [try], data = [none]
            if(projection==null){
                result = collection.find(queryDocument); // depends on control dependency: [if], data = [none]
            }
            else{
                projectionDocument = (DBObject)JSON.parse(projection); // depends on control dependency: [if], data = [(projection]
                result = collection.find(queryDocument, projectionDocument); // depends on control dependency: [if], data = [none]
            }
            // See: http://api.mongodb.org/java/current/com/mongodb/DBCursor.html#addOption(int)
            // and http://api.mongodb.org/java/current/com/mongodb/Bytes.html#QUERYOPTION_NOTIMEOUT
            if(noTimeOut){
                result.addOption(Bytes.QUERYOPTION_NOTIMEOUT); // depends on control dependency: [if], data = [none]
            }
            return new MongoDBIterator(result, mongo); // depends on control dependency: [try], data = [none]
        } catch (UnknownHostException e) {
            throw new RuntimeException(e);
        } catch (Exception ex){ // depends on control dependency: [catch], data = [none]
            throw new DukeException(ex);
        } // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
    /**
     * SRTP replay check: decides whether a packet with the given guessed
     * index should be accepted. Accepts packets strictly newer than the
     * last received index, and packets inside the replay window that have
     * not been recorded yet; rejects packets older than the window or
     * already received.
     */
    boolean checkReplay(int seqNo, long guessedIndex) {
        // Index of the most recently received packet: (ROC << 16) | SEQ.
        long localIndex = (((long) this.roc) << 16) | this.seqNum;
        long delta = guessedIndex - localIndex;
        if (delta > 0) {
            return true; // strictly newer than anything seen so far
        }
        if (-delta > REPLAY_WINDOW_SIZE) {
            return false; // too old - outside the replay window
        }
        if (((this.replayWindow >> (-delta)) & 0x1) != 0) {
            return false; // already received
        }
        return true; // inside the window but not yet received
} } | public class class_name {
    // NOTE(review): control-dependency-annotated copy of checkReplay; label
    // comments are dataset artifacts.
    boolean checkReplay(int seqNo, long guessedIndex) {
        // compute the index of previously received packet and its
        // delta to the new received packet
        long localIndex = (((long) this.roc) << 16) | this.seqNum;
        long delta = guessedIndex - localIndex;
        if (delta > 0) {
            /* Packet not yet received */
            return true; // depends on control dependency: [if], data = [none]
        } else {
            if (-delta > REPLAY_WINDOW_SIZE) {
                /* Packet too old */
                return false; // depends on control dependency: [if], data = [none]
            } else {
                if (((this.replayWindow >> (-delta)) & 0x1) != 0) {
                    /* Packet already received ! */
                    return false; // depends on control dependency: [if], data = [none]
                } else {
                    /* Packet not yet received */
                    return true; // depends on control dependency: [if], data = [none]
                }
            }
        }
} } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.