code stringlengths 130 281k | code_dependency stringlengths 182 306k |
|---|---|
public class class_name {
protected void rewriteLinks(CmsResource resource, Collection<CmsRelation> relations) throws CmsException {
LOG.info("Rewriting relations for resource " + resource.getRootPath());
I_CmsResourceType resourceType = OpenCms.getResourceManager().getResourceType(resource.getTypeId());
boolean hasContentLinks = false;
boolean hasOtherLinks = false;
for (CmsRelation relation : relations) {
if (relation.getType().isDefinedInContent()) {
hasContentLinks = true;
} else {
hasOtherLinks = true;
}
}
if (hasContentLinks) {
LOG.info("The resource " + resource.getRootPath() + " has links in the content.");
}
if (hasOtherLinks) {
LOG.info("The resource " + resource.getRootPath() + " has non-content links.");
}
if (hasContentLinks) {
if (resourceType instanceof I_CmsLinkParseable) {
CmsFile file = m_cms.readFile(resource);
rewriteContent(file, relations);
m_rewrittenContent.add(file.getStructureId());
}
}
if (hasOtherLinks) {
rewriteOtherRelations(resource, relations);
}
} } | public class class_name {
protected void rewriteLinks(CmsResource resource, Collection<CmsRelation> relations) throws CmsException {
LOG.info("Rewriting relations for resource " + resource.getRootPath());
I_CmsResourceType resourceType = OpenCms.getResourceManager().getResourceType(resource.getTypeId());
boolean hasContentLinks = false;
boolean hasOtherLinks = false;
for (CmsRelation relation : relations) {
if (relation.getType().isDefinedInContent()) {
hasContentLinks = true; // depends on control dependency: [if], data = [none]
} else {
hasOtherLinks = true; // depends on control dependency: [if], data = [none]
}
}
if (hasContentLinks) {
LOG.info("The resource " + resource.getRootPath() + " has links in the content.");
}
if (hasOtherLinks) {
LOG.info("The resource " + resource.getRootPath() + " has non-content links.");
}
if (hasContentLinks) {
if (resourceType instanceof I_CmsLinkParseable) {
CmsFile file = m_cms.readFile(resource);
rewriteContent(file, relations); // depends on control dependency: [if], data = [none]
m_rewrittenContent.add(file.getStructureId()); // depends on control dependency: [if], data = [none]
}
}
if (hasOtherLinks) {
rewriteOtherRelations(resource, relations);
}
} } |
public class class_name {
public ReturnValue execute(final ICommandLine cl) throws BadThresholdException {
ThresholdsEvaluatorBuilder thrb = new ThresholdsEvaluatorBuilder();
configureThresholdEvaluatorBuilder(thrb, cl);
ReturnValueBuilder builder = ReturnValueBuilder.forPlugin(getPluginName(), thrb.create());
try {
Collection<Metric> metrics = gatherMetrics(cl);
for (Metric m : metrics) {
builder.withValue(m);
}
return builder.create();
} catch (MetricGatheringException mge) {
return ReturnValueBuilder.forPlugin(getPluginName()).withForcedMessage(mge.getMessage()).withStatus(mge.getStatus()).create();
}
} } | public class class_name {
public ReturnValue execute(final ICommandLine cl) throws BadThresholdException {
ThresholdsEvaluatorBuilder thrb = new ThresholdsEvaluatorBuilder();
configureThresholdEvaluatorBuilder(thrb, cl);
ReturnValueBuilder builder = ReturnValueBuilder.forPlugin(getPluginName(), thrb.create());
try {
Collection<Metric> metrics = gatherMetrics(cl);
for (Metric m : metrics) {
builder.withValue(m); // depends on control dependency: [for], data = [m]
}
return builder.create();
} catch (MetricGatheringException mge) {
return ReturnValueBuilder.forPlugin(getPluginName()).withForcedMessage(mge.getMessage()).withStatus(mge.getStatus()).create();
}
} } |
public class class_name {
public void log(String msg) {
if (theLog != null) {
theLog.println(msg);
} else {
System.err.println(msg);
}
} } | public class class_name {
public void log(String msg) {
if (theLog != null) {
theLog.println(msg); // depends on control dependency: [if], data = [none]
} else {
System.err.println(msg); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@Override
public String getDisplayName(Locale language) {
String key;
switch (this.getSymbol()) {
case 'G':
key = "L_era";
break;
case 'u':
case 'y':
case 'Y':
key = "L_year";
break;
case 'Q':
key = "L_quarter";
break;
case 'M':
key = "L_month";
break;
case 'w':
case 'W':
key = "L_week";
break;
case 'd':
key = "L_day";
break;
case 'E':
case 'e':
key = "L_weekday";
break;
case 'H':
case 'h':
case 'K':
case 'k':
key = "L_hour";
break;
case 'm':
key = "L_minute";
break;
case 's':
key = "L_second";
break;
default:
String n = this.name();
key = OTHER_DISPLAY_KEYS.get(n);
if (key == null) {
return n;
}
}
String lname = CalendarText.getIsoInstance(language).getTextForms().get(key);
return ((lname == null) ? this.name() : lname);
} } | public class class_name {
@Override
public String getDisplayName(Locale language) {
String key;
switch (this.getSymbol()) {
case 'G':
key = "L_era";
break;
case 'u':
case 'y':
case 'Y':
key = "L_year";
break;
case 'Q':
key = "L_quarter";
break;
case 'M':
key = "L_month";
break;
case 'w':
case 'W':
key = "L_week";
break;
case 'd':
key = "L_day";
break;
case 'E':
case 'e':
key = "L_weekday";
break;
case 'H':
case 'h':
case 'K':
case 'k':
key = "L_hour";
break;
case 'm':
key = "L_minute";
break;
case 's':
key = "L_second";
break;
default:
String n = this.name();
key = OTHER_DISPLAY_KEYS.get(n);
if (key == null) {
return n; // depends on control dependency: [if], data = [none]
}
}
String lname = CalendarText.getIsoInstance(language).getTextForms().get(key);
return ((lname == null) ? this.name() : lname);
} } |
public class class_name {
public void setProxyExcludedDomains(List<DomainMatcher> proxyExcludedDomains) {
if (proxyExcludedDomains == null || proxyExcludedDomains.isEmpty()) {
((HierarchicalConfiguration) getConfig()).clearTree(ALL_PROXY_EXCLUDED_DOMAINS_KEY);
this.proxyExcludedDomains = Collections.emptyList();
this.proxyExcludedDomainsEnabled = Collections.emptyList();
return;
}
this.proxyExcludedDomains = new ArrayList<>(proxyExcludedDomains);
((HierarchicalConfiguration) getConfig()).clearTree(ALL_PROXY_EXCLUDED_DOMAINS_KEY);
int size = proxyExcludedDomains.size();
ArrayList<DomainMatcher> enabledExcludedDomains = new ArrayList<>(size);
for (int i = 0; i < size; ++i) {
String elementBaseKey = ALL_PROXY_EXCLUDED_DOMAINS_KEY + "(" + i + ").";
DomainMatcher excludedDomain = proxyExcludedDomains.get(i);
getConfig().setProperty(elementBaseKey + PROXY_EXCLUDED_DOMAIN_VALUE_KEY, excludedDomain.getValue());
getConfig().setProperty(elementBaseKey + PROXY_EXCLUDED_DOMAIN_REGEX_KEY, excludedDomain.isRegex());
getConfig().setProperty(elementBaseKey + PROXY_EXCLUDED_DOMAIN_ENABLED_KEY, excludedDomain.isEnabled());
if (excludedDomain.isEnabled()) {
enabledExcludedDomains.add(excludedDomain);
}
}
enabledExcludedDomains.trimToSize();
this.proxyExcludedDomainsEnabled = enabledExcludedDomains;
} } | public class class_name {
public void setProxyExcludedDomains(List<DomainMatcher> proxyExcludedDomains) {
if (proxyExcludedDomains == null || proxyExcludedDomains.isEmpty()) {
((HierarchicalConfiguration) getConfig()).clearTree(ALL_PROXY_EXCLUDED_DOMAINS_KEY);
// depends on control dependency: [if], data = [none]
this.proxyExcludedDomains = Collections.emptyList();
// depends on control dependency: [if], data = [none]
this.proxyExcludedDomainsEnabled = Collections.emptyList();
// depends on control dependency: [if], data = [none]
return;
// depends on control dependency: [if], data = [none]
}
this.proxyExcludedDomains = new ArrayList<>(proxyExcludedDomains);
((HierarchicalConfiguration) getConfig()).clearTree(ALL_PROXY_EXCLUDED_DOMAINS_KEY);
int size = proxyExcludedDomains.size();
ArrayList<DomainMatcher> enabledExcludedDomains = new ArrayList<>(size);
for (int i = 0; i < size; ++i) {
String elementBaseKey = ALL_PROXY_EXCLUDED_DOMAINS_KEY + "(" + i + ").";
DomainMatcher excludedDomain = proxyExcludedDomains.get(i);
getConfig().setProperty(elementBaseKey + PROXY_EXCLUDED_DOMAIN_VALUE_KEY, excludedDomain.getValue());
// depends on control dependency: [for], data = [none]
getConfig().setProperty(elementBaseKey + PROXY_EXCLUDED_DOMAIN_REGEX_KEY, excludedDomain.isRegex());
// depends on control dependency: [for], data = [none]
getConfig().setProperty(elementBaseKey + PROXY_EXCLUDED_DOMAIN_ENABLED_KEY, excludedDomain.isEnabled());
// depends on control dependency: [for], data = [none]
if (excludedDomain.isEnabled()) {
enabledExcludedDomains.add(excludedDomain);
// depends on control dependency: [if], data = [none]
}
}
enabledExcludedDomains.trimToSize();
this.proxyExcludedDomainsEnabled = enabledExcludedDomains;
} } |
public class class_name {
private String transformRecursive(String original, State state) {
String value = original;
if (this.parent != null) {
value = this.parent.transformRecursive(original, state);
if (state.stop) {
return value;
}
}
for (StringTransformerRule rule : this.rules) {
String transformed = rule.transform(value);
if ((transformed != value) && (rule.isStopOnMatch())) {
state.stop = true;
return transformed;
}
value = transformed;
}
return value;
} } | public class class_name {
private String transformRecursive(String original, State state) {
String value = original;
if (this.parent != null) {
value = this.parent.transformRecursive(original, state); // depends on control dependency: [if], data = [none]
if (state.stop) {
return value; // depends on control dependency: [if], data = [none]
}
}
for (StringTransformerRule rule : this.rules) {
String transformed = rule.transform(value);
if ((transformed != value) && (rule.isStopOnMatch())) {
state.stop = true; // depends on control dependency: [if], data = [none]
return transformed; // depends on control dependency: [if], data = [none]
}
value = transformed; // depends on control dependency: [for], data = [none]
}
return value;
} } |
public class class_name {
private JsonObject lookForRelation(String oid, String field,
JsonSimple config, JsonSimple baseNode) {
JsonObject newRelation = new JsonObject();
newRelation.put("field", field);
newRelation.put("authority", true);
// ** -1- ** EXCLUSIONS
List<String> exPath = config.getStringList("excludeCondition", "path");
String exValue = config.getString(null, "excludeCondition", "value");
if (exPath != null && !exPath.isEmpty() && exValue != null) {
String value = baseNode.getString(null, exPath.toArray());
if (value != null && value.equals(exValue)) {
log.info("Excluding relationship '{}' based on config", field);
return null;
}
}
String exStartsWith = config.getString(null, "excludeCondition",
"startsWith");
String exDoesntStartWith = config.getString(null, "excludeCondition",
"doesntStartWith");
if (exPath != null && !exPath.isEmpty() && exStartsWith != null) {
String value = baseNode.getString(null, exPath.toArray());
if (value != null && value.startsWith(exStartsWith)) {
log.info("Excluding relationship '{}' based on config", field);
return null;
}
}else {
if (exPath != null) {
String value = baseNode.getString(null, exPath.toArray());
if (value != null) {
if (exDoesntStartWith instanceof String && !value.startsWith(exDoesntStartWith)) {
log.info("Excluding relationship '{}' based on config", field);
return null;
}
}
}
}
// ** -2- ** IDENTIFIER
// Inside that object where can we find the identifier
List<String> idPath = config.getStringList("identifier");
if (idPath == null || idPath.isEmpty()) {
log.error("Ignoring invalid relationship '{}'. No 'identifier'"
+ " provided in configuration", field);
return null;
}
String id = baseNode.getString(null, idPath.toArray());
if (id != null && !id.equals("")) {
newRelation.put("identifier", id.trim());
} else {
log.info("Relationship '{}' has no identifier, ignoring!", field);
return null;
}
// ** -3- ** RELATIONSHIP TYPE
// Relationship type, it may be static and provided for us...
String staticRelation = config.getString(null, "relationship");
List<String> relPath = null;
if (staticRelation == null) {
// ... or it could be found in the form data
relPath = config.getStringList("relationship");
}
// But we have to have one.
if (staticRelation == null && (relPath == null || relPath.isEmpty())) {
log.error("Ignoring invalid relationship '{}'. No relationship"
+ " String of path in configuration", field);
return null;
}
String relString = null;
if (staticRelation != null) {
relString = staticRelation;
} else {
relString = baseNode.getString("hasAssociationWith",
relPath.toArray());
}
if (relString == null || relString.equals("")) {
log.info("Relationship '{}' has no type, ignoring!", field);
return null;
}
newRelation.put("relationship", relString);
// ** -4- ** REVERSE RELATIONS
String revRelation = systemConfig.getString("hasAssociationWith",
"curation", "reverseMappings", relString);
newRelation.put("reverseRelationship", revRelation);
// ** -5- ** DESCRIPTION
String description = config.getString(null, "description");
if (description != null) {
newRelation.put("description", description);
}
// ** -6- ** SYSTEM / BROKER
String system = config.getString("mint", "system");
if (system != null && system.equals("mint")) {
newRelation.put("broker", mintBroker);
} else {
newRelation.put("broker", brokerUrl);
// ReDBox record's should also be told that the ID is an OID
// JCU: causes an exception in CurationManager.
// checkChildren() will convert the identifier to an oid when a
// 'curation-confirm' is processed
// newRelation.put("oid", id);
}
// ** -7- ** OPTIONAL
boolean optional = config.getBoolean(false, "optional");
if (optional) {
newRelation.put("optional", optional);
}
return newRelation;
} } | public class class_name {
private JsonObject lookForRelation(String oid, String field,
JsonSimple config, JsonSimple baseNode) {
JsonObject newRelation = new JsonObject();
newRelation.put("field", field);
newRelation.put("authority", true);
// ** -1- ** EXCLUSIONS
List<String> exPath = config.getStringList("excludeCondition", "path");
String exValue = config.getString(null, "excludeCondition", "value");
if (exPath != null && !exPath.isEmpty() && exValue != null) {
String value = baseNode.getString(null, exPath.toArray());
if (value != null && value.equals(exValue)) {
log.info("Excluding relationship '{}' based on config", field); // depends on control dependency: [if], data = [none]
return null; // depends on control dependency: [if], data = [none]
}
}
String exStartsWith = config.getString(null, "excludeCondition",
"startsWith");
String exDoesntStartWith = config.getString(null, "excludeCondition",
"doesntStartWith");
if (exPath != null && !exPath.isEmpty() && exStartsWith != null) {
String value = baseNode.getString(null, exPath.toArray());
if (value != null && value.startsWith(exStartsWith)) {
log.info("Excluding relationship '{}' based on config", field); // depends on control dependency: [if], data = [none]
return null; // depends on control dependency: [if], data = [none]
}
}else {
if (exPath != null) {
String value = baseNode.getString(null, exPath.toArray());
if (value != null) {
if (exDoesntStartWith instanceof String && !value.startsWith(exDoesntStartWith)) {
log.info("Excluding relationship '{}' based on config", field); // depends on control dependency: [if], data = [none]
return null; // depends on control dependency: [if], data = [none]
}
}
}
}
// ** -2- ** IDENTIFIER
// Inside that object where can we find the identifier
List<String> idPath = config.getStringList("identifier");
if (idPath == null || idPath.isEmpty()) {
log.error("Ignoring invalid relationship '{}'. No 'identifier'"
+ " provided in configuration", field); // depends on control dependency: [if], data = [none]
return null; // depends on control dependency: [if], data = [none]
}
String id = baseNode.getString(null, idPath.toArray());
if (id != null && !id.equals("")) {
newRelation.put("identifier", id.trim()); // depends on control dependency: [if], data = [none]
} else {
log.info("Relationship '{}' has no identifier, ignoring!", field); // depends on control dependency: [if], data = [none]
return null; // depends on control dependency: [if], data = [none]
}
// ** -3- ** RELATIONSHIP TYPE
// Relationship type, it may be static and provided for us...
String staticRelation = config.getString(null, "relationship");
List<String> relPath = null;
if (staticRelation == null) {
// ... or it could be found in the form data
relPath = config.getStringList("relationship"); // depends on control dependency: [if], data = [none]
}
// But we have to have one.
if (staticRelation == null && (relPath == null || relPath.isEmpty())) {
log.error("Ignoring invalid relationship '{}'. No relationship"
+ " String of path in configuration", field); // depends on control dependency: [if], data = [none]
return null; // depends on control dependency: [if], data = [none]
}
String relString = null;
if (staticRelation != null) {
relString = staticRelation; // depends on control dependency: [if], data = [none]
} else {
relString = baseNode.getString("hasAssociationWith",
relPath.toArray()); // depends on control dependency: [if], data = [none]
}
if (relString == null || relString.equals("")) {
log.info("Relationship '{}' has no type, ignoring!", field); // depends on control dependency: [if], data = [none]
return null; // depends on control dependency: [if], data = [none]
}
newRelation.put("relationship", relString);
// ** -4- ** REVERSE RELATIONS
String revRelation = systemConfig.getString("hasAssociationWith",
"curation", "reverseMappings", relString);
newRelation.put("reverseRelationship", revRelation);
// ** -5- ** DESCRIPTION
String description = config.getString(null, "description");
if (description != null) {
newRelation.put("description", description); // depends on control dependency: [if], data = [none]
}
// ** -6- ** SYSTEM / BROKER
String system = config.getString("mint", "system");
if (system != null && system.equals("mint")) {
newRelation.put("broker", mintBroker); // depends on control dependency: [if], data = [none]
} else {
newRelation.put("broker", brokerUrl); // depends on control dependency: [if], data = [none]
// ReDBox record's should also be told that the ID is an OID
// JCU: causes an exception in CurationManager.
// checkChildren() will convert the identifier to an oid when a
// 'curation-confirm' is processed
// newRelation.put("oid", id);
}
// ** -7- ** OPTIONAL
boolean optional = config.getBoolean(false, "optional");
if (optional) {
newRelation.put("optional", optional); // depends on control dependency: [if], data = [none]
}
return newRelation;
} } |
public class class_name {
private void onValidateTable(KsDef ksDef, TableInfo tableInfo) throws Exception
{
boolean tablefound = false;
for (CfDef cfDef : ksDef.getCf_defs())
{
if (cfDef.getName().equals(tableInfo.getTableName())/*
* && (cfDef.
* getColumn_type
* ().equals(
* ColumnFamilyType
* .
* getInstanceOf
* (
* tableInfo.getType
* ()).name()))
*/)
{
if (cfDef.getColumn_type().equals(ColumnFamilyType.Standard.name()))
{
for (ColumnInfo columnInfo : tableInfo.getColumnMetadatas())
{
onValidateColumn(tableInfo, cfDef, columnInfo);
}
tablefound = true;
break;
}
else if (cfDef.getColumn_type().equals(ColumnFamilyType.Super.name()))
{
tablefound = true;
}
}
}
if (!tablefound)
{
throw new SchemaGenerationException("Column family " + tableInfo.getTableName()
+ " does not exist in keyspace " + databaseName + "", "Cassandra", databaseName,
tableInfo.getTableName());
}
} } | public class class_name {
private void onValidateTable(KsDef ksDef, TableInfo tableInfo) throws Exception
{
boolean tablefound = false;
for (CfDef cfDef : ksDef.getCf_defs())
{
if (cfDef.getName().equals(tableInfo.getTableName())/*
* && (cfDef.
* getColumn_type
* ().equals(
* ColumnFamilyType
* .
* getInstanceOf
* (
* tableInfo.getType
* ()).name()))
*/)
{
if (cfDef.getColumn_type().equals(ColumnFamilyType.Standard.name()))
{
for (ColumnInfo columnInfo : tableInfo.getColumnMetadatas())
{
onValidateColumn(tableInfo, cfDef, columnInfo); // depends on control dependency: [for], data = [columnInfo]
}
tablefound = true; // depends on control dependency: [if], data = [none]
break;
}
else if (cfDef.getColumn_type().equals(ColumnFamilyType.Super.name()))
{
tablefound = true; // depends on control dependency: [if], data = [none]
}
}
}
if (!tablefound)
{
throw new SchemaGenerationException("Column family " + tableInfo.getTableName()
+ " does not exist in keyspace " + databaseName + "", "Cassandra", databaseName,
tableInfo.getTableName());
}
} } |
public class class_name {
public void notifyAddOnFailedUninstallation(final AddOn addOn) {
if (EventQueue.isDispatchThread()) {
installedAddOnsModel.notifyAddOnFailedUninstallation(addOn);
} else {
EventQueue.invokeLater(new Runnable() {
@Override
public void run() {
notifyAddOnFailedUninstallation(addOn);
}
});
}
} } | public class class_name {
public void notifyAddOnFailedUninstallation(final AddOn addOn) {
if (EventQueue.isDispatchThread()) {
installedAddOnsModel.notifyAddOnFailedUninstallation(addOn);
// depends on control dependency: [if], data = [none]
} else {
EventQueue.invokeLater(new Runnable() {
@Override
public void run() {
notifyAddOnFailedUninstallation(addOn);
}
});
// depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public String getSessionUserName(HttpServletRequest request, HttpServletResponse response)
{
HttpSession session = null;
SessionAffinityContext sac = getSessionAffinityContext(request);
if (!sac.isFirstSessionIdValid() && !sac.isAllSessionIdsSetViaSet()) { //PM89885
@SuppressWarnings("rawtypes")
List allSessionIds = _sam.getAllCookieValues(request);
sac.setAllSessionIds(allSessionIds);
_sam.setNextId(sac); // we got rid of the first one in setAllSessionIds, get the next one so that we have an id to work with
}
String id = _sam.getInUseSessionID(request, sac);
if (id != null) {
session = (HttpSession) _coreHttpSessionManager.getSession(request, response, sac, false); // don't create here
}
if( session != null ){
SessionData sd = (SessionData) session;
return sd.getUserName();
} else{
return null;
}
} } | public class class_name {
public String getSessionUserName(HttpServletRequest request, HttpServletResponse response)
{
HttpSession session = null;
SessionAffinityContext sac = getSessionAffinityContext(request);
if (!sac.isFirstSessionIdValid() && !sac.isAllSessionIdsSetViaSet()) { //PM89885
@SuppressWarnings("rawtypes")
List allSessionIds = _sam.getAllCookieValues(request);
sac.setAllSessionIds(allSessionIds); // depends on control dependency: [if], data = [none]
_sam.setNextId(sac); // we got rid of the first one in setAllSessionIds, get the next one so that we have an id to work with // depends on control dependency: [if], data = [none]
}
String id = _sam.getInUseSessionID(request, sac);
if (id != null) {
session = (HttpSession) _coreHttpSessionManager.getSession(request, response, sac, false); // don't create here // depends on control dependency: [if], data = [none]
}
if( session != null ){
SessionData sd = (SessionData) session;
return sd.getUserName(); // depends on control dependency: [if], data = [none]
} else{
return null; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static List<String> unzip(final File zipFile, final String destination, String encoding) {
List<String> fileNames = CollectUtils.newArrayList();
String dest = destination;
if (!destination.endsWith(File.separator)) {
dest = destination + File.separator;
}
ZipFile file;
try {
file = null;
if (null == encoding) file = new ZipFile(zipFile);
else file = new ZipFile(zipFile, encoding);
Enumeration<ZipArchiveEntry> en = file.getEntries();
ZipArchiveEntry ze = null;
while (en.hasMoreElements()) {
ze = en.nextElement();
File f = new File(dest, ze.getName());
if (ze.isDirectory()) {
f.mkdirs();
continue;
} else {
f.getParentFile().mkdirs();
InputStream is = file.getInputStream(ze);
OutputStream os = new FileOutputStream(f);
IOs.copy(is, os);
is.close();
os.close();
fileNames.add(f.getAbsolutePath());
}
}
file.close();
} catch (IOException e) {
e.printStackTrace();
}
return fileNames;
} } | public class class_name {
public static List<String> unzip(final File zipFile, final String destination, String encoding) {
List<String> fileNames = CollectUtils.newArrayList();
String dest = destination;
if (!destination.endsWith(File.separator)) {
dest = destination + File.separator; // depends on control dependency: [if], data = [none]
}
ZipFile file;
try {
file = null; // depends on control dependency: [try], data = [none]
if (null == encoding) file = new ZipFile(zipFile);
else file = new ZipFile(zipFile, encoding);
Enumeration<ZipArchiveEntry> en = file.getEntries();
ZipArchiveEntry ze = null;
while (en.hasMoreElements()) {
ze = en.nextElement(); // depends on control dependency: [while], data = [none]
File f = new File(dest, ze.getName());
if (ze.isDirectory()) {
f.mkdirs(); // depends on control dependency: [if], data = [none]
continue;
} else {
f.getParentFile().mkdirs(); // depends on control dependency: [if], data = [none]
InputStream is = file.getInputStream(ze);
OutputStream os = new FileOutputStream(f);
IOs.copy(is, os); // depends on control dependency: [if], data = [none]
is.close(); // depends on control dependency: [if], data = [none]
os.close(); // depends on control dependency: [if], data = [none]
fileNames.add(f.getAbsolutePath()); // depends on control dependency: [if], data = [none]
}
}
file.close(); // depends on control dependency: [try], data = [none]
} catch (IOException e) {
e.printStackTrace();
} // depends on control dependency: [catch], data = [none]
return fileNames;
} } |
public class class_name {
public static String findPath(Method definition) {
Request request = definition.getAnnotation(Request.class);
if(request != null) {
return request.path();
}
String path = "";
Annotation[] annotations = definition.getAnnotations();
for (Annotation annotation : annotations) {
if(annotation.annotationType().isAnnotationPresent(Request.class)) {
Class<? extends Annotation> type = annotation.annotationType();
path = type.getAnnotation(Request.class).path();
if("".equals(path)) {
path = type.equals(GET.class)? ((GET)annotation).value() :
type.equals(POST.class)? ((POST)annotation).value() :
type.equals(PUT.class)? ((PUT)annotation).value() :
type.equals(PATCH.class)? ((PATCH)annotation).value() :
type.equals(DELETE.class)? ((DELETE)annotation).value() :
type.equals(HEAD.class)? ((HEAD)annotation).value() :
type.equals(TRACE.class)? ((TRACE)annotation).value() :
type.equals(OPTIONS.class)? ((OPTIONS)annotation).value() : "";
}
break;
}
}
return path;
} } | public class class_name {
public static String findPath(Method definition) {
Request request = definition.getAnnotation(Request.class);
if(request != null) {
return request.path(); // depends on control dependency: [if], data = [none]
}
String path = "";
Annotation[] annotations = definition.getAnnotations();
for (Annotation annotation : annotations) {
if(annotation.annotationType().isAnnotationPresent(Request.class)) {
Class<? extends Annotation> type = annotation.annotationType();
path = type.getAnnotation(Request.class).path();
if("".equals(path)) {
path = type.equals(GET.class)? ((GET)annotation).value() :
type.equals(POST.class)? ((POST)annotation).value() :
type.equals(PUT.class)? ((PUT)annotation).value() :
type.equals(PATCH.class)? ((PATCH)annotation).value() :
type.equals(DELETE.class)? ((DELETE)annotation).value() :
type.equals(HEAD.class)? ((HEAD)annotation).value() :
type.equals(TRACE.class)? ((TRACE)annotation).value() :
type.equals(OPTIONS.class)? ((OPTIONS)annotation).value() : "";
}
break;
}
}
return path;
} } |
public class class_name {
protected boolean validateToken(SignedJWT jwtToken) {
boolean isValid = validateSignature(jwtToken);
if (isValid) {
isValid = validateExpiration(jwtToken);
if (!isValid) {
LOG.warn("Expiration time validation of JWT token failed.");
}
} else {
LOG.warn("Signature of JWT token could not be verified. Please check the public key");
}
return isValid;
} } | public class class_name {
protected boolean validateToken(SignedJWT jwtToken) {
boolean isValid = validateSignature(jwtToken);
if (isValid) {
isValid = validateExpiration(jwtToken); // depends on control dependency: [if], data = [none]
if (!isValid) {
LOG.warn("Expiration time validation of JWT token failed."); // depends on control dependency: [if], data = [none]
}
} else {
LOG.warn("Signature of JWT token could not be verified. Please check the public key"); // depends on control dependency: [if], data = [none]
}
return isValid;
} } |
public class class_name {
public static String lowerCaseFirstChar(String s) {
if (s.isEmpty()) {
return s;
}
char first = s.charAt(0);
if (isLowerCase(first)) {
return s;
}
return toLowerCase(first) + s.substring(1);
} } | public class class_name {
public static String lowerCaseFirstChar(String s) {
if (s.isEmpty()) {
return s; // depends on control dependency: [if], data = [none]
}
char first = s.charAt(0);
if (isLowerCase(first)) {
return s; // depends on control dependency: [if], data = [none]
}
return toLowerCase(first) + s.substring(1);
} } |
public class class_name {
public static PrimitiveIterator.OfInt iterator(Spliterator.OfInt spliterator) {
Objects.requireNonNull(spliterator);
class Adapter implements PrimitiveIterator.OfInt, IntConsumer {
boolean valueReady = false;
int nextElement;
@Override
public void accept(int t) {
valueReady = true;
nextElement = t;
}
@Override
public boolean hasNext() {
if (!valueReady)
spliterator.tryAdvance(this);
return valueReady;
}
@Override
public int nextInt() {
if (!valueReady && !hasNext())
throw new NoSuchElementException();
else {
valueReady = false;
return nextElement;
}
}
}
return new Adapter();
} } | public class class_name {
public static PrimitiveIterator.OfInt iterator(Spliterator.OfInt spliterator) {
Objects.requireNonNull(spliterator);
class Adapter implements PrimitiveIterator.OfInt, IntConsumer {
boolean valueReady = false;
int nextElement;
@Override
public void accept(int t) {
valueReady = true;
nextElement = t;
}
@Override
public boolean hasNext() {
if (!valueReady)
spliterator.tryAdvance(this);
return valueReady;
}
@Override
public int nextInt() {
if (!valueReady && !hasNext())
throw new NoSuchElementException();
else {
valueReady = false; // depends on control dependency: [if], data = [none]
return nextElement; // depends on control dependency: [if], data = [none]
}
}
}
return new Adapter();
} } |
public class class_name {
public CondItem genCond(JCTree _tree, boolean markBranches) {
JCTree inner_tree = TreeInfo.skipParens(_tree);
if (inner_tree.hasTag(CONDEXPR)) {
JCConditional tree = (JCConditional)inner_tree;
CondItem cond = genCond(tree.cond, CRT_FLOW_CONTROLLER);
if (cond.isTrue()) {
code.resolve(cond.trueJumps);
CondItem result = genCond(tree.truepart, CRT_FLOW_TARGET);
if (markBranches) result.tree = tree.truepart;
return result;
}
if (cond.isFalse()) {
code.resolve(cond.falseJumps);
CondItem result = genCond(tree.falsepart, CRT_FLOW_TARGET);
if (markBranches) result.tree = tree.falsepart;
return result;
}
Chain secondJumps = cond.jumpFalse();
code.resolve(cond.trueJumps);
CondItem first = genCond(tree.truepart, CRT_FLOW_TARGET);
if (markBranches) first.tree = tree.truepart;
Chain falseJumps = first.jumpFalse();
code.resolve(first.trueJumps);
Chain trueJumps = code.branch(goto_);
code.resolve(secondJumps);
CondItem second = genCond(tree.falsepart, CRT_FLOW_TARGET);
CondItem result = items.makeCondItem(second.opcode,
Code.mergeChains(trueJumps, second.trueJumps),
Code.mergeChains(falseJumps, second.falseJumps));
if (markBranches) result.tree = tree.falsepart;
return result;
} else {
CondItem result = genExpr(_tree, syms.booleanType).mkCond();
if (markBranches) result.tree = _tree;
return result;
}
} } | public class class_name {
public CondItem genCond(JCTree _tree, boolean markBranches) {
JCTree inner_tree = TreeInfo.skipParens(_tree);
if (inner_tree.hasTag(CONDEXPR)) {
JCConditional tree = (JCConditional)inner_tree;
CondItem cond = genCond(tree.cond, CRT_FLOW_CONTROLLER);
if (cond.isTrue()) {
code.resolve(cond.trueJumps); // depends on control dependency: [if], data = [none]
CondItem result = genCond(tree.truepart, CRT_FLOW_TARGET);
if (markBranches) result.tree = tree.truepart;
return result; // depends on control dependency: [if], data = [none]
}
if (cond.isFalse()) {
code.resolve(cond.falseJumps); // depends on control dependency: [if], data = [none]
CondItem result = genCond(tree.falsepart, CRT_FLOW_TARGET);
if (markBranches) result.tree = tree.falsepart;
return result; // depends on control dependency: [if], data = [none]
}
Chain secondJumps = cond.jumpFalse();
code.resolve(cond.trueJumps); // depends on control dependency: [if], data = [none]
CondItem first = genCond(tree.truepart, CRT_FLOW_TARGET);
if (markBranches) first.tree = tree.truepart;
Chain falseJumps = first.jumpFalse();
code.resolve(first.trueJumps); // depends on control dependency: [if], data = [none]
Chain trueJumps = code.branch(goto_);
code.resolve(secondJumps); // depends on control dependency: [if], data = [none]
CondItem second = genCond(tree.falsepart, CRT_FLOW_TARGET);
CondItem result = items.makeCondItem(second.opcode,
Code.mergeChains(trueJumps, second.trueJumps),
Code.mergeChains(falseJumps, second.falseJumps));
if (markBranches) result.tree = tree.falsepart;
return result; // depends on control dependency: [if], data = [none]
} else {
CondItem result = genExpr(_tree, syms.booleanType).mkCond();
if (markBranches) result.tree = _tree;
return result; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@SuppressWarnings("PMD.AvoidDeeplyNestedIfStmts") // agreed PMD, fixme
private void clean(RallyCollector collector, List<RallyProject> existingProjects) {
Set<ObjectId> uniqueIDs = new HashSet<>();
for (com.capitalone.dashboard.model.Component comp : dbComponentRepository.findAll()) {
if (comp.getCollectorItems() != null && !comp.getCollectorItems().isEmpty()) {
List<CollectorItem> itemList = comp.getCollectorItems().get(CollectorType.AgileTool);
if (itemList != null) {
for (CollectorItem ci : itemList) {
if (ci != null && ci.getCollectorId().equals(collector.getId())) {
uniqueIDs.add(ci.getId());
}
}
}
}
}
List<RallyProject> stateChangeJobList = new ArrayList<>();
Set<ObjectId> udId = new HashSet<>();
udId.add(collector.getId());
for (RallyProject job : existingProjects) {
// collect the jobs that need to change state : enabled vs disabled.
if ((job.isEnabled() && !uniqueIDs.contains(job.getId())) || // if
// it
// was
// enabled
// but
// not
// on
// a
// dashboard
(!job.isEnabled() && uniqueIDs.contains(job.getId()))) { // OR
// it
// was
// disabled
// and
// now
// on
// a
// dashboard
job.setEnabled(uniqueIDs.contains(job.getId()));
stateChangeJobList.add(job);
}
}
if (!CollectionUtils.isEmpty(stateChangeJobList)) {
rallyProjectRepository.save(stateChangeJobList);
}
} } | public class class_name {
@SuppressWarnings("PMD.AvoidDeeplyNestedIfStmts") // agreed PMD, fixme
private void clean(RallyCollector collector, List<RallyProject> existingProjects) {
Set<ObjectId> uniqueIDs = new HashSet<>();
for (com.capitalone.dashboard.model.Component comp : dbComponentRepository.findAll()) {
if (comp.getCollectorItems() != null && !comp.getCollectorItems().isEmpty()) {
List<CollectorItem> itemList = comp.getCollectorItems().get(CollectorType.AgileTool);
if (itemList != null) {
for (CollectorItem ci : itemList) {
if (ci != null && ci.getCollectorId().equals(collector.getId())) {
uniqueIDs.add(ci.getId()); // depends on control dependency: [if], data = [(ci]
}
}
}
}
}
List<RallyProject> stateChangeJobList = new ArrayList<>();
Set<ObjectId> udId = new HashSet<>();
udId.add(collector.getId());
for (RallyProject job : existingProjects) {
// collect the jobs that need to change state : enabled vs disabled.
if ((job.isEnabled() && !uniqueIDs.contains(job.getId())) || // if
// it
// was
// enabled
// but
// not
// on
// a
// dashboard
(!job.isEnabled() && uniqueIDs.contains(job.getId()))) { // OR
// it
// was
// disabled
// and
// now
// on
// a
// dashboard
job.setEnabled(uniqueIDs.contains(job.getId())); // depends on control dependency: [if], data = [none]
stateChangeJobList.add(job); // depends on control dependency: [if], data = [none]
}
}
if (!CollectionUtils.isEmpty(stateChangeJobList)) {
rallyProjectRepository.save(stateChangeJobList); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public Sender getResponseSender() {
if (blockingHttpExchange != null) {
return blockingHttpExchange.getSender();
}
if (sender != null) {
return sender;
}
return sender = new AsyncSenderImpl(this);
} } | public class class_name {
public Sender getResponseSender() {
if (blockingHttpExchange != null) {
return blockingHttpExchange.getSender(); // depends on control dependency: [if], data = [none]
}
if (sender != null) {
return sender; // depends on control dependency: [if], data = [none]
}
return sender = new AsyncSenderImpl(this);
} } |
public class class_name {
private static Bitmap createEdgeShadow(@NonNull final Context context, final int elevation,
@NonNull final Orientation orientation,
final boolean parallelLight) {
if (elevation == 0) {
return null;
} else {
float shadowWidth = getShadowWidth(context, elevation, orientation, parallelLight);
int shadowColor = getShadowColor(elevation, orientation, parallelLight);
int bitmapWidth = (int) Math
.round((orientation == Orientation.LEFT || orientation == Orientation.RIGHT) ?
Math.ceil(shadowWidth) : 1);
int bitmapHeight = (int) Math
.round((orientation == Orientation.TOP || orientation == Orientation.BOTTOM) ?
Math.ceil(shadowWidth) : 1);
Bitmap bitmap = Bitmap.createBitmap(bitmapWidth, bitmapHeight, Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas(bitmap);
Shader linearGradient =
createLinearGradient(orientation, bitmapWidth, bitmapHeight, shadowWidth,
shadowColor);
Paint paint = new Paint();
paint.setAntiAlias(true);
paint.setDither(true);
paint.setShader(linearGradient);
canvas.drawRect(0, 0, bitmapWidth, bitmapHeight, paint);
return bitmap;
}
} } | public class class_name {
private static Bitmap createEdgeShadow(@NonNull final Context context, final int elevation,
@NonNull final Orientation orientation,
final boolean parallelLight) {
if (elevation == 0) {
return null; // depends on control dependency: [if], data = [none]
} else {
float shadowWidth = getShadowWidth(context, elevation, orientation, parallelLight);
int shadowColor = getShadowColor(elevation, orientation, parallelLight);
int bitmapWidth = (int) Math
.round((orientation == Orientation.LEFT || orientation == Orientation.RIGHT) ?
Math.ceil(shadowWidth) : 1);
int bitmapHeight = (int) Math
.round((orientation == Orientation.TOP || orientation == Orientation.BOTTOM) ?
Math.ceil(shadowWidth) : 1);
Bitmap bitmap = Bitmap.createBitmap(bitmapWidth, bitmapHeight, Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas(bitmap);
Shader linearGradient =
createLinearGradient(orientation, bitmapWidth, bitmapHeight, shadowWidth,
shadowColor);
Paint paint = new Paint();
paint.setAntiAlias(true); // depends on control dependency: [if], data = [none]
paint.setDither(true); // depends on control dependency: [if], data = [none]
paint.setShader(linearGradient); // depends on control dependency: [if], data = [none]
canvas.drawRect(0, 0, bitmapWidth, bitmapHeight, paint); // depends on control dependency: [if], data = [none]
return bitmap; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public DeviceType getDeviceType(Context activityContext) {
// Verifies if the Generalized Size of the device is XLARGE to be
// considered a Tablet
boolean xlarge = ((activityContext.getResources().getConfiguration().screenLayout & Configuration.SCREENLAYOUT_SIZE_MASK) == SCREENLAYOUT_SIZE_XLARGE);
// If XLarge, checks if the Generalized Density is at least MDPI
// (160dpi)
if (xlarge) {
DisplayMetrics metrics = new DisplayMetrics();
Activity activity = (Activity)activityContext;
activity.getWindowManager().getDefaultDisplay().getMetrics(metrics);
// MDPI=160, DEFAULT=160, DENSITY_HIGH=240, DENSITY_MEDIUM=160,
// DENSITY_TV=213, DENSITY_XHIGH=320
if (metrics.densityDpi == DisplayMetrics.DENSITY_DEFAULT
|| metrics.densityDpi == DisplayMetrics.DENSITY_HIGH
|| metrics.densityDpi == DisplayMetrics.DENSITY_MEDIUM
|| metrics.densityDpi == DENSITY_TV
|| metrics.densityDpi == DENSITY_XHIGH) {
// this is a tablet!
return DeviceType.Tablet;
}
}
// this is not a tablet!
return DeviceType.Handset;
} } | public class class_name {
public DeviceType getDeviceType(Context activityContext) {
// Verifies if the Generalized Size of the device is XLARGE to be
// considered a Tablet
boolean xlarge = ((activityContext.getResources().getConfiguration().screenLayout & Configuration.SCREENLAYOUT_SIZE_MASK) == SCREENLAYOUT_SIZE_XLARGE);
// If XLarge, checks if the Generalized Density is at least MDPI
// (160dpi)
if (xlarge) {
DisplayMetrics metrics = new DisplayMetrics();
Activity activity = (Activity)activityContext;
activity.getWindowManager().getDefaultDisplay().getMetrics(metrics); // depends on control dependency: [if], data = [none]
// MDPI=160, DEFAULT=160, DENSITY_HIGH=240, DENSITY_MEDIUM=160,
// DENSITY_TV=213, DENSITY_XHIGH=320
if (metrics.densityDpi == DisplayMetrics.DENSITY_DEFAULT
|| metrics.densityDpi == DisplayMetrics.DENSITY_HIGH
|| metrics.densityDpi == DisplayMetrics.DENSITY_MEDIUM
|| metrics.densityDpi == DENSITY_TV
|| metrics.densityDpi == DENSITY_XHIGH) {
// this is a tablet!
return DeviceType.Tablet; // depends on control dependency: [if], data = [none]
}
}
// this is not a tablet!
return DeviceType.Handset;
} } |
public class class_name {
private List<ORecordOperation> commit(final OTransactionInternal transaction, final boolean allocated) {
// XXX: At this moment, there are two implementations of the commit method. One for regular client transactions and one for
// implicit micro-transactions. The implementations are quite identical, but operate on slightly different data. If you change
// this method don't forget to change its counterpart:
//
// OAbstractPaginatedStorage.commit(com.orientechnologies.orient.core.storage.impl.local.OMicroTransaction)
try {
checkOpenness();
checkLowDiskSpaceRequestsAndReadOnlyConditions();
txBegun.incrementAndGet();
final ODatabaseDocumentInternal database = transaction.getDatabase();
final OIndexManager indexManager = database.getMetadata().getIndexManager();
final TreeMap<String, OTransactionIndexChanges> indexOperations = getSortedIndexOperations(transaction);
database.getMetadata().makeThreadLocalSchemaSnapshot();
final Collection<ORecordOperation> recordOperations = transaction.getRecordOperations();
final TreeMap<Integer, OCluster> clustersToLock = new TreeMap<>();
final Map<ORecordOperation, Integer> clusterOverrides = new IdentityHashMap<>(8);
final Set<ORecordOperation> newRecords = new TreeSet<>(COMMIT_RECORD_OPERATION_COMPARATOR);
for (final ORecordOperation recordOperation : recordOperations) {
if (recordOperation.type == ORecordOperation.CREATED || recordOperation.type == ORecordOperation.UPDATED) {
final ORecord record = recordOperation.getRecord();
if (record instanceof ODocument) {
((ODocument) record).validate();
}
}
if (recordOperation.type == ORecordOperation.UPDATED || recordOperation.type == ORecordOperation.DELETED) {
final int clusterId = recordOperation.getRecord().getIdentity().getClusterId();
clustersToLock.put(clusterId, getClusterById(clusterId));
} else if (recordOperation.type == ORecordOperation.CREATED) {
newRecords.add(recordOperation);
final ORecord record = recordOperation.getRecord();
final ORID rid = record.getIdentity();
int clusterId = rid.getClusterId();
if (record.isDirty() && clusterId == ORID.CLUSTER_ID_INVALID && record instanceof ODocument) {
// TRY TO FIX CLUSTER ID TO THE DEFAULT CLUSTER ID DEFINED IN SCHEMA CLASS
final OImmutableClass class_ = ODocumentInternal.getImmutableSchemaClass(((ODocument) record));
if (class_ != null) {
clusterId = class_.getClusterForNewInstance((ODocument) record);
clusterOverrides.put(recordOperation, clusterId);
}
}
clustersToLock.put(clusterId, getClusterById(clusterId));
}
}
final List<ORecordOperation> result = new ArrayList<>(8);
stateLock.acquireReadLock();
try {
if (modificationLock) {
final List<ORID> recordLocks = new ArrayList<>();
for (final ORecordOperation recordOperation : recordOperations) {
if (recordOperation.type == ORecordOperation.UPDATED || recordOperation.type == ORecordOperation.DELETED) {
recordLocks.add(recordOperation.getRID());
}
}
final Set<ORID> locked = transaction.getLockedRecords();
if (locked != null) {
recordLocks.removeAll(locked);
}
Collections.sort(recordLocks);
for (final ORID rid : recordLocks) {
acquireWriteLock(rid);
}
}
try {
checkOpenness();
makeStorageDirty();
boolean rollback = false;
startStorageTx(transaction);
try {
final OAtomicOperation atomicOperation = OAtomicOperationsManager.getCurrentOperation();
lockClusters(clustersToLock);
checkReadOnlyConditions();
final Map<ORecordOperation, OPhysicalPosition> positions = new IdentityHashMap<>(8);
for (final ORecordOperation recordOperation : newRecords) {
final ORecord rec = recordOperation.getRecord();
if (allocated) {
if (rec.getIdentity().isPersistent()) {
positions.put(recordOperation, new OPhysicalPosition(rec.getIdentity().getClusterPosition()));
} else {
throw new OStorageException("Impossible to commit a transaction with not valid rid in pre-allocated commit");
}
} else if (rec.isDirty() && !rec.getIdentity().isPersistent()) {
final ORecordId rid = (ORecordId) rec.getIdentity().copy();
final ORecordId oldRID = rid.copy();
final Integer clusterOverride = clusterOverrides.get(recordOperation);
final int clusterId = clusterOverride == null ? rid.getClusterId() : clusterOverride;
final OCluster cluster = getClusterById(clusterId);
assert atomicOperation.getCounter() == 1;
OPhysicalPosition physicalPosition = cluster.allocatePosition(ORecordInternal.getRecordType(rec));
assert atomicOperation.getCounter() == 1;
rid.setClusterId(cluster.getId());
if (rid.getClusterPosition() > -1) {
// CREATE EMPTY RECORDS UNTIL THE POSITION IS REACHED. THIS IS THE CASE WHEN A SERVER IS OUT OF SYNC
// BECAUSE A TRANSACTION HAS BEEN ROLLED BACK BEFORE TO SEND THE REMOTE CREATES. SO THE OWNER NODE DELETED
// RECORD HAVING A HIGHER CLUSTER POSITION
while (rid.getClusterPosition() > physicalPosition.clusterPosition) {
assert atomicOperation.getCounter() == 1;
physicalPosition = cluster.allocatePosition(ORecordInternal.getRecordType(rec));
assert atomicOperation.getCounter() == 1;
}
if (rid.getClusterPosition() != physicalPosition.clusterPosition) {
throw new OConcurrentCreateException(rid, new ORecordId(rid.getClusterId(), physicalPosition.clusterPosition));
}
}
positions.put(recordOperation, physicalPosition);
rid.setClusterPosition(physicalPosition.clusterPosition);
transaction.updateIdentityAfterCommit(oldRID, rid);
}
}
lockRidBags(clustersToLock, indexOperations, indexManager);
checkReadOnlyConditions();
for (final ORecordOperation recordOperation : recordOperations) {
assert atomicOperation.getCounter() == 1;
commitEntry(recordOperation, positions.get(recordOperation), database.getSerializer());
assert atomicOperation.getCounter() == 1;
result.add(recordOperation);
}
lockIndexes(indexOperations);
checkReadOnlyConditions();
commitIndexes(indexOperations, atomicOperation);
} catch (final IOException | RuntimeException e) {
rollback = true;
if (e instanceof RuntimeException) {
throw ((RuntimeException) e);
} else {
throw OException.wrapException(new OStorageException("Error during transaction commit"), e);
}
} finally {
if (rollback) {
rollback(transaction);
} else {
endStorageTx(transaction, recordOperations);
}
this.transaction.set(null);
}
} finally {
atomicOperationsManager.ensureThatComponentsUnlocked();
database.getMetadata().clearThreadLocalSchemaSnapshot();
}
} finally {
try {
if (modificationLock) {
final List<ORID> recordLocks = new ArrayList<>();
for (final ORecordOperation recordOperation : recordOperations) {
if (recordOperation.type == ORecordOperation.UPDATED || recordOperation.type == ORecordOperation.DELETED) {
recordLocks.add(recordOperation.getRID());
}
}
final Set<ORID> locked = transaction.getLockedRecords();
if (locked != null) {
recordLocks.removeAll(locked);
}
for (final ORID rid : recordLocks) {
releaseWriteLock(rid);
}
}
} finally {
stateLock.releaseReadLock();
}
}
if (OLogManager.instance().isDebugEnabled()) {
OLogManager.instance()
.debug(this, "%d Committed transaction %d on database '%s' (result=%s)", Thread.currentThread().getId(),
transaction.getId(), database.getName(), result);
}
return result;
} catch (final RuntimeException ee) {
throw logAndPrepareForRethrow(ee);
} catch (final Error ee) {
handleJVMError(ee);
OAtomicOperationsManager.alarmClearOfAtomicOperation();
throw logAndPrepareForRethrow(ee);
} catch (final Throwable t) {
throw logAndPrepareForRethrow(t);
}
} } | public class class_name {
private List<ORecordOperation> commit(final OTransactionInternal transaction, final boolean allocated) {
// XXX: At this moment, there are two implementations of the commit method. One for regular client transactions and one for
// implicit micro-transactions. The implementations are quite identical, but operate on slightly different data. If you change
// this method don't forget to change its counterpart:
//
// OAbstractPaginatedStorage.commit(com.orientechnologies.orient.core.storage.impl.local.OMicroTransaction)
try {
checkOpenness(); // depends on control dependency: [try], data = [none]
checkLowDiskSpaceRequestsAndReadOnlyConditions(); // depends on control dependency: [try], data = [none]
txBegun.incrementAndGet(); // depends on control dependency: [try], data = [none]
final ODatabaseDocumentInternal database = transaction.getDatabase();
final OIndexManager indexManager = database.getMetadata().getIndexManager();
final TreeMap<String, OTransactionIndexChanges> indexOperations = getSortedIndexOperations(transaction);
database.getMetadata().makeThreadLocalSchemaSnapshot(); // depends on control dependency: [try], data = [none]
final Collection<ORecordOperation> recordOperations = transaction.getRecordOperations();
final TreeMap<Integer, OCluster> clustersToLock = new TreeMap<>();
final Map<ORecordOperation, Integer> clusterOverrides = new IdentityHashMap<>(8);
final Set<ORecordOperation> newRecords = new TreeSet<>(COMMIT_RECORD_OPERATION_COMPARATOR);
for (final ORecordOperation recordOperation : recordOperations) {
if (recordOperation.type == ORecordOperation.CREATED || recordOperation.type == ORecordOperation.UPDATED) {
final ORecord record = recordOperation.getRecord();
if (record instanceof ODocument) {
((ODocument) record).validate(); // depends on control dependency: [if], data = [none]
}
}
if (recordOperation.type == ORecordOperation.UPDATED || recordOperation.type == ORecordOperation.DELETED) {
final int clusterId = recordOperation.getRecord().getIdentity().getClusterId();
clustersToLock.put(clusterId, getClusterById(clusterId)); // depends on control dependency: [if], data = [none]
} else if (recordOperation.type == ORecordOperation.CREATED) {
newRecords.add(recordOperation); // depends on control dependency: [if], data = [none]
final ORecord record = recordOperation.getRecord();
final ORID rid = record.getIdentity();
int clusterId = rid.getClusterId();
if (record.isDirty() && clusterId == ORID.CLUSTER_ID_INVALID && record instanceof ODocument) {
// TRY TO FIX CLUSTER ID TO THE DEFAULT CLUSTER ID DEFINED IN SCHEMA CLASS
final OImmutableClass class_ = ODocumentInternal.getImmutableSchemaClass(((ODocument) record));
if (class_ != null) {
clusterId = class_.getClusterForNewInstance((ODocument) record); // depends on control dependency: [if], data = [none]
clusterOverrides.put(recordOperation, clusterId); // depends on control dependency: [if], data = [none]
}
}
clustersToLock.put(clusterId, getClusterById(clusterId)); // depends on control dependency: [if], data = [none]
}
}
final List<ORecordOperation> result = new ArrayList<>(8);
stateLock.acquireReadLock(); // depends on control dependency: [try], data = [none]
try {
if (modificationLock) {
final List<ORID> recordLocks = new ArrayList<>();
for (final ORecordOperation recordOperation : recordOperations) {
if (recordOperation.type == ORecordOperation.UPDATED || recordOperation.type == ORecordOperation.DELETED) {
recordLocks.add(recordOperation.getRID()); // depends on control dependency: [if], data = [none]
}
}
final Set<ORID> locked = transaction.getLockedRecords();
if (locked != null) {
recordLocks.removeAll(locked); // depends on control dependency: [if], data = [(locked]
}
Collections.sort(recordLocks); // depends on control dependency: [if], data = [none]
for (final ORID rid : recordLocks) {
acquireWriteLock(rid); // depends on control dependency: [for], data = [rid]
}
}
try {
checkOpenness(); // depends on control dependency: [try], data = [none]
makeStorageDirty(); // depends on control dependency: [try], data = [none]
boolean rollback = false;
startStorageTx(transaction); // depends on control dependency: [try], data = [none]
try {
final OAtomicOperation atomicOperation = OAtomicOperationsManager.getCurrentOperation();
lockClusters(clustersToLock); // depends on control dependency: [try], data = [none]
checkReadOnlyConditions(); // depends on control dependency: [try], data = [none]
final Map<ORecordOperation, OPhysicalPosition> positions = new IdentityHashMap<>(8);
for (final ORecordOperation recordOperation : newRecords) {
final ORecord rec = recordOperation.getRecord();
if (allocated) {
if (rec.getIdentity().isPersistent()) {
positions.put(recordOperation, new OPhysicalPosition(rec.getIdentity().getClusterPosition())); // depends on control dependency: [if], data = [none]
} else {
throw new OStorageException("Impossible to commit a transaction with not valid rid in pre-allocated commit");
}
} else if (rec.isDirty() && !rec.getIdentity().isPersistent()) {
final ORecordId rid = (ORecordId) rec.getIdentity().copy();
final ORecordId oldRID = rid.copy();
final Integer clusterOverride = clusterOverrides.get(recordOperation);
final int clusterId = clusterOverride == null ? rid.getClusterId() : clusterOverride;
final OCluster cluster = getClusterById(clusterId);
assert atomicOperation.getCounter() == 1;
OPhysicalPosition physicalPosition = cluster.allocatePosition(ORecordInternal.getRecordType(rec));
assert atomicOperation.getCounter() == 1;
rid.setClusterId(cluster.getId()); // depends on control dependency: [if], data = [none]
if (rid.getClusterPosition() > -1) {
// CREATE EMPTY RECORDS UNTIL THE POSITION IS REACHED. THIS IS THE CASE WHEN A SERVER IS OUT OF SYNC
// BECAUSE A TRANSACTION HAS BEEN ROLLED BACK BEFORE TO SEND THE REMOTE CREATES. SO THE OWNER NODE DELETED
// RECORD HAVING A HIGHER CLUSTER POSITION
while (rid.getClusterPosition() > physicalPosition.clusterPosition) {
assert atomicOperation.getCounter() == 1;
physicalPosition = cluster.allocatePosition(ORecordInternal.getRecordType(rec)); // depends on control dependency: [while], data = [none]
assert atomicOperation.getCounter() == 1;
}
if (rid.getClusterPosition() != physicalPosition.clusterPosition) {
throw new OConcurrentCreateException(rid, new ORecordId(rid.getClusterId(), physicalPosition.clusterPosition));
}
}
positions.put(recordOperation, physicalPosition); // depends on control dependency: [if], data = [none]
rid.setClusterPosition(physicalPosition.clusterPosition); // depends on control dependency: [if], data = [none]
transaction.updateIdentityAfterCommit(oldRID, rid); // depends on control dependency: [if], data = [none]
}
}
lockRidBags(clustersToLock, indexOperations, indexManager); // depends on control dependency: [try], data = [none]
checkReadOnlyConditions(); // depends on control dependency: [try], data = [none]
for (final ORecordOperation recordOperation : recordOperations) {
assert atomicOperation.getCounter() == 1;
commitEntry(recordOperation, positions.get(recordOperation), database.getSerializer()); // depends on control dependency: [for], data = [recordOperation]
assert atomicOperation.getCounter() == 1;
result.add(recordOperation); // depends on control dependency: [for], data = [recordOperation]
}
lockIndexes(indexOperations); // depends on control dependency: [try], data = [none]
checkReadOnlyConditions(); // depends on control dependency: [try], data = [none]
commitIndexes(indexOperations, atomicOperation); // depends on control dependency: [try], data = [none]
} catch (final IOException | RuntimeException e) {
rollback = true;
if (e instanceof RuntimeException) {
throw ((RuntimeException) e);
} else {
throw OException.wrapException(new OStorageException("Error during transaction commit"), e);
}
} finally { // depends on control dependency: [catch], data = [none]
if (rollback) {
rollback(transaction); // depends on control dependency: [if], data = [none]
} else {
endStorageTx(transaction, recordOperations); // depends on control dependency: [if], data = [none]
}
this.transaction.set(null);
}
} finally {
atomicOperationsManager.ensureThatComponentsUnlocked();
database.getMetadata().clearThreadLocalSchemaSnapshot();
}
} finally {
try {
if (modificationLock) {
final List<ORID> recordLocks = new ArrayList<>();
for (final ORecordOperation recordOperation : recordOperations) {
if (recordOperation.type == ORecordOperation.UPDATED || recordOperation.type == ORecordOperation.DELETED) {
recordLocks.add(recordOperation.getRID()); // depends on control dependency: [if], data = [none]
}
}
final Set<ORID> locked = transaction.getLockedRecords();
if (locked != null) {
recordLocks.removeAll(locked); // depends on control dependency: [if], data = [(locked]
}
for (final ORID rid : recordLocks) {
releaseWriteLock(rid); // depends on control dependency: [for], data = [rid]
}
}
} finally {
stateLock.releaseReadLock();
}
}
if (OLogManager.instance().isDebugEnabled()) {
OLogManager.instance()
.debug(this, "%d Committed transaction %d on database '%s' (result=%s)", Thread.currentThread().getId(),
transaction.getId(), database.getName(), result); // depends on control dependency: [if], data = [none]
}
return result; // depends on control dependency: [try], data = [none]
} catch (final RuntimeException ee) {
throw logAndPrepareForRethrow(ee);
} catch (final Error ee) { // depends on control dependency: [catch], data = [none]
handleJVMError(ee);
OAtomicOperationsManager.alarmClearOfAtomicOperation();
throw logAndPrepareForRethrow(ee);
} catch (final Throwable t) { // depends on control dependency: [catch], data = [none]
throw logAndPrepareForRethrow(t);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public static String createXMLFeatureDescriptor(
final TrainingParameters params) throws IOException {
final Element aggGenerators = new Element("generators");
final Document doc = new Document(aggGenerators);
// <generators>
// <cache>
// <generators>
final Element cached = new Element("cache");
final Element generators = new Element("generators");
// <window prevLength="2" nextLength="2">
// <token />
// </window>
if (Flags.isTokenFeature(params)) {
setWindow(params);
final String tokenFeatureRange = Flags.getTokenFeaturesRange(params);
final Element tokenFeature = new Element("custom");
tokenFeature.setAttribute("class", TokenFeatureGenerator.class.getName());
tokenFeature.setAttribute("range", tokenFeatureRange);
final Element tokenWindow = new Element("window");
tokenWindow.setAttribute("prevLength", Integer.toString(leftWindow));
tokenWindow.setAttribute("nextLength", Integer.toString(rightWindow));
tokenWindow.addContent(tokenFeature);
generators.addContent(tokenWindow);
System.err.println("-> Token features added!: Window range " + leftWindow
+ ":" + rightWindow);
}
if (Flags.isTokenClassFeature(params)) {
setWindow(params);
final String tokenClassFeatureRange = Flags
.getTokenClassFeaturesRange(params);
final Element tokenClassFeature = new Element("custom");
tokenClassFeature.setAttribute("class",
TokenClassFeatureGenerator.class.getName());
tokenClassFeature.setAttribute("range", tokenClassFeatureRange);
final Element tokenClassWindow = new Element("window");
tokenClassWindow.setAttribute("prevLength", Integer.toString(leftWindow));
tokenClassWindow.setAttribute("nextLength",
Integer.toString(rightWindow));
tokenClassWindow.addContent(tokenClassFeature);
generators.addContent(tokenClassWindow);
System.err.println("-> Token Class Features added!: Window range "
+ leftWindow + ":" + rightWindow);
}
if (Flags.isWordShapeSuperSenseFeature(params)) {
setWindow(params);
final Element wordShapeSuperSenseFeature = new Element("custom");
wordShapeSuperSenseFeature.setAttribute("class",
WordShapeSuperSenseFeatureGenerator.class.getName());
final Element wordShapeWindow = new Element("window");
wordShapeWindow.setAttribute("prevLength", Integer.toString(leftWindow));
wordShapeWindow.setAttribute("nextLength", Integer.toString(rightWindow));
wordShapeWindow.addContent(wordShapeSuperSenseFeature);
generators.addContent(wordShapeWindow);
System.err
.println("-> Word Shape SuperSense Features added!: Window range "
+ leftWindow + ":" + rightWindow);
}
if (Flags.isOutcomePriorFeature(params)) {
final Element outcomePriorFeature = new Element("custom");
outcomePriorFeature.setAttribute("class",
OutcomePriorFeatureGenerator.class.getName());
generators.addContent(outcomePriorFeature);
System.err.println("-> Outcome Prior Features added!");
}
if (Flags.isPreviousMapFeature(params)) {
final Element previousMapFeature = new Element("custom");
previousMapFeature.setAttribute("class",
PreviousMapFeatureGenerator.class.getName());
generators.addContent(previousMapFeature);
System.err.println("-> Previous Map Features added!");
}
if (Flags.isSentenceFeature(params)) {
final String beginSentence = Flags.getSentenceFeaturesBegin(params);
final String endSentence = Flags.getSentenceFeaturesEnd(params);
final Element sentenceFeature = new Element("custom");
sentenceFeature.setAttribute("class",
SentenceFeatureGenerator.class.getName());
sentenceFeature.setAttribute("begin", beginSentence);
sentenceFeature.setAttribute("end", endSentence);
generators.addContent(sentenceFeature);
System.err.println("-> Sentence Features added!");
}
if (Flags.isPrefixFeature(params)) {
final String beginPrefix = Flags.getPrefixFeaturesBegin(params);
final String endPrefix = Flags.getPrefixFeaturesEnd(params);
final Element prefixFeature = new Element("custom");
prefixFeature.setAttribute("class",
PrefixFeatureGenerator.class.getName());
prefixFeature.setAttribute("begin", beginPrefix);
prefixFeature.setAttribute("end", endPrefix);
generators.addContent(prefixFeature);
System.err.println("-> Prefix Features added!");
}
if (Flags.isSuffixFeature(params)) {
final String beginSuffix = Flags.getSuffixFeaturesBegin(params);
final String endSuffix = Flags.getSuffixFeaturesEnd(params);
final Element suffixFeature = new Element("custom");
suffixFeature.setAttribute("class",
SuffixFeatureGenerator.class.getName());
suffixFeature.setAttribute("begin", beginSuffix);
suffixFeature.setAttribute("end", endSuffix);
generators.addContent(suffixFeature);
System.err.println("-> Suffix Features added!");
}
if (Flags.isBigramClassFeature(params)) {
final Element bigramFeature = new Element("custom");
bigramFeature.setAttribute("class",
BigramClassFeatureGenerator.class.getName());
generators.addContent(bigramFeature);
System.err.println("-> Bigram Class Features added!");
}
if (Flags.isTrigramClassFeature(params)) {
final Element trigramFeature = new Element("custom");
trigramFeature.setAttribute("class",
TrigramClassFeatureGenerator.class.getName());
generators.addContent(trigramFeature);
System.err.println("-> Trigram Class Features added!");
}
if (Flags.isFourgramClassFeature(params)) {
final Element fourgramFeature = new Element("custom");
fourgramFeature.setAttribute("class",
FourgramClassFeatureGenerator.class.getName());
generators.addContent(fourgramFeature);
System.err.println("-> Fourgram Class Features added!");
}
if (Flags.isFivegramClassFeature(params)) {
final Element fivegramFeature = new Element("custom");
fivegramFeature.setAttribute("class",
FivegramClassFeatureGenerator.class.getName());
generators.addContent(fivegramFeature);
System.err.println("-> Fivegram Class Features added!");
}
if (Flags.isCharNgramClassFeature(params)) {
final String charngramRange = Flags.getCharNgramFeaturesRange(params);
final String[] rangeArray = Flags.processNgramRange(charngramRange);
final Element charngramFeature = new Element("custom");
charngramFeature.setAttribute("class",
CharacterNgramFeatureGenerator.class.getName());
charngramFeature.setAttribute("minLength", rangeArray[0]);
charngramFeature.setAttribute("maxLength", rangeArray[1]);
generators.addContent(charngramFeature);
System.err.println("-> CharNgram Class Features added!");
}
// Dictionary Features
if (Flags.isDictionaryFeatures(params)) {
setWindow(params);
final String dictPath = Flags.getDictionaryFeatures(params);
final String seqCodec = Flags.getSequenceCodec(params);
final List<File> fileList = StringUtils.getFilesInDir(new File(dictPath));
for (final File dictFile : fileList) {
final Element dictFeatures = new Element("custom");
dictFeatures.setAttribute("class",
DictionaryFeatureGenerator.class.getName());
dictFeatures.setAttribute("dict",
IOUtils.normalizeLexiconName(dictFile.getName()));
dictFeatures.setAttribute("seqCodec", seqCodec);
final Element dictWindow = new Element("window");
dictWindow.setAttribute("prevLength", Integer.toString(leftWindow));
dictWindow.setAttribute("nextLength", Integer.toString(rightWindow));
dictWindow.addContent(dictFeatures);
generators.addContent(dictWindow);
}
System.err.println("-> Dictionary Features added!");
}
// Brown clustering features
if (Flags.isBrownFeatures(params)) {
setWindow(params);
// previous 2 maps features
final Element prev2MapFeature = new Element("custom");
prev2MapFeature.setAttribute("class",
Prev2MapFeatureGenerator.class.getName());
generators.addContent(prev2MapFeature);
// previous map and token feature (in window)
final Element prevMapTokenFeature = new Element("custom");
prevMapTokenFeature.setAttribute("class",
PreviousMapTokenFeatureGenerator.class.getName());
final Element prevMapTokenWindow = new Element("window");
prevMapTokenWindow.setAttribute("prevLength",
Integer.toString(leftWindow));
prevMapTokenWindow.setAttribute("nextLength",
Integer.toString(rightWindow));
prevMapTokenWindow.addContent(prevMapTokenFeature);
generators.addContent(prevMapTokenWindow);
// brown clustering features
final String brownClusterPath = Flags.getBrownFeatures(params);
final List<File> brownClusterFiles = Flags
.getClusterLexiconFiles(brownClusterPath);
for (final File brownClusterFile : brownClusterFiles) {
// brown bigram class features
final Element brownBigramFeatures = new Element("custom");
brownBigramFeatures.setAttribute("class",
BrownBigramFeatureGenerator.class.getName());
brownBigramFeatures.setAttribute("dict",
IOUtils.normalizeLexiconName(brownClusterFile.getName()));
generators.addContent(brownBigramFeatures);
// brown token feature
final Element brownTokenFeature = new Element("custom");
brownTokenFeature.setAttribute("class",
BrownTokenFeatureGenerator.class.getName());
brownTokenFeature.setAttribute("dict",
IOUtils.normalizeLexiconName(brownClusterFile.getName()));
final Element brownTokenWindow = new Element("window");
brownTokenWindow.setAttribute("prevLength",
Integer.toString(leftWindow));
brownTokenWindow.setAttribute("nextLength",
Integer.toString(rightWindow));
brownTokenWindow.addContent(brownTokenFeature);
generators.addContent(brownTokenWindow);
// brown token class feature
final Element brownTokenClassFeature = new Element("custom");
brownTokenClassFeature.setAttribute("class",
BrownTokenClassFeatureGenerator.class.getName());
brownTokenClassFeature.setAttribute("dict",
IOUtils.normalizeLexiconName(brownClusterFile.getName()));
final Element brownTokenClassWindow = new Element("window");
brownTokenClassWindow.setAttribute("prevLength",
Integer.toString(leftWindow));
brownTokenClassWindow.setAttribute("nextLength",
Integer.toString(rightWindow));
brownTokenClassWindow.addContent(brownTokenClassFeature);
generators.addContent(brownTokenClassWindow);
}
System.err.println("-> Brown Cluster Features added!");
}
// Clark clustering features
if (Flags.isClarkFeatures(params)) {
setWindow(params);
final String clarkClusterPath = Flags.getClarkFeatures(params);
final List<File> clarkClusterFiles = Flags
.getClusterLexiconFiles(clarkClusterPath);
for (final File clarkCluster : clarkClusterFiles) {
final Element clarkFeatures = new Element("custom");
clarkFeatures.setAttribute("class",
ClarkFeatureGenerator.class.getName());
clarkFeatures.setAttribute("dict",
IOUtils.normalizeLexiconName(clarkCluster.getName()));
final Element clarkWindow = new Element("window");
clarkWindow.setAttribute("prevLength", Integer.toString(leftWindow));
clarkWindow.setAttribute("nextLength", Integer.toString(rightWindow));
clarkWindow.addContent(clarkFeatures);
generators.addContent(clarkWindow);
}
System.err.println("-> Clark Cluster Features added!");
}
// word2vec clustering features
if (Flags.isWord2VecClusterFeatures(params)) {
setWindow(params);
final String word2vecClusterPath = Flags
.getWord2VecClusterFeatures(params);
final List<File> word2vecClusterFiles = Flags
.getClusterLexiconFiles(word2vecClusterPath);
for (final File word2vecFile : word2vecClusterFiles) {
final Element word2vecClusterFeatures = new Element("custom");
word2vecClusterFeatures.setAttribute("class",
Word2VecClusterFeatureGenerator.class.getName());
word2vecClusterFeatures.setAttribute("dict",
IOUtils.normalizeLexiconName(word2vecFile.getName()));
final Element word2vecClusterWindow = new Element("window");
word2vecClusterWindow.setAttribute("prevLength",
Integer.toString(leftWindow));
word2vecClusterWindow.setAttribute("nextLength",
Integer.toString(rightWindow));
word2vecClusterWindow.addContent(word2vecClusterFeatures);
generators.addContent(word2vecClusterWindow);
}
System.err.println("-> Word2Vec Clusters Features added!");
}
// Morphological features
if (Flags.isPOSTagModelFeatures(params)) {
setWindow(params);
final String posModelPath = Flags.getPOSTagModelFeatures(params);
final String posModelRange = Flags.getPOSTagModelFeaturesRange(params);
final Element posTagClassFeatureElement = new Element("custom");
posTagClassFeatureElement.setAttribute("class",
POSTagModelFeatureGenerator.class.getName());
posTagClassFeatureElement.setAttribute("model",
IOUtils.normalizeLexiconName(new File(posModelPath).getName()));
posTagClassFeatureElement.setAttribute("range", posModelRange);
final Element posTagClassFeatureWindow = new Element("window");
posTagClassFeatureWindow.setAttribute("prevLength",
Integer.toString(leftWindow));
posTagClassFeatureWindow.setAttribute("nextLength",
Integer.toString(rightWindow));
posTagClassFeatureWindow.addContent(posTagClassFeatureElement);
generators.addContent(posTagClassFeatureWindow);
System.err.println("-> POSTagModel Features added!");
}
if (Flags.isPOSDictionaryFeatures(params)) {
setWindow(params);
final String posDictPath = Flags.getPOSDictionaryFeatures(params);
final Element posDictFeatures = new Element("custom");
posDictFeatures.setAttribute("class",
POSDictionaryFeatureGenerator.class.getName());
posDictFeatures.setAttribute("dict",
IOUtils.normalizeLexiconName(new File(posDictPath).getName()));
final Element posDictWindow = new Element("window");
posDictWindow.setAttribute("prevLength", Integer.toString(leftWindow));
posDictWindow.setAttribute("nextLength", Integer.toString(rightWindow));
posDictWindow.addContent(posDictFeatures);
generators.addContent(posDictWindow);
System.err.println("-> POSDictionary Features added!");
}
if (Flags.isLemmaModelFeatures(params)) {
setWindow(params);
final String lemmaModelPath = Flags.getLemmaModelFeatures(params);
final Element lemmaClassFeatureElement = new Element("custom");
lemmaClassFeatureElement.setAttribute("class",
LemmaModelFeatureGenerator.class.getName());
lemmaClassFeatureElement.setAttribute("model",
IOUtils.normalizeLexiconName(new File(lemmaModelPath).getName()));
final Element lemmaClassFeatureWindow = new Element("window");
lemmaClassFeatureWindow.setAttribute("prevLength",
Integer.toString(leftWindow));
lemmaClassFeatureWindow.setAttribute("nextLength",
Integer.toString(rightWindow));
lemmaClassFeatureWindow.addContent(lemmaClassFeatureElement);
generators.addContent(lemmaClassFeatureWindow);
System.err.println("-> LemmaModel Features added!");
}
if (Flags.isLemmaDictionaryFeatures(params)) {
setWindow(params);
final String lemmaDictPath = Flags.getLemmaDictionaryFeatures(params);
final String[] lemmaDictResources = Flags
.getLemmaDictionaryResources(lemmaDictPath);
final Element lemmaClassFeatureElement = new Element("custom");
lemmaClassFeatureElement.setAttribute("class",
LemmaDictionaryFeatureGenerator.class.getName());
lemmaClassFeatureElement.setAttribute("model", IOUtils
.normalizeLexiconName(new File(lemmaDictResources[0]).getName()));
lemmaClassFeatureElement.setAttribute("dict", IOUtils
.normalizeLexiconName(new File(lemmaDictResources[1]).getName()));
final Element lemmaClassFeatureWindow = new Element("window");
lemmaClassFeatureWindow.setAttribute("prevLength",
Integer.toString(leftWindow));
lemmaClassFeatureWindow.setAttribute("nextLength",
Integer.toString(rightWindow));
lemmaClassFeatureWindow.addContent(lemmaClassFeatureElement);
generators.addContent(lemmaClassFeatureWindow);
System.err.println("-> LemmaDictionary Features added!");
}
if (Flags.isMFSFeatures(params)) {
setWindow(params);
final String mfsPath = Flags.getMFSFeatures(params);
final String[] mfsResources = Flags.getMFSResources(mfsPath);
final String mfsRange = Flags.getMFSFeaturesRange(params);
final String seqCodec = Flags.getSequenceCodec(params);
final Element mfsClassFeatureElement = new Element("custom");
mfsClassFeatureElement.setAttribute("class",
MFSFeatureGenerator.class.getName());
mfsClassFeatureElement.setAttribute("model",
IOUtils.normalizeLexiconName(new File(mfsResources[0]).getName()));
mfsClassFeatureElement.setAttribute("dict",
IOUtils.normalizeLexiconName(new File(mfsResources[1]).getName()));
mfsClassFeatureElement.setAttribute("mfs",
IOUtils.normalizeLexiconName(new File(mfsResources[2]).getName()));
mfsClassFeatureElement.setAttribute("range", mfsRange);
mfsClassFeatureElement.setAttribute("seqCodec", seqCodec);
final Element mfsClassFeatureWindow = new Element("window");
mfsClassFeatureWindow.setAttribute("prevLength",
Integer.toString(leftWindow));
mfsClassFeatureWindow.setAttribute("nextLength",
Integer.toString(rightWindow));
mfsClassFeatureWindow.addContent(mfsClassFeatureElement);
generators.addContent(mfsClassFeatureWindow);
System.err.println("-> MFS Features added");
}
if (Flags.isSuperSenseFeatures(params)) {
final String mfsPath = Flags.getSuperSenseFeatures(params);
final String[] mfsResources = Flags.getSuperSenseResources(mfsPath);
final String mfsRange = Flags.getSuperSenseFeaturesRange(params);
final String seqCodec = Flags.getSequenceCodec(params);
final Element mfsClassFeatureElement = new Element("custom");
mfsClassFeatureElement.setAttribute("class",
SuperSenseFeatureGenerator.class.getName());
mfsClassFeatureElement.setAttribute("model",
IOUtils.normalizeLexiconName(new File(mfsResources[0]).getName()));
mfsClassFeatureElement.setAttribute("dict",
IOUtils.normalizeLexiconName(new File(mfsResources[1]).getName()));
mfsClassFeatureElement.setAttribute("mfs",
IOUtils.normalizeLexiconName(new File(mfsResources[2]).getName()));
mfsClassFeatureElement.setAttribute("range", mfsRange);
mfsClassFeatureElement.setAttribute("seqCodec", seqCodec);
generators.addContent(mfsClassFeatureElement);
System.err.println("-> SuperSense Features added!");
}
if (Flags.isPOSBaselineFeatures(params)) {
final String beginPrefix = Flags.getPrefixBegin(params);
final String endPrefix = Flags.getPrefixEnd(params);
final String beginSuffix = Flags.getSuffixBegin(params);
final String endSuffix = Flags.getSuffixEnd(params);
final Element posFeatureElement = new Element("custom");
posFeatureElement.setAttribute("class",
POSBaselineContextGenerator.class.getName());
posFeatureElement.setAttribute("prefBegin", beginPrefix);
posFeatureElement.setAttribute("prefEnd", endPrefix);
posFeatureElement.setAttribute("sufBegin", beginSuffix);
posFeatureElement.setAttribute("sufEnd", endSuffix);
generators.addContent(posFeatureElement);
System.err.println("-> POS Baseline Context Generator added!");
}
if (Flags.isLemmaBaselineFeatures(params)) {
final String beginPrefix = Flags.getPrefixBegin(params);
final String endPrefix = Flags.getPrefixEnd(params);
final String beginSuffix = Flags.getSuffixBegin(params);
final String endSuffix = Flags.getSuffixEnd(params);
final String posModel = Flags.getLemmaBaselineFeatures(params);
final String lemmaRange = Flags.getLemmaBaselineFeaturesRange(params);
final Element lemmaFeatureElement = new Element("custom");
lemmaFeatureElement.setAttribute("class",
LemmaBaselineContextGenerator.class.getName());
lemmaFeatureElement.setAttribute("prefBegin", beginPrefix);
lemmaFeatureElement.setAttribute("prefEnd", endPrefix);
lemmaFeatureElement.setAttribute("sufBegin", beginSuffix);
lemmaFeatureElement.setAttribute("sufEnd", endSuffix);
lemmaFeatureElement.setAttribute("model",
IOUtils.normalizeLexiconName(new File(posModel).getName()));
lemmaFeatureElement.setAttribute("range", lemmaRange);
generators.addContent(lemmaFeatureElement);
System.err.println("-> Lemma Baseline Context Generator added!");
}
if (Flags.isChunkBaselineFeatures(params)) {
final String posModel = Flags.getChunkBaselineFeatures(params);
final Element chunkFeatureElement = new Element("custom");
chunkFeatureElement.setAttribute("class",
ChunkBaselineContextGenerator.class.getName());
chunkFeatureElement.setAttribute("model",
IOUtils.normalizeLexiconName(new File(posModel).getName()));
generators.addContent(chunkFeatureElement);
System.err.println("-> Chunk Baseline Context Generator added!");
}
if (Flags.isPredicateContextFeatures(params)) {
final String predicateContextFile = Flags
.getPredicateContextFeatures(params);
final Element predicateContextFeatureElement = new Element("custom");
predicateContextFeatureElement.setAttribute("class",
PredicateContextFeatureGenerator.class.getName());
predicateContextFeatureElement.setAttribute("dict", IOUtils
.normalizeLexiconName(new File(predicateContextFile).getName()));
generators.addContent(predicateContextFeatureElement);
System.err.println("-> Predicate Context Generator added!");
}
aggGenerators.addContent(cached);
cached.addContent(generators);
final XMLOutputter xmlOutput = new XMLOutputter();
final Format format = Format.getPrettyFormat();
xmlOutput.setFormat(format);
return xmlOutput.outputString(doc);
} } | public class class_name {
public static String createXMLFeatureDescriptor(
final TrainingParameters params) throws IOException {
final Element aggGenerators = new Element("generators");
final Document doc = new Document(aggGenerators);
// <generators>
// <cache>
// <generators>
final Element cached = new Element("cache");
final Element generators = new Element("generators");
// <window prevLength="2" nextLength="2">
// <token />
// </window>
if (Flags.isTokenFeature(params)) {
setWindow(params);
final String tokenFeatureRange = Flags.getTokenFeaturesRange(params);
final Element tokenFeature = new Element("custom");
tokenFeature.setAttribute("class", TokenFeatureGenerator.class.getName());
tokenFeature.setAttribute("range", tokenFeatureRange);
final Element tokenWindow = new Element("window");
tokenWindow.setAttribute("prevLength", Integer.toString(leftWindow));
tokenWindow.setAttribute("nextLength", Integer.toString(rightWindow));
tokenWindow.addContent(tokenFeature);
generators.addContent(tokenWindow);
System.err.println("-> Token features added!: Window range " + leftWindow
+ ":" + rightWindow);
}
if (Flags.isTokenClassFeature(params)) {
setWindow(params);
final String tokenClassFeatureRange = Flags
.getTokenClassFeaturesRange(params);
final Element tokenClassFeature = new Element("custom");
tokenClassFeature.setAttribute("class",
TokenClassFeatureGenerator.class.getName());
tokenClassFeature.setAttribute("range", tokenClassFeatureRange);
final Element tokenClassWindow = new Element("window");
tokenClassWindow.setAttribute("prevLength", Integer.toString(leftWindow));
tokenClassWindow.setAttribute("nextLength",
Integer.toString(rightWindow));
tokenClassWindow.addContent(tokenClassFeature);
generators.addContent(tokenClassWindow);
System.err.println("-> Token Class Features added!: Window range "
+ leftWindow + ":" + rightWindow);
}
if (Flags.isWordShapeSuperSenseFeature(params)) {
setWindow(params);
final Element wordShapeSuperSenseFeature = new Element("custom");
wordShapeSuperSenseFeature.setAttribute("class",
WordShapeSuperSenseFeatureGenerator.class.getName());
final Element wordShapeWindow = new Element("window");
wordShapeWindow.setAttribute("prevLength", Integer.toString(leftWindow));
wordShapeWindow.setAttribute("nextLength", Integer.toString(rightWindow));
wordShapeWindow.addContent(wordShapeSuperSenseFeature);
generators.addContent(wordShapeWindow);
System.err
.println("-> Word Shape SuperSense Features added!: Window range "
+ leftWindow + ":" + rightWindow);
}
if (Flags.isOutcomePriorFeature(params)) {
final Element outcomePriorFeature = new Element("custom");
outcomePriorFeature.setAttribute("class",
OutcomePriorFeatureGenerator.class.getName());
generators.addContent(outcomePriorFeature);
System.err.println("-> Outcome Prior Features added!");
}
if (Flags.isPreviousMapFeature(params)) {
final Element previousMapFeature = new Element("custom");
previousMapFeature.setAttribute("class",
PreviousMapFeatureGenerator.class.getName());
generators.addContent(previousMapFeature);
System.err.println("-> Previous Map Features added!");
}
if (Flags.isSentenceFeature(params)) {
final String beginSentence = Flags.getSentenceFeaturesBegin(params);
final String endSentence = Flags.getSentenceFeaturesEnd(params);
final Element sentenceFeature = new Element("custom");
sentenceFeature.setAttribute("class",
SentenceFeatureGenerator.class.getName());
sentenceFeature.setAttribute("begin", beginSentence);
sentenceFeature.setAttribute("end", endSentence);
generators.addContent(sentenceFeature);
System.err.println("-> Sentence Features added!");
}
if (Flags.isPrefixFeature(params)) {
final String beginPrefix = Flags.getPrefixFeaturesBegin(params);
final String endPrefix = Flags.getPrefixFeaturesEnd(params);
final Element prefixFeature = new Element("custom");
prefixFeature.setAttribute("class",
PrefixFeatureGenerator.class.getName());
prefixFeature.setAttribute("begin", beginPrefix);
prefixFeature.setAttribute("end", endPrefix);
generators.addContent(prefixFeature);
System.err.println("-> Prefix Features added!");
}
if (Flags.isSuffixFeature(params)) {
final String beginSuffix = Flags.getSuffixFeaturesBegin(params);
final String endSuffix = Flags.getSuffixFeaturesEnd(params);
final Element suffixFeature = new Element("custom");
suffixFeature.setAttribute("class",
SuffixFeatureGenerator.class.getName());
suffixFeature.setAttribute("begin", beginSuffix);
suffixFeature.setAttribute("end", endSuffix);
generators.addContent(suffixFeature);
System.err.println("-> Suffix Features added!");
}
if (Flags.isBigramClassFeature(params)) {
final Element bigramFeature = new Element("custom");
bigramFeature.setAttribute("class",
BigramClassFeatureGenerator.class.getName());
generators.addContent(bigramFeature);
System.err.println("-> Bigram Class Features added!");
}
if (Flags.isTrigramClassFeature(params)) {
final Element trigramFeature = new Element("custom");
trigramFeature.setAttribute("class",
TrigramClassFeatureGenerator.class.getName());
generators.addContent(trigramFeature);
System.err.println("-> Trigram Class Features added!");
}
if (Flags.isFourgramClassFeature(params)) {
final Element fourgramFeature = new Element("custom");
fourgramFeature.setAttribute("class",
FourgramClassFeatureGenerator.class.getName());
generators.addContent(fourgramFeature);
System.err.println("-> Fourgram Class Features added!");
}
if (Flags.isFivegramClassFeature(params)) {
final Element fivegramFeature = new Element("custom");
fivegramFeature.setAttribute("class",
FivegramClassFeatureGenerator.class.getName());
generators.addContent(fivegramFeature);
System.err.println("-> Fivegram Class Features added!");
}
if (Flags.isCharNgramClassFeature(params)) {
final String charngramRange = Flags.getCharNgramFeaturesRange(params);
final String[] rangeArray = Flags.processNgramRange(charngramRange);
final Element charngramFeature = new Element("custom");
charngramFeature.setAttribute("class",
CharacterNgramFeatureGenerator.class.getName());
charngramFeature.setAttribute("minLength", rangeArray[0]);
charngramFeature.setAttribute("maxLength", rangeArray[1]);
generators.addContent(charngramFeature);
System.err.println("-> CharNgram Class Features added!");
}
// Dictionary Features
if (Flags.isDictionaryFeatures(params)) {
setWindow(params);
final String dictPath = Flags.getDictionaryFeatures(params);
final String seqCodec = Flags.getSequenceCodec(params);
final List<File> fileList = StringUtils.getFilesInDir(new File(dictPath));
for (final File dictFile : fileList) {
final Element dictFeatures = new Element("custom");
dictFeatures.setAttribute("class",
DictionaryFeatureGenerator.class.getName()); // depends on control dependency: [for], data = [none]
dictFeatures.setAttribute("dict",
IOUtils.normalizeLexiconName(dictFile.getName())); // depends on control dependency: [for], data = [none]
dictFeatures.setAttribute("seqCodec", seqCodec); // depends on control dependency: [for], data = [none]
final Element dictWindow = new Element("window");
dictWindow.setAttribute("prevLength", Integer.toString(leftWindow)); // depends on control dependency: [for], data = [none]
dictWindow.setAttribute("nextLength", Integer.toString(rightWindow)); // depends on control dependency: [for], data = [none]
dictWindow.addContent(dictFeatures); // depends on control dependency: [for], data = [none]
generators.addContent(dictWindow); // depends on control dependency: [for], data = [none]
}
System.err.println("-> Dictionary Features added!");
}
// Brown clustering features
if (Flags.isBrownFeatures(params)) {
setWindow(params);
// previous 2 maps features
final Element prev2MapFeature = new Element("custom");
prev2MapFeature.setAttribute("class",
Prev2MapFeatureGenerator.class.getName());
generators.addContent(prev2MapFeature);
// previous map and token feature (in window)
final Element prevMapTokenFeature = new Element("custom");
prevMapTokenFeature.setAttribute("class",
PreviousMapTokenFeatureGenerator.class.getName());
final Element prevMapTokenWindow = new Element("window");
prevMapTokenWindow.setAttribute("prevLength",
Integer.toString(leftWindow));
prevMapTokenWindow.setAttribute("nextLength",
Integer.toString(rightWindow));
prevMapTokenWindow.addContent(prevMapTokenFeature);
generators.addContent(prevMapTokenWindow);
// brown clustering features
final String brownClusterPath = Flags.getBrownFeatures(params);
final List<File> brownClusterFiles = Flags
.getClusterLexiconFiles(brownClusterPath);
for (final File brownClusterFile : brownClusterFiles) {
// brown bigram class features
final Element brownBigramFeatures = new Element("custom");
brownBigramFeatures.setAttribute("class",
BrownBigramFeatureGenerator.class.getName()); // depends on control dependency: [for], data = [none]
brownBigramFeatures.setAttribute("dict",
IOUtils.normalizeLexiconName(brownClusterFile.getName())); // depends on control dependency: [for], data = [none]
generators.addContent(brownBigramFeatures); // depends on control dependency: [for], data = [none]
// brown token feature
final Element brownTokenFeature = new Element("custom");
brownTokenFeature.setAttribute("class",
BrownTokenFeatureGenerator.class.getName()); // depends on control dependency: [for], data = [none]
brownTokenFeature.setAttribute("dict",
IOUtils.normalizeLexiconName(brownClusterFile.getName())); // depends on control dependency: [for], data = [none]
final Element brownTokenWindow = new Element("window");
brownTokenWindow.setAttribute("prevLength",
Integer.toString(leftWindow)); // depends on control dependency: [for], data = [none]
brownTokenWindow.setAttribute("nextLength",
Integer.toString(rightWindow)); // depends on control dependency: [for], data = [none]
brownTokenWindow.addContent(brownTokenFeature); // depends on control dependency: [for], data = [none]
generators.addContent(brownTokenWindow); // depends on control dependency: [for], data = [none]
// brown token class feature
final Element brownTokenClassFeature = new Element("custom");
brownTokenClassFeature.setAttribute("class",
BrownTokenClassFeatureGenerator.class.getName()); // depends on control dependency: [for], data = [none]
brownTokenClassFeature.setAttribute("dict",
IOUtils.normalizeLexiconName(brownClusterFile.getName())); // depends on control dependency: [for], data = [none]
final Element brownTokenClassWindow = new Element("window");
brownTokenClassWindow.setAttribute("prevLength",
Integer.toString(leftWindow)); // depends on control dependency: [for], data = [none]
brownTokenClassWindow.setAttribute("nextLength",
Integer.toString(rightWindow)); // depends on control dependency: [for], data = [none]
brownTokenClassWindow.addContent(brownTokenClassFeature); // depends on control dependency: [for], data = [none]
generators.addContent(brownTokenClassWindow); // depends on control dependency: [for], data = [none]
}
System.err.println("-> Brown Cluster Features added!");
}
// Clark clustering features
if (Flags.isClarkFeatures(params)) {
setWindow(params);
final String clarkClusterPath = Flags.getClarkFeatures(params);
final List<File> clarkClusterFiles = Flags
.getClusterLexiconFiles(clarkClusterPath);
for (final File clarkCluster : clarkClusterFiles) {
final Element clarkFeatures = new Element("custom");
clarkFeatures.setAttribute("class",
ClarkFeatureGenerator.class.getName()); // depends on control dependency: [for], data = [none]
clarkFeatures.setAttribute("dict",
IOUtils.normalizeLexiconName(clarkCluster.getName())); // depends on control dependency: [for], data = [none]
final Element clarkWindow = new Element("window");
clarkWindow.setAttribute("prevLength", Integer.toString(leftWindow)); // depends on control dependency: [for], data = [none]
clarkWindow.setAttribute("nextLength", Integer.toString(rightWindow)); // depends on control dependency: [for], data = [none]
clarkWindow.addContent(clarkFeatures); // depends on control dependency: [for], data = [none]
generators.addContent(clarkWindow); // depends on control dependency: [for], data = [none]
}
System.err.println("-> Clark Cluster Features added!");
}
// word2vec clustering features
if (Flags.isWord2VecClusterFeatures(params)) {
setWindow(params);
final String word2vecClusterPath = Flags
.getWord2VecClusterFeatures(params);
final List<File> word2vecClusterFiles = Flags
.getClusterLexiconFiles(word2vecClusterPath);
for (final File word2vecFile : word2vecClusterFiles) {
final Element word2vecClusterFeatures = new Element("custom");
word2vecClusterFeatures.setAttribute("class",
Word2VecClusterFeatureGenerator.class.getName()); // depends on control dependency: [for], data = [none]
word2vecClusterFeatures.setAttribute("dict",
IOUtils.normalizeLexiconName(word2vecFile.getName())); // depends on control dependency: [for], data = [none]
final Element word2vecClusterWindow = new Element("window");
word2vecClusterWindow.setAttribute("prevLength",
Integer.toString(leftWindow)); // depends on control dependency: [for], data = [none]
word2vecClusterWindow.setAttribute("nextLength",
Integer.toString(rightWindow)); // depends on control dependency: [for], data = [none]
word2vecClusterWindow.addContent(word2vecClusterFeatures); // depends on control dependency: [for], data = [none]
generators.addContent(word2vecClusterWindow); // depends on control dependency: [for], data = [none]
}
System.err.println("-> Word2Vec Clusters Features added!");
}
// Morphological features
if (Flags.isPOSTagModelFeatures(params)) {
setWindow(params);
final String posModelPath = Flags.getPOSTagModelFeatures(params);
final String posModelRange = Flags.getPOSTagModelFeaturesRange(params);
final Element posTagClassFeatureElement = new Element("custom");
posTagClassFeatureElement.setAttribute("class",
POSTagModelFeatureGenerator.class.getName());
posTagClassFeatureElement.setAttribute("model",
IOUtils.normalizeLexiconName(new File(posModelPath).getName()));
posTagClassFeatureElement.setAttribute("range", posModelRange);
final Element posTagClassFeatureWindow = new Element("window");
posTagClassFeatureWindow.setAttribute("prevLength",
Integer.toString(leftWindow));
posTagClassFeatureWindow.setAttribute("nextLength",
Integer.toString(rightWindow));
posTagClassFeatureWindow.addContent(posTagClassFeatureElement);
generators.addContent(posTagClassFeatureWindow);
System.err.println("-> POSTagModel Features added!");
}
if (Flags.isPOSDictionaryFeatures(params)) {
setWindow(params);
final String posDictPath = Flags.getPOSDictionaryFeatures(params);
final Element posDictFeatures = new Element("custom");
posDictFeatures.setAttribute("class",
POSDictionaryFeatureGenerator.class.getName());
posDictFeatures.setAttribute("dict",
IOUtils.normalizeLexiconName(new File(posDictPath).getName()));
final Element posDictWindow = new Element("window");
posDictWindow.setAttribute("prevLength", Integer.toString(leftWindow));
posDictWindow.setAttribute("nextLength", Integer.toString(rightWindow));
posDictWindow.addContent(posDictFeatures);
generators.addContent(posDictWindow);
System.err.println("-> POSDictionary Features added!");
}
if (Flags.isLemmaModelFeatures(params)) {
setWindow(params);
final String lemmaModelPath = Flags.getLemmaModelFeatures(params);
final Element lemmaClassFeatureElement = new Element("custom");
lemmaClassFeatureElement.setAttribute("class",
LemmaModelFeatureGenerator.class.getName());
lemmaClassFeatureElement.setAttribute("model",
IOUtils.normalizeLexiconName(new File(lemmaModelPath).getName()));
final Element lemmaClassFeatureWindow = new Element("window");
lemmaClassFeatureWindow.setAttribute("prevLength",
Integer.toString(leftWindow));
lemmaClassFeatureWindow.setAttribute("nextLength",
Integer.toString(rightWindow));
lemmaClassFeatureWindow.addContent(lemmaClassFeatureElement);
generators.addContent(lemmaClassFeatureWindow);
System.err.println("-> LemmaModel Features added!");
}
if (Flags.isLemmaDictionaryFeatures(params)) {
setWindow(params);
final String lemmaDictPath = Flags.getLemmaDictionaryFeatures(params);
final String[] lemmaDictResources = Flags
.getLemmaDictionaryResources(lemmaDictPath);
final Element lemmaClassFeatureElement = new Element("custom");
lemmaClassFeatureElement.setAttribute("class",
LemmaDictionaryFeatureGenerator.class.getName());
lemmaClassFeatureElement.setAttribute("model", IOUtils
.normalizeLexiconName(new File(lemmaDictResources[0]).getName()));
lemmaClassFeatureElement.setAttribute("dict", IOUtils
.normalizeLexiconName(new File(lemmaDictResources[1]).getName()));
final Element lemmaClassFeatureWindow = new Element("window");
lemmaClassFeatureWindow.setAttribute("prevLength",
Integer.toString(leftWindow));
lemmaClassFeatureWindow.setAttribute("nextLength",
Integer.toString(rightWindow));
lemmaClassFeatureWindow.addContent(lemmaClassFeatureElement);
generators.addContent(lemmaClassFeatureWindow);
System.err.println("-> LemmaDictionary Features added!");
}
if (Flags.isMFSFeatures(params)) {
setWindow(params);
final String mfsPath = Flags.getMFSFeatures(params);
final String[] mfsResources = Flags.getMFSResources(mfsPath);
final String mfsRange = Flags.getMFSFeaturesRange(params);
final String seqCodec = Flags.getSequenceCodec(params);
final Element mfsClassFeatureElement = new Element("custom");
mfsClassFeatureElement.setAttribute("class",
MFSFeatureGenerator.class.getName());
mfsClassFeatureElement.setAttribute("model",
IOUtils.normalizeLexiconName(new File(mfsResources[0]).getName()));
mfsClassFeatureElement.setAttribute("dict",
IOUtils.normalizeLexiconName(new File(mfsResources[1]).getName()));
mfsClassFeatureElement.setAttribute("mfs",
IOUtils.normalizeLexiconName(new File(mfsResources[2]).getName()));
mfsClassFeatureElement.setAttribute("range", mfsRange);
mfsClassFeatureElement.setAttribute("seqCodec", seqCodec);
final Element mfsClassFeatureWindow = new Element("window");
mfsClassFeatureWindow.setAttribute("prevLength",
Integer.toString(leftWindow));
mfsClassFeatureWindow.setAttribute("nextLength",
Integer.toString(rightWindow));
mfsClassFeatureWindow.addContent(mfsClassFeatureElement);
generators.addContent(mfsClassFeatureWindow);
System.err.println("-> MFS Features added");
}
if (Flags.isSuperSenseFeatures(params)) {
final String mfsPath = Flags.getSuperSenseFeatures(params);
final String[] mfsResources = Flags.getSuperSenseResources(mfsPath);
final String mfsRange = Flags.getSuperSenseFeaturesRange(params);
final String seqCodec = Flags.getSequenceCodec(params);
final Element mfsClassFeatureElement = new Element("custom");
mfsClassFeatureElement.setAttribute("class",
SuperSenseFeatureGenerator.class.getName());
mfsClassFeatureElement.setAttribute("model",
IOUtils.normalizeLexiconName(new File(mfsResources[0]).getName()));
mfsClassFeatureElement.setAttribute("dict",
IOUtils.normalizeLexiconName(new File(mfsResources[1]).getName()));
mfsClassFeatureElement.setAttribute("mfs",
IOUtils.normalizeLexiconName(new File(mfsResources[2]).getName()));
mfsClassFeatureElement.setAttribute("range", mfsRange);
mfsClassFeatureElement.setAttribute("seqCodec", seqCodec);
generators.addContent(mfsClassFeatureElement);
System.err.println("-> SuperSense Features added!");
}
if (Flags.isPOSBaselineFeatures(params)) {
final String beginPrefix = Flags.getPrefixBegin(params);
final String endPrefix = Flags.getPrefixEnd(params);
final String beginSuffix = Flags.getSuffixBegin(params);
final String endSuffix = Flags.getSuffixEnd(params);
final Element posFeatureElement = new Element("custom");
posFeatureElement.setAttribute("class",
POSBaselineContextGenerator.class.getName());
posFeatureElement.setAttribute("prefBegin", beginPrefix);
posFeatureElement.setAttribute("prefEnd", endPrefix);
posFeatureElement.setAttribute("sufBegin", beginSuffix);
posFeatureElement.setAttribute("sufEnd", endSuffix);
generators.addContent(posFeatureElement);
System.err.println("-> POS Baseline Context Generator added!");
}
if (Flags.isLemmaBaselineFeatures(params)) {
final String beginPrefix = Flags.getPrefixBegin(params);
final String endPrefix = Flags.getPrefixEnd(params);
final String beginSuffix = Flags.getSuffixBegin(params);
final String endSuffix = Flags.getSuffixEnd(params);
final String posModel = Flags.getLemmaBaselineFeatures(params);
final String lemmaRange = Flags.getLemmaBaselineFeaturesRange(params);
final Element lemmaFeatureElement = new Element("custom");
lemmaFeatureElement.setAttribute("class",
LemmaBaselineContextGenerator.class.getName());
lemmaFeatureElement.setAttribute("prefBegin", beginPrefix);
lemmaFeatureElement.setAttribute("prefEnd", endPrefix);
lemmaFeatureElement.setAttribute("sufBegin", beginSuffix);
lemmaFeatureElement.setAttribute("sufEnd", endSuffix);
lemmaFeatureElement.setAttribute("model",
IOUtils.normalizeLexiconName(new File(posModel).getName()));
lemmaFeatureElement.setAttribute("range", lemmaRange);
generators.addContent(lemmaFeatureElement);
System.err.println("-> Lemma Baseline Context Generator added!");
}
if (Flags.isChunkBaselineFeatures(params)) {
final String posModel = Flags.getChunkBaselineFeatures(params);
final Element chunkFeatureElement = new Element("custom");
chunkFeatureElement.setAttribute("class",
ChunkBaselineContextGenerator.class.getName());
chunkFeatureElement.setAttribute("model",
IOUtils.normalizeLexiconName(new File(posModel).getName()));
generators.addContent(chunkFeatureElement);
System.err.println("-> Chunk Baseline Context Generator added!");
}
if (Flags.isPredicateContextFeatures(params)) {
final String predicateContextFile = Flags
.getPredicateContextFeatures(params);
final Element predicateContextFeatureElement = new Element("custom");
predicateContextFeatureElement.setAttribute("class",
PredicateContextFeatureGenerator.class.getName());
predicateContextFeatureElement.setAttribute("dict", IOUtils
.normalizeLexiconName(new File(predicateContextFile).getName()));
generators.addContent(predicateContextFeatureElement);
System.err.println("-> Predicate Context Generator added!");
}
aggGenerators.addContent(cached);
cached.addContent(generators);
final XMLOutputter xmlOutput = new XMLOutputter();
final Format format = Format.getPrettyFormat();
xmlOutput.setFormat(format);
return xmlOutput.outputString(doc);
} } |
public class class_name {
private static double interpolate(double lower, double upper, double remainder, double scale) {
if (lower == NEGATIVE_INFINITY) {
if (upper == POSITIVE_INFINITY) {
// Return NaN when lower == NEGATIVE_INFINITY and upper == POSITIVE_INFINITY:
return NaN;
}
// Return NEGATIVE_INFINITY when NEGATIVE_INFINITY == lower <= upper < POSITIVE_INFINITY:
return NEGATIVE_INFINITY;
}
if (upper == POSITIVE_INFINITY) {
// Return POSITIVE_INFINITY when NEGATIVE_INFINITY < lower <= upper == POSITIVE_INFINITY:
return POSITIVE_INFINITY;
}
return lower + (upper - lower) * remainder / scale;
} } | public class class_name {
private static double interpolate(double lower, double upper, double remainder, double scale) {
if (lower == NEGATIVE_INFINITY) {
if (upper == POSITIVE_INFINITY) {
// Return NaN when lower == NEGATIVE_INFINITY and upper == POSITIVE_INFINITY:
return NaN; // depends on control dependency: [if], data = [none]
}
// Return NEGATIVE_INFINITY when NEGATIVE_INFINITY == lower <= upper < POSITIVE_INFINITY:
return NEGATIVE_INFINITY; // depends on control dependency: [if], data = [none]
}
if (upper == POSITIVE_INFINITY) {
// Return POSITIVE_INFINITY when NEGATIVE_INFINITY < lower <= upper == POSITIVE_INFINITY:
return POSITIVE_INFINITY; // depends on control dependency: [if], data = [none]
}
return lower + (upper - lower) * remainder / scale;
} } |
public class class_name {
public static List<Audit> findAll(EntityManager em) {
requireArgument(em != null, "Entity manager cannot be null.");
TypedQuery<Audit> query = em.createNamedQuery("Audit.findAll", Audit.class);
try {
return query.getResultList();
} catch (NoResultException ex) {
return new ArrayList<Audit>(0);
}
} } | public class class_name {
public static List<Audit> findAll(EntityManager em) {
requireArgument(em != null, "Entity manager cannot be null.");
TypedQuery<Audit> query = em.createNamedQuery("Audit.findAll", Audit.class);
try {
return query.getResultList(); // depends on control dependency: [try], data = [none]
} catch (NoResultException ex) {
return new ArrayList<Audit>(0);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public DashPackage withPeriodTriggers(String... periodTriggers) {
if (this.periodTriggers == null) {
setPeriodTriggers(new java.util.ArrayList<String>(periodTriggers.length));
}
for (String ele : periodTriggers) {
this.periodTriggers.add(ele);
}
return this;
} } | public class class_name {
public DashPackage withPeriodTriggers(String... periodTriggers) {
if (this.periodTriggers == null) {
setPeriodTriggers(new java.util.ArrayList<String>(periodTriggers.length)); // depends on control dependency: [if], data = [none]
}
for (String ele : periodTriggers) {
this.periodTriggers.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} } |
public class class_name {
public String getAbsoluteName() {
final StringBuilder absolute = new StringBuilder();
if (parent != null) {
absolute.append(parent).append(ENTRY_SEPARATOR);
}
absolute.append(local);
return absolute.toString();
} } | public class class_name {
public String getAbsoluteName() {
final StringBuilder absolute = new StringBuilder();
if (parent != null) {
absolute.append(parent).append(ENTRY_SEPARATOR); // depends on control dependency: [if], data = [(parent]
}
absolute.append(local);
return absolute.toString();
} } |
public class class_name {
private void writeConditions(List<Condition> conditions)
throws JsonGenerationException, IOException {
Map<String, ConditionsByKey> conditionsByType = groupConditionsByTypeAndKey(conditions);
writeJsonObjectStart(JsonDocumentFields.CONDITION);
ConditionsByKey conditionsByKey;
for (Map.Entry<String, ConditionsByKey> entry : conditionsByType
.entrySet()) {
conditionsByKey = conditionsByType.get(entry.getKey());
writeJsonObjectStart(entry.getKey());
for (String key : conditionsByKey.keySet()) {
writeJsonArray(key, conditionsByKey.getConditionsByKey(key));
}
writeJsonObjectEnd();
}
writeJsonObjectEnd();
} } | public class class_name {
private void writeConditions(List<Condition> conditions)
throws JsonGenerationException, IOException {
Map<String, ConditionsByKey> conditionsByType = groupConditionsByTypeAndKey(conditions);
writeJsonObjectStart(JsonDocumentFields.CONDITION);
ConditionsByKey conditionsByKey;
for (Map.Entry<String, ConditionsByKey> entry : conditionsByType
.entrySet()) {
conditionsByKey = conditionsByType.get(entry.getKey());
writeJsonObjectStart(entry.getKey());
for (String key : conditionsByKey.keySet()) {
writeJsonArray(key, conditionsByKey.getConditionsByKey(key)); // depends on control dependency: [for], data = [key]
}
writeJsonObjectEnd();
}
writeJsonObjectEnd();
} } |
public class class_name {
@Override
public Map<RuleSet, List<Rule>> matching(Rules rules,
Map<String, Object> vars,
boolean ignoreMethodErrors) throws OgnlException
{
Map<RuleSet, List<Rule>> ret = new HashMap<>();
for (RuleSet ruleSet : rules.ruleSets)
{
try
{
OgnlContext context = createContext(vars);
List<Rule> matching = match(ruleSet, context);
if (!matching.isEmpty())
{
ret.put(ruleSet, matching);
}
}
catch (MethodFailedException mfe)
{
if (!ignoreMethodErrors)
{
throw mfe;
}
log.warn("Method failed for ruleset " + ruleSet.id, mfe);
}
}
return ret;
} } | public class class_name {
@Override
public Map<RuleSet, List<Rule>> matching(Rules rules,
Map<String, Object> vars,
boolean ignoreMethodErrors) throws OgnlException
{
Map<RuleSet, List<Rule>> ret = new HashMap<>();
for (RuleSet ruleSet : rules.ruleSets)
{
try
{
OgnlContext context = createContext(vars);
List<Rule> matching = match(ruleSet, context);
if (!matching.isEmpty())
{
ret.put(ruleSet, matching); // depends on control dependency: [if], data = [none]
}
}
catch (MethodFailedException mfe)
{
if (!ignoreMethodErrors)
{
throw mfe;
}
log.warn("Method failed for ruleset " + ruleSet.id, mfe);
} // depends on control dependency: [catch], data = [none]
}
return ret;
} } |
public class class_name {
public static int randomSegment(int[] probability) {
int total = 0;
for (int i = 0; i < probability.length; i++) {
total += probability[i];
probability[i] = total;
}
int rand = (int) random(0, total - 1);
for (int i = 0; i < probability.length; i++) {
if (rand < probability[i]) {
return i;
}
}
return -1;
} } | public class class_name {
public static int randomSegment(int[] probability) {
int total = 0;
for (int i = 0; i < probability.length; i++) {
total += probability[i];
// depends on control dependency: [for], data = [i]
probability[i] = total;
// depends on control dependency: [for], data = [i]
}
int rand = (int) random(0, total - 1);
for (int i = 0; i < probability.length; i++) {
if (rand < probability[i]) {
return i;
// depends on control dependency: [if], data = [none]
}
}
return -1;
} } |
public class class_name {
public void shutdown() {
Iterator<ExclusionFilterFactory> itr = factories.iterator();
while (itr.hasNext()) {
ExclusionFilterFactory i = itr.next();
i.shutdown();
}
} } | public class class_name {
public void shutdown() {
Iterator<ExclusionFilterFactory> itr = factories.iterator();
while (itr.hasNext()) {
ExclusionFilterFactory i = itr.next();
i.shutdown(); // depends on control dependency: [while], data = [none]
}
} } |
public class class_name {
public static boolean isFinalVariable(DeclarationExpression declarationExpression, SourceCode sourceCode) {
if (isFromGeneratedSourceCode(declarationExpression)) {
return false;
}
List<Expression> variableExpressions = getVariableExpressions(declarationExpression);
if (!variableExpressions.isEmpty()) {
Expression variableExpression = variableExpressions.get(0);
int startOfDeclaration = declarationExpression.getColumnNumber();
int startOfVariableName = variableExpression.getColumnNumber();
int sourceLineNumber = findFirstNonAnnotationLine(declarationExpression, sourceCode);
String sourceLine = sourceCode.getLines().get(sourceLineNumber-1);
String modifiers = (startOfDeclaration >= 0 && startOfVariableName >= 0 && sourceLine.length() >= startOfVariableName) ?
sourceLine.substring(startOfDeclaration - 1, startOfVariableName - 1) : "";
return modifiers.contains("final");
}
return false;
} } | public class class_name {
public static boolean isFinalVariable(DeclarationExpression declarationExpression, SourceCode sourceCode) {
if (isFromGeneratedSourceCode(declarationExpression)) {
return false;
// depends on control dependency: [if], data = [none]
}
List<Expression> variableExpressions = getVariableExpressions(declarationExpression);
if (!variableExpressions.isEmpty()) {
Expression variableExpression = variableExpressions.get(0);
int startOfDeclaration = declarationExpression.getColumnNumber();
int startOfVariableName = variableExpression.getColumnNumber();
int sourceLineNumber = findFirstNonAnnotationLine(declarationExpression, sourceCode);
String sourceLine = sourceCode.getLines().get(sourceLineNumber-1);
String modifiers = (startOfDeclaration >= 0 && startOfVariableName >= 0 && sourceLine.length() >= startOfVariableName) ?
sourceLine.substring(startOfDeclaration - 1, startOfVariableName - 1) : "";
return modifiers.contains("final");
}
return false;
} } |
public class class_name {
private String readResourceAsString(Resource resource) throws IOException {
StringBuilder contents = new StringBuilder();
// Read entire resource into StringBuilder one chunk at a time
Reader reader = new InputStreamReader(resource.asStream(), "UTF-8");
try {
char buffer[] = new char[8192];
int length;
while ((length = reader.read(buffer)) != -1) {
contents.append(buffer, 0, length);
}
}
// Ensure resource is always closed
finally {
reader.close();
}
return contents.toString();
} } | public class class_name {
private String readResourceAsString(Resource resource) throws IOException {
StringBuilder contents = new StringBuilder();
// Read entire resource into StringBuilder one chunk at a time
Reader reader = new InputStreamReader(resource.asStream(), "UTF-8");
try {
char buffer[] = new char[8192];
int length;
while ((length = reader.read(buffer)) != -1) {
contents.append(buffer, 0, length); // depends on control dependency: [while], data = [none]
}
}
// Ensure resource is always closed
finally {
reader.close();
}
return contents.toString();
} } |
public class class_name {
public int doEndTag() throws JspException
{
HttpServletRequest request = (HttpServletRequest) pageContext.getRequest();
HttpServletResponse response = (HttpServletResponse) pageContext.getResponse();
ServletContext context = pageContext.getServletContext();
try {
boolean encoded = false;
UrlConfig urlConfig = ConfigUtil.getConfig().getUrlConfig();
if (urlConfig != null) {
encoded = !urlConfig.isUrlEncodeUrls();
}
FreezableMutableURI uri = new FreezableMutableURI();
uri.setEncoding(response.getCharacterEncoding());
uri.setURI(url, encoded);
boolean needsToBeSecure = false;
if (_params != null) {
uri.addParameters(_params, false );
}
if (!uri.isAbsolute() && PageFlowUtils.needsToBeSecure(context, request, url, true)) {
needsToBeSecure = true;
}
URLRewriterService.rewriteURL(context, request, response, uri, URLType.ACTION, needsToBeSecure);
String key = PageFlowUtils.getURLTemplateKey(URLType.ACTION, needsToBeSecure);
boolean forXML = TagRenderingBase.Factory.isXHTML(request);
URIContext uriContext = URIContextFactory.getInstance(forXML);
String uriString = URLRewriterService.getTemplatedURL(context, request, uri, key, uriContext);
write(response.encodeURL(uriString));
}
catch (URISyntaxException e) {
// report the error...
String s = Bundle.getString("Tags_RewriteURL_URLException",
new Object[]{url, e.getMessage()});
registerTagError(s, e);
reportErrors();
}
localRelease();
return EVAL_PAGE;
} } | public class class_name {
public int doEndTag() throws JspException
{
HttpServletRequest request = (HttpServletRequest) pageContext.getRequest();
HttpServletResponse response = (HttpServletResponse) pageContext.getResponse();
ServletContext context = pageContext.getServletContext();
try {
boolean encoded = false;
UrlConfig urlConfig = ConfigUtil.getConfig().getUrlConfig();
if (urlConfig != null) {
encoded = !urlConfig.isUrlEncodeUrls(); // depends on control dependency: [if], data = [none]
}
FreezableMutableURI uri = new FreezableMutableURI();
uri.setEncoding(response.getCharacterEncoding());
uri.setURI(url, encoded);
boolean needsToBeSecure = false;
if (_params != null) {
uri.addParameters(_params, false ); // depends on control dependency: [if], data = [(_params]
}
if (!uri.isAbsolute() && PageFlowUtils.needsToBeSecure(context, request, url, true)) {
needsToBeSecure = true; // depends on control dependency: [if], data = [none]
}
URLRewriterService.rewriteURL(context, request, response, uri, URLType.ACTION, needsToBeSecure);
String key = PageFlowUtils.getURLTemplateKey(URLType.ACTION, needsToBeSecure);
boolean forXML = TagRenderingBase.Factory.isXHTML(request);
URIContext uriContext = URIContextFactory.getInstance(forXML);
String uriString = URLRewriterService.getTemplatedURL(context, request, uri, key, uriContext);
write(response.encodeURL(uriString));
}
catch (URISyntaxException e) {
// report the error...
String s = Bundle.getString("Tags_RewriteURL_URLException",
new Object[]{url, e.getMessage()});
registerTagError(s, e);
reportErrors();
}
localRelease();
return EVAL_PAGE;
} } |
public class class_name {
static ByteBuf encodeString(String str) {
ByteBuf out = Unpooled.buffer(2);
byte[] raw;
try {
raw = str.getBytes("UTF-8");
//NB every Java platform has got UTF-8 encoding by default, so this
//exception are never raised.
} catch (UnsupportedEncodingException ex) {
Log.error("", ex);
return null;
}
//Utils.writeWord(out, raw.length);
out.writeShort(raw.length);
out.writeBytes(raw);
return out;
} } | public class class_name {
static ByteBuf encodeString(String str) {
ByteBuf out = Unpooled.buffer(2);
byte[] raw;
try {
raw = str.getBytes("UTF-8"); // depends on control dependency: [try], data = [none]
//NB every Java platform has got UTF-8 encoding by default, so this
//exception are never raised.
} catch (UnsupportedEncodingException ex) {
Log.error("", ex);
return null;
} // depends on control dependency: [catch], data = [none]
//Utils.writeWord(out, raw.length);
out.writeShort(raw.length);
out.writeBytes(raw);
return out;
} } |
public class class_name {
private static List<AdUnit> getAllAdUnits(
AdManagerServices adManagerServices, AdManagerSession session) throws RemoteException {
List<AdUnit> adUnits = new ArrayList<>();
// Get the InventoryService.
InventoryServiceInterface inventoryService =
adManagerServices.get(session, InventoryServiceInterface.class);
// Create a statement to select all ad units.
StatementBuilder statementBuilder =
new StatementBuilder().orderBy("id ASC").limit(StatementBuilder.SUGGESTED_PAGE_LIMIT);
// Default for total result set size.
int totalResultSetSize = 0;
do {
// Get ad units by statement.
AdUnitPage page = inventoryService.getAdUnitsByStatement(statementBuilder.toStatement());
if (page.getResults() != null) {
totalResultSetSize = page.getTotalResultSetSize();
Collections.addAll(adUnits, page.getResults());
}
statementBuilder.increaseOffsetBy(StatementBuilder.SUGGESTED_PAGE_LIMIT);
} while (statementBuilder.getOffset() < totalResultSetSize);
return adUnits;
} } | public class class_name {
private static List<AdUnit> getAllAdUnits(
AdManagerServices adManagerServices, AdManagerSession session) throws RemoteException {
List<AdUnit> adUnits = new ArrayList<>();
// Get the InventoryService.
InventoryServiceInterface inventoryService =
adManagerServices.get(session, InventoryServiceInterface.class);
// Create a statement to select all ad units.
StatementBuilder statementBuilder =
new StatementBuilder().orderBy("id ASC").limit(StatementBuilder.SUGGESTED_PAGE_LIMIT);
// Default for total result set size.
int totalResultSetSize = 0;
do {
// Get ad units by statement.
AdUnitPage page = inventoryService.getAdUnitsByStatement(statementBuilder.toStatement());
if (page.getResults() != null) {
totalResultSetSize = page.getTotalResultSetSize(); // depends on control dependency: [if], data = [none]
Collections.addAll(adUnits, page.getResults()); // depends on control dependency: [if], data = [none]
}
statementBuilder.increaseOffsetBy(StatementBuilder.SUGGESTED_PAGE_LIMIT);
} while (statementBuilder.getOffset() < totalResultSetSize);
return adUnits;
} } |
public class class_name {
protected boolean matchProfiles(final String[] checkProfiles) {
// test for all profiles
if ((checkProfiles != null) && (checkProfiles.length == 1) && checkProfiles[0].equals(ALL_PROFILES)) {
return true;
}
if (enabledProfiles == null || enabledProfiles.isEmpty()) {
if (validateAllProfilesByDefault) {
return true; // all profiles are considered as enabled
}
// only default profile is enabled
if ((checkProfiles == null) || (checkProfiles.length == 0)) {
return true;
}
for (String profile : checkProfiles) {
if (StringUtil.isEmpty(profile)) {
return true; // default profile
}
if (profile.equals(DEFAULT_PROFILE)) {
return true;
}
}
return false;
}
// there are enabled profiles
if ((checkProfiles == null) || (checkProfiles.length == 0)) {
return enabledProfiles.contains(DEFAULT_PROFILE);
}
boolean result = false;
for (String profile : checkProfiles) {
boolean b = true;
boolean must = false;
if (StringUtil.isEmpty(profile)) {
profile = DEFAULT_PROFILE;
} else if (profile.charAt(0) == '-') {
profile = profile.substring(1);
b = false;
} else if (profile.charAt(0) == '+') {
profile = profile.substring(1);
must = true;
}
if (enabledProfiles.contains(profile)) {
if (!b) {
return false;
}
result = true;
} else {
if (must) {
return false;
}
}
}
return result;
} } | public class class_name {
protected boolean matchProfiles(final String[] checkProfiles) {
// test for all profiles
if ((checkProfiles != null) && (checkProfiles.length == 1) && checkProfiles[0].equals(ALL_PROFILES)) {
return true; // depends on control dependency: [if], data = [none]
}
if (enabledProfiles == null || enabledProfiles.isEmpty()) {
if (validateAllProfilesByDefault) {
return true; // all profiles are considered as enabled // depends on control dependency: [if], data = [none]
}
// only default profile is enabled
if ((checkProfiles == null) || (checkProfiles.length == 0)) {
return true; // depends on control dependency: [if], data = [none]
}
for (String profile : checkProfiles) {
if (StringUtil.isEmpty(profile)) {
return true; // default profile // depends on control dependency: [if], data = [none]
}
if (profile.equals(DEFAULT_PROFILE)) {
return true; // depends on control dependency: [if], data = [none]
}
}
return false; // depends on control dependency: [if], data = [none]
}
// there are enabled profiles
if ((checkProfiles == null) || (checkProfiles.length == 0)) {
return enabledProfiles.contains(DEFAULT_PROFILE); // depends on control dependency: [if], data = [none]
}
boolean result = false;
for (String profile : checkProfiles) {
boolean b = true;
boolean must = false;
if (StringUtil.isEmpty(profile)) {
profile = DEFAULT_PROFILE; // depends on control dependency: [if], data = [none]
} else if (profile.charAt(0) == '-') {
profile = profile.substring(1); // depends on control dependency: [if], data = [none]
b = false; // depends on control dependency: [if], data = [none]
} else if (profile.charAt(0) == '+') {
profile = profile.substring(1); // depends on control dependency: [if], data = [none]
must = true; // depends on control dependency: [if], data = [none]
}
if (enabledProfiles.contains(profile)) {
if (!b) {
return false; // depends on control dependency: [if], data = [none]
}
result = true; // depends on control dependency: [if], data = [none]
} else {
if (must) {
return false; // depends on control dependency: [if], data = [none]
}
}
}
return result;
} } |
public class class_name {
public OptionalLong extractRevision(String buildName) {
// Gets the regex for the pattern
String regex = getRegex();
// Matching
Matcher matcher = Pattern.compile(regex).matcher(buildName);
if (matcher.matches()) {
String token = matcher.group(1);
return OptionalLong.of(
Long.parseLong(token, 10)
);
} else {
return OptionalLong.empty();
}
} } | public class class_name {
public OptionalLong extractRevision(String buildName) {
// Gets the regex for the pattern
String regex = getRegex();
// Matching
Matcher matcher = Pattern.compile(regex).matcher(buildName);
if (matcher.matches()) {
String token = matcher.group(1);
return OptionalLong.of(
Long.parseLong(token, 10)
); // depends on control dependency: [if], data = [none]
} else {
return OptionalLong.empty(); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public void setPolicyLevels(java.util.Collection<String> policyLevels) {
if (policyLevels == null) {
this.policyLevels = null;
return;
}
this.policyLevels = new java.util.ArrayList<String>(policyLevels);
} } | public class class_name {
public void setPolicyLevels(java.util.Collection<String> policyLevels) {
if (policyLevels == null) {
this.policyLevels = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.policyLevels = new java.util.ArrayList<String>(policyLevels);
} } |
public class class_name {
public Map<Long, Long> reconfigureOnFault(Set<Long> hsIds, FaultMessage fm, Set<Long> unknownFaultedSites) {
boolean proceed = false;
do {
Discard ignoreIt = mayIgnore(hsIds, fm);
if (Discard.DoNot == ignoreIt) {
m_inTrouble.put(fm.failedSite, fm.witnessed || fm.decided);
m_recoveryLog.info("Agreement, Processing " + fm);
proceed = true;
} else {
ignoreIt.log(fm);
}
if (Discard.Unknown == ignoreIt) {
unknownFaultedSites.add(fm.failedSite);
}
fm = (FaultMessage) m_mailbox.recv(justFailures);
} while (fm != null);
if (!proceed) {
return ImmutableMap.of();
}
m_inTroubleCount = m_inTrouble.size();
// we are here if failed site was not previously recorded
// or it was previously recorded but it became witnessed from unwitnessed
m_seeker.startSeekingFor(Sets.difference(hsIds, m_failedSites), m_inTrouble);
if (m_recoveryLog.isDebugEnabled()) {
m_recoveryLog.debug(String.format("\n %s\n %s\n %s\n %s\n %s",
m_seeker.dumpAlive(), m_seeker.dumpDead(),
m_seeker.dumpReported(), m_seeker.dumpSurvivors(),
dumpInTrouble()));
}
discoverGlobalFaultData_send(hsIds);
while (discoverGlobalFaultData_rcv(hsIds)) {
Map<Long, Long> lastTxnIdByFailedSite = extractGlobalFaultData(hsIds);
if (lastTxnIdByFailedSite.isEmpty()) {
return ImmutableMap.of();
}
Set<Long> witnessed = Maps.filterValues(m_inTrouble, equalTo(Boolean.TRUE)).keySet();
Set<Long> notClosed = Sets.difference(witnessed, lastTxnIdByFailedSite.keySet());
if ( !notClosed.isEmpty()) {
m_recoveryLog.warn("Agreement, witnessed but not decided: ["
+ CoreUtils.hsIdCollectionToString(notClosed)
+ "] seeker: " + m_seeker);
}
if (!notifyOnKill(hsIds, lastTxnIdByFailedSite)) {
continue;
}
m_failedSites.addAll( lastTxnIdByFailedSite.keySet());
m_failedSitesCount = m_failedSites.size();
m_recoveryLog.info(
"Agreement, Adding "
+ CoreUtils.hsIdCollectionToString(lastTxnIdByFailedSite.keySet())
+ " to failed sites history");
clearInTrouble(lastTxnIdByFailedSite.keySet());
m_seeker.clear();
return lastTxnIdByFailedSite;
}
return ImmutableMap.of();
} } | public class class_name {
public Map<Long, Long> reconfigureOnFault(Set<Long> hsIds, FaultMessage fm, Set<Long> unknownFaultedSites) {
boolean proceed = false;
do {
Discard ignoreIt = mayIgnore(hsIds, fm);
if (Discard.DoNot == ignoreIt) {
m_inTrouble.put(fm.failedSite, fm.witnessed || fm.decided); // depends on control dependency: [if], data = [none]
m_recoveryLog.info("Agreement, Processing " + fm); // depends on control dependency: [if], data = [none]
proceed = true; // depends on control dependency: [if], data = [none]
} else {
ignoreIt.log(fm); // depends on control dependency: [if], data = [none]
}
if (Discard.Unknown == ignoreIt) {
unknownFaultedSites.add(fm.failedSite); // depends on control dependency: [if], data = [none]
}
fm = (FaultMessage) m_mailbox.recv(justFailures);
} while (fm != null);
if (!proceed) {
return ImmutableMap.of(); // depends on control dependency: [if], data = [none]
}
m_inTroubleCount = m_inTrouble.size();
// we are here if failed site was not previously recorded
// or it was previously recorded but it became witnessed from unwitnessed
m_seeker.startSeekingFor(Sets.difference(hsIds, m_failedSites), m_inTrouble);
if (m_recoveryLog.isDebugEnabled()) {
m_recoveryLog.debug(String.format("\n %s\n %s\n %s\n %s\n %s",
m_seeker.dumpAlive(), m_seeker.dumpDead(),
m_seeker.dumpReported(), m_seeker.dumpSurvivors(),
dumpInTrouble())); // depends on control dependency: [if], data = [none]
}
discoverGlobalFaultData_send(hsIds);
while (discoverGlobalFaultData_rcv(hsIds)) {
Map<Long, Long> lastTxnIdByFailedSite = extractGlobalFaultData(hsIds);
if (lastTxnIdByFailedSite.isEmpty()) {
return ImmutableMap.of(); // depends on control dependency: [if], data = [none]
}
Set<Long> witnessed = Maps.filterValues(m_inTrouble, equalTo(Boolean.TRUE)).keySet();
Set<Long> notClosed = Sets.difference(witnessed, lastTxnIdByFailedSite.keySet());
if ( !notClosed.isEmpty()) {
m_recoveryLog.warn("Agreement, witnessed but not decided: ["
+ CoreUtils.hsIdCollectionToString(notClosed)
+ "] seeker: " + m_seeker); // depends on control dependency: [if], data = [none]
}
if (!notifyOnKill(hsIds, lastTxnIdByFailedSite)) {
continue;
}
m_failedSites.addAll( lastTxnIdByFailedSite.keySet()); // depends on control dependency: [while], data = [none]
m_failedSitesCount = m_failedSites.size(); // depends on control dependency: [while], data = [none]
m_recoveryLog.info(
"Agreement, Adding "
+ CoreUtils.hsIdCollectionToString(lastTxnIdByFailedSite.keySet())
+ " to failed sites history"); // depends on control dependency: [while], data = [none]
clearInTrouble(lastTxnIdByFailedSite.keySet()); // depends on control dependency: [while], data = [none]
m_seeker.clear(); // depends on control dependency: [while], data = [none]
return lastTxnIdByFailedSite; // depends on control dependency: [while], data = [none]
}
return ImmutableMap.of();
} } |
public class class_name {
public static Pair<String,String> pair (final String s1, final String s2)
{
if (StringUtils.isBlank(s1))
{
LOG.warn("Blank first arg");
}
if (StringUtils.isBlank(s2))
{
LOG.warn("Blank second arg for: "+s1);
}
return new PairImpl<String,String> (s1, s2);
} } | public class class_name {
public static Pair<String,String> pair (final String s1, final String s2)
{
if (StringUtils.isBlank(s1))
{
LOG.warn("Blank first arg"); // depends on control dependency: [if], data = [none]
}
if (StringUtils.isBlank(s2))
{
LOG.warn("Blank second arg for: "+s1); // depends on control dependency: [if], data = [none]
}
return new PairImpl<String,String> (s1, s2);
} } |
public class class_name {
private long readWord(final int position) {
if(position < 0) {
throw new ArrayIndexOutOfBoundsException(position);
}
// First bit of the word
final long firstBitIndex = (position * wordLength);
final int firstByteIndex = (bytePadding + (int)(firstBitIndex / BITS_PER_BYTE));
final int firstByteSkipBits = (int)(firstBitIndex % BITS_PER_BYTE);
// Last bit of the word
final long lastBitIndex = (firstBitIndex + wordLength - 1);
final int lastByteIndex = (bytePadding + (int)(lastBitIndex / BITS_PER_BYTE));
final int lastByteBitsToConsume;
final int bitsAfterByteBoundary = (int)((lastBitIndex + 1) % BITS_PER_BYTE);
// If the word terminates at the end of the last byte, consume the whole
// last byte.
if(bitsAfterByteBoundary == 0) {
lastByteBitsToConsume = BITS_PER_BYTE;
} else {
// Otherwise, only consume what is necessary.
lastByteBitsToConsume = bitsAfterByteBoundary;
}
if(lastByteIndex >= bytes.length) {
throw new ArrayIndexOutOfBoundsException("Word out of bounds of backing array.");
}
// Accumulator
long value = 0;
// --------------------------------------------------------------------
// First byte
final int bitsRemainingInFirstByte = (BITS_PER_BYTE - firstByteSkipBits);
final int bitsToConsumeInFirstByte = Math.min(bitsRemainingInFirstByte, wordLength);
long firstByte = (long)bytes[firstByteIndex];
// Mask off the bits to skip in the first byte.
final long firstByteMask = ((1L << bitsRemainingInFirstByte) - 1L);
firstByte &= firstByteMask;
// Right-align relevant bits of first byte.
firstByte >>>= (bitsRemainingInFirstByte - bitsToConsumeInFirstByte);
value |= firstByte;
// If the first byte contains the whole word, short-circuit.
if(firstByteIndex == lastByteIndex) {
return value;
}
// --------------------------------------------------------------------
// Middle bytes
final int middleByteCount = (lastByteIndex - firstByteIndex - 1);
for(int i=0; i<middleByteCount; i++) {
final long middleByte = (bytes[firstByteIndex + i + 1] & BYTE_MASK);
// Push middle byte onto accumulator.
value <<= BITS_PER_BYTE;
value |= middleByte;
}
// --------------------------------------------------------------------
// Last byte
long lastByte = (bytes[lastByteIndex] & BYTE_MASK);
lastByte >>= (BITS_PER_BYTE - lastByteBitsToConsume);
value <<= lastByteBitsToConsume;
value |= lastByte;
return value;
} } | public class class_name {
private long readWord(final int position) {
if(position < 0) {
throw new ArrayIndexOutOfBoundsException(position);
}
// First bit of the word
final long firstBitIndex = (position * wordLength);
final int firstByteIndex = (bytePadding + (int)(firstBitIndex / BITS_PER_BYTE));
final int firstByteSkipBits = (int)(firstBitIndex % BITS_PER_BYTE);
// Last bit of the word
final long lastBitIndex = (firstBitIndex + wordLength - 1);
final int lastByteIndex = (bytePadding + (int)(lastBitIndex / BITS_PER_BYTE));
final int lastByteBitsToConsume;
final int bitsAfterByteBoundary = (int)((lastBitIndex + 1) % BITS_PER_BYTE);
// If the word terminates at the end of the last byte, consume the whole
// last byte.
if(bitsAfterByteBoundary == 0) {
lastByteBitsToConsume = BITS_PER_BYTE; // depends on control dependency: [if], data = [none]
} else {
// Otherwise, only consume what is necessary.
lastByteBitsToConsume = bitsAfterByteBoundary; // depends on control dependency: [if], data = [none]
}
if(lastByteIndex >= bytes.length) {
throw new ArrayIndexOutOfBoundsException("Word out of bounds of backing array.");
}
// Accumulator
long value = 0;
// --------------------------------------------------------------------
// First byte
final int bitsRemainingInFirstByte = (BITS_PER_BYTE - firstByteSkipBits);
final int bitsToConsumeInFirstByte = Math.min(bitsRemainingInFirstByte, wordLength);
long firstByte = (long)bytes[firstByteIndex];
// Mask off the bits to skip in the first byte.
final long firstByteMask = ((1L << bitsRemainingInFirstByte) - 1L);
firstByte &= firstByteMask;
// Right-align relevant bits of first byte.
firstByte >>>= (bitsRemainingInFirstByte - bitsToConsumeInFirstByte);
value |= firstByte;
// If the first byte contains the whole word, short-circuit.
if(firstByteIndex == lastByteIndex) {
return value; // depends on control dependency: [if], data = [none]
}
// --------------------------------------------------------------------
// Middle bytes
final int middleByteCount = (lastByteIndex - firstByteIndex - 1);
for(int i=0; i<middleByteCount; i++) {
final long middleByte = (bytes[firstByteIndex + i + 1] & BYTE_MASK);
// Push middle byte onto accumulator.
value <<= BITS_PER_BYTE; // depends on control dependency: [for], data = [none]
value |= middleByte; // depends on control dependency: [for], data = [none]
}
// --------------------------------------------------------------------
// Last byte
long lastByte = (bytes[lastByteIndex] & BYTE_MASK);
lastByte >>= (BITS_PER_BYTE - lastByteBitsToConsume);
value <<= lastByteBitsToConsume;
value |= lastByte;
return value;
} } |
public class class_name {
public static int saturatedPow(int b, int k) {
checkNonNegative("exponent", k);
switch (b) {
case 0:
return (k == 0) ? 1 : 0;
case 1:
return 1;
case (-1):
return ((k & 1) == 0) ? 1 : -1;
case 2:
if (k >= Integer.SIZE - 1) {
return Integer.MAX_VALUE;
}
return 1 << k;
case (-2):
if (k >= Integer.SIZE) {
return Integer.MAX_VALUE + (k & 1);
}
return ((k & 1) == 0) ? 1 << k : -1 << k;
default:
// continue below to handle the general case
}
int accum = 1;
// if b is negative and k is odd then the limit is MIN otherwise the limit is MAX
int limit = Integer.MAX_VALUE + ((b >>> Integer.SIZE - 1) & (k & 1));
while (true) {
switch (k) {
case 0:
return accum;
case 1:
return saturatedMultiply(accum, b);
default:
if ((k & 1) != 0) {
accum = saturatedMultiply(accum, b);
}
k >>= 1;
if (k > 0) {
if (-FLOOR_SQRT_MAX_INT > b | b > FLOOR_SQRT_MAX_INT) {
return limit;
}
b *= b;
}
}
}
} } | public class class_name {
public static int saturatedPow(int b, int k) {
checkNonNegative("exponent", k);
switch (b) {
case 0:
return (k == 0) ? 1 : 0;
case 1:
return 1;
case (-1):
return ((k & 1) == 0) ? 1 : -1;
case 2:
if (k >= Integer.SIZE - 1) {
return Integer.MAX_VALUE;
// depends on control dependency: [if], data = [none]
}
return 1 << k;
case (-2):
if (k >= Integer.SIZE) {
return Integer.MAX_VALUE + (k & 1);
// depends on control dependency: [if], data = [(k]
}
return ((k & 1) == 0) ? 1 << k : -1 << k;
default:
// continue below to handle the general case
}
int accum = 1;
// if b is negative and k is odd then the limit is MIN otherwise the limit is MAX
int limit = Integer.MAX_VALUE + ((b >>> Integer.SIZE - 1) & (k & 1));
while (true) {
switch (k) {
case 0:
return accum;
case 1:
return saturatedMultiply(accum, b);
default:
if ((k & 1) != 0) {
accum = saturatedMultiply(accum, b);
// depends on control dependency: [if], data = [none]
}
k >>= 1;
if (k > 0) {
if (-FLOOR_SQRT_MAX_INT > b | b > FLOOR_SQRT_MAX_INT) {
return limit;
// depends on control dependency: [if], data = [none]
}
b *= b;
// depends on control dependency: [if], data = [none]
}
}
}
} } |
public class class_name {
protected synchronized void populateStatistics(final boolean noCache) {
//check again before starting the work.
if (noCache || System.currentTimeMillis() - lastRefreshedTime > cacheTimeInMilliseconds) {
final ExecutorInfo stats = new ExecutorInfo();
fillRemainingMemoryPercent(stats);
fillRemainingFlowCapacityAndLastDispatchedTime(stats);
fillCpuUsage(stats);
cachedstats = stats;
lastRefreshedTime = System.currentTimeMillis();
}
} } | public class class_name {
protected synchronized void populateStatistics(final boolean noCache) {
//check again before starting the work.
if (noCache || System.currentTimeMillis() - lastRefreshedTime > cacheTimeInMilliseconds) {
final ExecutorInfo stats = new ExecutorInfo();
fillRemainingMemoryPercent(stats); // depends on control dependency: [if], data = [none]
fillRemainingFlowCapacityAndLastDispatchedTime(stats); // depends on control dependency: [if], data = [none]
fillCpuUsage(stats); // depends on control dependency: [if], data = [none]
cachedstats = stats; // depends on control dependency: [if], data = [none]
lastRefreshedTime = System.currentTimeMillis(); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public void getEachCellContent(int YNum, int cc, TableCandidate tc) {
int footnoteLineIndex = tc.getFootnoteBeginRow();
String[] contentsInRows = new String[footnoteLineIndex];
float[] leftX_tableColumns = tc.getLeftX_tableColumns();
float[] rightX_tableColumns = tc.getRightX_tableColumns();
String[][] cells = new String[YNum][cc];
boolean[][] crossCells = new boolean[YNum][cc];
for (int tt=0; tt<tc.getRows().size(); tt++) {
for (int qq=0; qq<cc; qq++) {
cells[tt][qq] = "";
crossCells[tt][qq]=false;
}
}
for (int tt=0; tt<footnoteLineIndex; tt++) {
String cellsThisLine = "";
TableRow row = tc.getRows().get(tt);
int cellThisLine = row.getCells().size(); // pmoj.objectNum_eachY[tt];
for (int zz=0; zz<cellThisLine; zz++) {
for (int qq=0; qq<cc; qq++) {
if ( (row.getCells().get(zz).getX()>=leftX_tableColumns[qq])
&& (row.getCells().get(zz).getEndX()<=rightX_tableColumns[qq]) )
cells[tt][qq] =cells[tt][qq] + row.getCells().get(zz).getText() + " ";
if ((row.getCells().get(zz).getX()<leftX_tableColumns[qq])
&&(leftX_tableColumns[qq]<=row.getCells().get(zz).getEndX()) )
cells[tt][qq] = cells[tt][qq] + row.getCells().get(zz).getText() + " ";
if ((leftX_tableColumns[qq]>=row.getCells().get(zz).getX())
&& (row.getCells().get(zz).getX()>=rightX_tableColumns[qq])
&&(rightX_tableColumns[qq]<row.getCells().get(zz).getEndX()) )
cells[tt][qq] = cells[tt][qq] + row.getCells().get(zz).getText() + " ";
}
}
for (int zz=0; zz<cellThisLine; zz++) {
int crossCellBegin = 1000;
int crossCellEnd=0;
for (int qq=0; qq<cc; qq++) {
if ( (qq>0) && (cells[tt][qq-1].length()==0)
&& (row.getCells().get(zz).getX()<=rightX_tableColumns[qq-1])
&& (row.getCells().get(zz).getX()<leftX_tableColumns[qq])
&& (leftX_tableColumns[qq]<=row.getCells().get(zz).getEndX()) ) {
crossCells[tt][qq]=true;
crossCells[tt][qq-1]=true;
cells[tt][qq-1] = row.getCells().get(zz).getText();
crossCellBegin=qq-1;
}
if ((qq<(cc-1)) && (cells[tt][qq+1].length()==0)
&& (row.getCells().get(zz).getEndX()>=leftX_tableColumns[qq+1])
&& (row.getCells().get(zz).getX()<=rightX_tableColumns[qq])
&& (rightX_tableColumns[qq]<row.getCells().get(zz).getEndX()) ) {
crossCells[tt][qq]=true;
crossCells[tt][qq+1]=true;
cells[tt][qq+1] = row.getCells().get(zz).getText();
crossCellEnd=qq+1;
}
}
}
for (int qq=0; qq<cc; qq++) {
cellsThisLine = cellsThisLine + cells[tt][qq] + "; ";
}
contentsInRows[tt]=cellsThisLine;
}
tc.setCells(cells);
tc.setCrossCells(crossCells);
} } | public class class_name {
public void getEachCellContent(int YNum, int cc, TableCandidate tc) {
int footnoteLineIndex = tc.getFootnoteBeginRow();
String[] contentsInRows = new String[footnoteLineIndex];
float[] leftX_tableColumns = tc.getLeftX_tableColumns();
float[] rightX_tableColumns = tc.getRightX_tableColumns();
String[][] cells = new String[YNum][cc];
boolean[][] crossCells = new boolean[YNum][cc];
for (int tt=0; tt<tc.getRows().size(); tt++) {
for (int qq=0; qq<cc; qq++) {
cells[tt][qq] = "";
crossCells[tt][qq]=false;
// depends on control dependency: [for], data = [qq]
}
}
for (int tt=0; tt<footnoteLineIndex; tt++) {
String cellsThisLine = "";
TableRow row = tc.getRows().get(tt);
int cellThisLine = row.getCells().size(); // pmoj.objectNum_eachY[tt];
for (int zz=0; zz<cellThisLine; zz++) {
for (int qq=0; qq<cc; qq++) {
if ( (row.getCells().get(zz).getX()>=leftX_tableColumns[qq])
&& (row.getCells().get(zz).getEndX()<=rightX_tableColumns[qq]) )
cells[tt][qq] =cells[tt][qq] + row.getCells().get(zz).getText() + " ";
if ((row.getCells().get(zz).getX()<leftX_tableColumns[qq])
&&(leftX_tableColumns[qq]<=row.getCells().get(zz).getEndX()) )
cells[tt][qq] = cells[tt][qq] + row.getCells().get(zz).getText() + " ";
if ((leftX_tableColumns[qq]>=row.getCells().get(zz).getX())
&& (row.getCells().get(zz).getX()>=rightX_tableColumns[qq])
&&(rightX_tableColumns[qq]<row.getCells().get(zz).getEndX()) )
cells[tt][qq] = cells[tt][qq] + row.getCells().get(zz).getText() + " ";
}
}
for (int zz=0; zz<cellThisLine; zz++) {
int crossCellBegin = 1000;
int crossCellEnd=0;
for (int qq=0; qq<cc; qq++) {
if ( (qq>0) && (cells[tt][qq-1].length()==0)
&& (row.getCells().get(zz).getX()<=rightX_tableColumns[qq-1])
&& (row.getCells().get(zz).getX()<leftX_tableColumns[qq])
&& (leftX_tableColumns[qq]<=row.getCells().get(zz).getEndX()) ) {
crossCells[tt][qq]=true;
// depends on control dependency: [if], data = [none]
crossCells[tt][qq-1]=true;
// depends on control dependency: [if], data = [none]
cells[tt][qq-1] = row.getCells().get(zz).getText();
// depends on control dependency: [if], data = [none]
crossCellBegin=qq-1;
// depends on control dependency: [if], data = [none]
}
if ((qq<(cc-1)) && (cells[tt][qq+1].length()==0)
&& (row.getCells().get(zz).getEndX()>=leftX_tableColumns[qq+1])
&& (row.getCells().get(zz).getX()<=rightX_tableColumns[qq])
&& (rightX_tableColumns[qq]<row.getCells().get(zz).getEndX()) ) {
crossCells[tt][qq]=true;
// depends on control dependency: [if], data = [none]
crossCells[tt][qq+1]=true;
// depends on control dependency: [if], data = [none]
cells[tt][qq+1] = row.getCells().get(zz).getText();
// depends on control dependency: [if], data = [none]
crossCellEnd=qq+1;
// depends on control dependency: [if], data = [none]
}
}
}
for (int qq=0; qq<cc; qq++) {
cellsThisLine = cellsThisLine + cells[tt][qq] + "; ";
// depends on control dependency: [for], data = [qq]
}
contentsInRows[tt]=cellsThisLine;
// depends on control dependency: [for], data = [tt]
}
tc.setCells(cells);
tc.setCrossCells(crossCells);
} } |
public class class_name {
private List<String> existingRootPackages() {
ArrayList<String> packages = new ArrayList<>();
packages.addAll(Arrays.asList(KNOWN_ROOT_APPS_PACKAGES));
packages.addAll(Arrays.asList(KNOWN_DANGEROUS_APPS_PACKAGES));
packages.addAll(Arrays.asList(KNOWN_ROOT_CLOAKING_PACKAGES));
PackageManager pm = context.getPackageManager();
List<String> packagesFound = new ArrayList<>();
for (String packageName : packages) {
try {
// Root app detected
pm.getPackageInfo(packageName, 0);
packagesFound.add(packageName);
} catch (PackageManager.NameNotFoundException e) {
// Exception thrown, package is not installed into the system
}
}
return packagesFound;
} } | public class class_name {
private List<String> existingRootPackages() {
ArrayList<String> packages = new ArrayList<>();
packages.addAll(Arrays.asList(KNOWN_ROOT_APPS_PACKAGES));
packages.addAll(Arrays.asList(KNOWN_DANGEROUS_APPS_PACKAGES));
packages.addAll(Arrays.asList(KNOWN_ROOT_CLOAKING_PACKAGES));
PackageManager pm = context.getPackageManager();
List<String> packagesFound = new ArrayList<>();
for (String packageName : packages) {
try {
// Root app detected
pm.getPackageInfo(packageName, 0); // depends on control dependency: [try], data = [none]
packagesFound.add(packageName); // depends on control dependency: [try], data = [none]
} catch (PackageManager.NameNotFoundException e) {
// Exception thrown, package is not installed into the system
} // depends on control dependency: [catch], data = [none]
}
return packagesFound;
} } |
public class class_name {
@Override
protected void setupLogging(@Nullable final Level level) {
TargetConsoleAppender.setTarget(System.out);
// adapt configuration properties for logging unless already set
if (level != null) {
maybeSetProperty("shell.logging.console.threshold", level.name());
maybeSetProperty("shell.logging.file.threshold", level.name());
maybeSetProperty("shell.logging.root-level", level.name());
}
super.setupLogging(level);
} } | public class class_name {
@Override
protected void setupLogging(@Nullable final Level level) {
TargetConsoleAppender.setTarget(System.out);
// adapt configuration properties for logging unless already set
if (level != null) {
maybeSetProperty("shell.logging.console.threshold", level.name()); // depends on control dependency: [if], data = [none]
maybeSetProperty("shell.logging.file.threshold", level.name()); // depends on control dependency: [if], data = [none]
maybeSetProperty("shell.logging.root-level", level.name()); // depends on control dependency: [if], data = [none]
}
super.setupLogging(level);
} } |
public class class_name {
public static Method getMethod(Class<?> clazz, String methodName, Class<?>... parameterTypes)
{
try {
Method method = clazz.getDeclaredMethod(methodName, parameterTypes);
method.setAccessible(true);
return method;
}
catch(NoSuchMethodException e) {
throw new NoSuchBeingException(e);
}
catch(SecurityException e) {
throw new BugError(e);
}
} } | public class class_name {
public static Method getMethod(Class<?> clazz, String methodName, Class<?>... parameterTypes)
{
try {
Method method = clazz.getDeclaredMethod(methodName, parameterTypes);
method.setAccessible(true);
// depends on control dependency: [try], data = [none]
return method;
// depends on control dependency: [try], data = [none]
}
catch(NoSuchMethodException e) {
throw new NoSuchBeingException(e);
}
// depends on control dependency: [catch], data = [none]
catch(SecurityException e) {
throw new BugError(e);
}
// depends on control dependency: [catch], data = [none]
} } |
public class class_name {
XMLName toXMLName(Context cx, Object nameValue) {
XMLName result;
if (nameValue instanceof XMLName) {
result = (XMLName)nameValue;
} else if (nameValue instanceof QName) {
QName qname = (QName)nameValue;
result = XMLName.formProperty(qname.uri(), qname.localName());
} else if (nameValue instanceof String) {
result = toXMLNameFromString(cx, (String)nameValue);
} else if (nameValue instanceof Boolean
|| nameValue instanceof Number
|| nameValue == Undefined.instance
|| nameValue == null) {
throw badXMLName(nameValue);
} else {
String name = ScriptRuntime.toString(nameValue);
result = toXMLNameFromString(cx, name);
}
return result;
} } | public class class_name {
XMLName toXMLName(Context cx, Object nameValue) {
XMLName result;
if (nameValue instanceof XMLName) {
result = (XMLName)nameValue; // depends on control dependency: [if], data = [none]
} else if (nameValue instanceof QName) {
QName qname = (QName)nameValue;
result = XMLName.formProperty(qname.uri(), qname.localName()); // depends on control dependency: [if], data = [none]
} else if (nameValue instanceof String) {
result = toXMLNameFromString(cx, (String)nameValue); // depends on control dependency: [if], data = [none]
} else if (nameValue instanceof Boolean
|| nameValue instanceof Number
|| nameValue == Undefined.instance
|| nameValue == null) {
throw badXMLName(nameValue);
} else {
String name = ScriptRuntime.toString(nameValue);
result = toXMLNameFromString(cx, name); // depends on control dependency: [if], data = [none]
}
return result;
} } |
public class class_name {
public Node getFirstChild() {
if (childNodes == null) {
return null;
}
if (childNodes.isEmpty()) {
return null;
}
return childNodes.get(0);
} } | public class class_name {
public Node getFirstChild() {
if (childNodes == null) {
return null; // depends on control dependency: [if], data = [none]
}
if (childNodes.isEmpty()) {
return null; // depends on control dependency: [if], data = [none]
}
return childNodes.get(0);
} } |
public class class_name {
public OutlierResult run(Relation<V> relation) {
DoubleMinMax mm = new DoubleMinMax();
// resulting scores
WritableDoubleDataStore oscores = DataStoreUtil.makeDoubleStorage(relation.getDBIDs(), DataStoreFactory.HINT_TEMP | DataStoreFactory.HINT_HOT);
// Compute mean and covariance Matrix
CovarianceMatrix temp = CovarianceMatrix.make(relation);
double[] mean = temp.getMeanVector(relation).toArray();
// debugFine(mean.toString());
double[][] covarianceMatrix = temp.destroyToPopulationMatrix();
// debugFine(covarianceMatrix.toString());
double[][] covarianceTransposed = inverse(covarianceMatrix);
// Normalization factors for Gaussian PDF
double det = new LUDecomposition(covarianceMatrix).det();
final double fakt = 1.0 / FastMath.sqrt(MathUtil.powi(MathUtil.TWOPI, RelationUtil.dimensionality(relation)) * det);
// for each object compute Mahalanobis distance
for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
double[] x = minusEquals(relation.get(iditer).toArray(), mean);
// Gaussian PDF
final double mDist = transposeTimesTimes(x, covarianceTransposed, x);
final double prob = fakt * FastMath.exp(-mDist * .5);
mm.put(prob);
oscores.putDouble(iditer, prob);
}
final OutlierScoreMeta meta;
if(invert) {
double max = mm.getMax() != 0 ? mm.getMax() : 1.;
for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
oscores.putDouble(iditer, (max - oscores.doubleValue(iditer)) / max);
}
meta = new BasicOutlierScoreMeta(0.0, 1.0);
}
else {
meta = new InvertedOutlierScoreMeta(mm.getMin(), mm.getMax(), 0.0, Double.POSITIVE_INFINITY);
}
DoubleRelation res = new MaterializedDoubleRelation("Gaussian Model Outlier Score", "gaussian-model-outlier", oscores, relation.getDBIDs());
return new OutlierResult(meta, res);
} } | public class class_name {
public OutlierResult run(Relation<V> relation) {
DoubleMinMax mm = new DoubleMinMax();
// resulting scores
WritableDoubleDataStore oscores = DataStoreUtil.makeDoubleStorage(relation.getDBIDs(), DataStoreFactory.HINT_TEMP | DataStoreFactory.HINT_HOT);
// Compute mean and covariance Matrix
CovarianceMatrix temp = CovarianceMatrix.make(relation);
double[] mean = temp.getMeanVector(relation).toArray();
// debugFine(mean.toString());
double[][] covarianceMatrix = temp.destroyToPopulationMatrix();
// debugFine(covarianceMatrix.toString());
double[][] covarianceTransposed = inverse(covarianceMatrix);
// Normalization factors for Gaussian PDF
double det = new LUDecomposition(covarianceMatrix).det();
final double fakt = 1.0 / FastMath.sqrt(MathUtil.powi(MathUtil.TWOPI, RelationUtil.dimensionality(relation)) * det);
// for each object compute Mahalanobis distance
for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
double[] x = minusEquals(relation.get(iditer).toArray(), mean);
// Gaussian PDF
final double mDist = transposeTimesTimes(x, covarianceTransposed, x);
final double prob = fakt * FastMath.exp(-mDist * .5);
mm.put(prob); // depends on control dependency: [for], data = [none]
oscores.putDouble(iditer, prob); // depends on control dependency: [for], data = [iditer]
}
final OutlierScoreMeta meta;
if(invert) {
double max = mm.getMax() != 0 ? mm.getMax() : 1.;
for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
oscores.putDouble(iditer, (max - oscores.doubleValue(iditer)) / max); // depends on control dependency: [for], data = [iditer]
}
meta = new BasicOutlierScoreMeta(0.0, 1.0); // depends on control dependency: [if], data = [none]
}
else {
meta = new InvertedOutlierScoreMeta(mm.getMin(), mm.getMax(), 0.0, Double.POSITIVE_INFINITY); // depends on control dependency: [if], data = [none]
}
DoubleRelation res = new MaterializedDoubleRelation("Gaussian Model Outlier Score", "gaussian-model-outlier", oscores, relation.getDBIDs());
return new OutlierResult(meta, res);
} } |
public class class_name {
public Hoarde<T> each(Consumer<T> tocall) {
for (int i = 0; i < actors.length; i++) {
tocall.accept( (T) actors[i] );
}
return this;
} } | public class class_name {
public Hoarde<T> each(Consumer<T> tocall) {
for (int i = 0; i < actors.length; i++) {
tocall.accept( (T) actors[i] ); // depends on control dependency: [for], data = [i]
}
return this;
} } |
public class class_name {
public Parser<RECORD> addParseTarget(final Method method,
final SetterPolicy setterPolicy,
final List<String> fieldValues) {
assembled = false;
if (method == null || fieldValues == null) {
return this; // Nothing to do here
}
final Class<?>[] parameters = method.getParameterTypes();
if (
// Setters that receive a String
((parameters.length == 1) && (parameters[0] == String.class)) ||
((parameters.length == 2) && (parameters[0] == String.class) && (parameters[1] == String.class)) ||
// Setters that receive a Long
((parameters.length == 1) && (parameters[0] == Long.class)) ||
((parameters.length == 2) && (parameters[0] == String.class) && (parameters[1] == Long.class)) ||
// Setters that receive a Double
((parameters.length == 1) && (parameters[0] == Double.class)) ||
((parameters.length == 2) && (parameters[0] == String.class) && (parameters[1] == Double.class))
) {
for (final String fieldValue : fieldValues) {
if (fieldValue == null) {
continue;
}
String cleanedFieldValue = cleanupFieldValue(fieldValue);
if (!fieldValue.equals(cleanedFieldValue)) {
LOG.warn("The requested \"{}\" was converted into \"{}\"", fieldValue, cleanedFieldValue);
}
// We have 1 real target
Set<Pair<Method, SetterPolicy>> fieldTargets = targets.computeIfAbsent(cleanedFieldValue, k -> new HashSet<>());
fieldTargets.add(Pair.of(method, setterPolicy));
targets.put(cleanedFieldValue, fieldTargets);
// We have 1 real target
Set<Pair<List<String>, SetterPolicy>> fieldTargetNames = targetsMethodNames.get(cleanedFieldValue);
if (fieldTargetNames == null) {
fieldTargetNames = new HashSet<>();
}
List<String> methodList = new ArrayList<>();
methodList.add(method.getName());
for (Class<?> clazz: method.getParameterTypes()) {
methodList.add(clazz.getCanonicalName());
}
fieldTargetNames.add(Pair.of(methodList, setterPolicy));
targetsMethodNames.put(cleanedFieldValue, fieldTargetNames);
}
} else {
throw new InvalidFieldMethodSignature(method);
}
return this;
} } | public class class_name {
public Parser<RECORD> addParseTarget(final Method method,
final SetterPolicy setterPolicy,
final List<String> fieldValues) {
assembled = false;
if (method == null || fieldValues == null) {
return this; // Nothing to do here // depends on control dependency: [if], data = [none]
}
final Class<?>[] parameters = method.getParameterTypes();
if (
// Setters that receive a String
((parameters.length == 1) && (parameters[0] == String.class)) ||
((parameters.length == 2) && (parameters[0] == String.class) && (parameters[1] == String.class)) ||
// Setters that receive a Long
((parameters.length == 1) && (parameters[0] == Long.class)) ||
((parameters.length == 2) && (parameters[0] == String.class) && (parameters[1] == Long.class)) ||
// Setters that receive a Double
((parameters.length == 1) && (parameters[0] == Double.class)) ||
((parameters.length == 2) && (parameters[0] == String.class) && (parameters[1] == Double.class))
) {
for (final String fieldValue : fieldValues) {
if (fieldValue == null) {
continue;
}
String cleanedFieldValue = cleanupFieldValue(fieldValue);
if (!fieldValue.equals(cleanedFieldValue)) {
LOG.warn("The requested \"{}\" was converted into \"{}\"", fieldValue, cleanedFieldValue); // depends on control dependency: [if], data = [none]
}
// We have 1 real target
Set<Pair<Method, SetterPolicy>> fieldTargets = targets.computeIfAbsent(cleanedFieldValue, k -> new HashSet<>());
fieldTargets.add(Pair.of(method, setterPolicy)); // depends on control dependency: [for], data = [none]
targets.put(cleanedFieldValue, fieldTargets); // depends on control dependency: [for], data = [none]
// We have 1 real target
Set<Pair<List<String>, SetterPolicy>> fieldTargetNames = targetsMethodNames.get(cleanedFieldValue);
if (fieldTargetNames == null) {
fieldTargetNames = new HashSet<>(); // depends on control dependency: [if], data = [none]
}
List<String> methodList = new ArrayList<>();
methodList.add(method.getName()); // depends on control dependency: [for], data = [none]
for (Class<?> clazz: method.getParameterTypes()) {
methodList.add(clazz.getCanonicalName()); // depends on control dependency: [for], data = [clazz]
}
fieldTargetNames.add(Pair.of(methodList, setterPolicy)); // depends on control dependency: [for], data = [none]
targetsMethodNames.put(cleanedFieldValue, fieldTargetNames); // depends on control dependency: [for], data = [none]
}
} else {
throw new InvalidFieldMethodSignature(method);
}
return this;
} } |
public class class_name {
public static String shrink(String string, char c) {
if (string == null) {
return null;
}
string = string.trim();
Pattern pattern = Pattern.compile("\\" + String.valueOf(c) + "{2,}");
return pattern.matcher(string).replaceAll(String.valueOf(c));
} } | public class class_name {
public static String shrink(String string, char c) {
if (string == null) {
return null; // depends on control dependency: [if], data = [none]
}
string = string.trim();
Pattern pattern = Pattern.compile("\\" + String.valueOf(c) + "{2,}");
return pattern.matcher(string).replaceAll(String.valueOf(c));
} } |
public class class_name {
public Observable<ServiceResponse<Page<StampCapacityInner>>> listCapacitiesSinglePageAsync(final String resourceGroupName, final String name) {
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (name == null) {
throw new IllegalArgumentException("Parameter name is required and cannot be null.");
}
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (this.client.apiVersion() == null) {
throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
}
return service.listCapacities(resourceGroupName, name, this.client.subscriptionId(), this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<StampCapacityInner>>>>() {
@Override
public Observable<ServiceResponse<Page<StampCapacityInner>>> call(Response<ResponseBody> response) {
try {
ServiceResponse<PageImpl<StampCapacityInner>> result = listCapacitiesDelegate(response);
return Observable.just(new ServiceResponse<Page<StampCapacityInner>>(result.body(), result.response()));
} catch (Throwable t) {
return Observable.error(t);
}
}
});
} } | public class class_name {
public Observable<ServiceResponse<Page<StampCapacityInner>>> listCapacitiesSinglePageAsync(final String resourceGroupName, final String name) {
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (name == null) {
throw new IllegalArgumentException("Parameter name is required and cannot be null.");
}
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (this.client.apiVersion() == null) {
throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
}
return service.listCapacities(resourceGroupName, name, this.client.subscriptionId(), this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<StampCapacityInner>>>>() {
@Override
public Observable<ServiceResponse<Page<StampCapacityInner>>> call(Response<ResponseBody> response) {
try {
ServiceResponse<PageImpl<StampCapacityInner>> result = listCapacitiesDelegate(response);
return Observable.just(new ServiceResponse<Page<StampCapacityInner>>(result.body(), result.response())); // depends on control dependency: [try], data = [none]
} catch (Throwable t) {
return Observable.error(t);
} // depends on control dependency: [catch], data = [none]
}
});
} } |
public class class_name {
public BaseDescr declare( PackageDescrBuilder pkg ) throws RecognitionException {
BaseDescr declaration = null;
try {
DeclareDescrBuilder declare = helper.start( pkg,
DeclareDescrBuilder.class,
null );
// 'declare'
match( input,
DRL5Lexer.ID,
DroolsSoftKeywords.DECLARE,
null,
DroolsEditorType.KEYWORD );
if ( state.failed ) return null;
if ( helper.validateIdentifierKey( DroolsSoftKeywords.ENTRY ) ) {
// entry point declaration
declaration = entryPointDeclaration( declare );
} else if( helper.validateIdentifierKey( DroolsSoftKeywords.WINDOW ) ) {
// window declaration
declaration = windowDeclaration( declare );
} else if ( helper.validateIdentifierKey( DroolsSoftKeywords.TRAIT ) ) {
// trait type declaration
// 'trait'
match( input,
DRL5Lexer.ID,
DroolsSoftKeywords.TRAIT,
null,
DroolsEditorType.KEYWORD );
if ( state.failed ) return null;
declaration = typeDeclaration( declare, true );
} else if ( helper.validateIdentifierKey( DroolsSoftKeywords.ENUM ) ) {
match( input,
DRL5Lexer.ID,
DroolsSoftKeywords.ENUM,
null,
DroolsEditorType.KEYWORD );
if ( state.failed ) return null;
declaration = enumDeclaration( declare );
} else {
// class type declaration
declaration = typeDeclaration( declare, false );
}
} catch ( RecognitionException re ) {
reportError( re );
}
return declaration;
} } | public class class_name {
public BaseDescr declare( PackageDescrBuilder pkg ) throws RecognitionException {
BaseDescr declaration = null;
try {
DeclareDescrBuilder declare = helper.start( pkg,
DeclareDescrBuilder.class,
null );
// 'declare'
match( input,
DRL5Lexer.ID,
DroolsSoftKeywords.DECLARE,
null,
DroolsEditorType.KEYWORD );
if ( state.failed ) return null;
if ( helper.validateIdentifierKey( DroolsSoftKeywords.ENTRY ) ) {
// entry point declaration
declaration = entryPointDeclaration( declare ); // depends on control dependency: [if], data = [none]
} else if( helper.validateIdentifierKey( DroolsSoftKeywords.WINDOW ) ) {
// window declaration
declaration = windowDeclaration( declare ); // depends on control dependency: [if], data = [none]
} else if ( helper.validateIdentifierKey( DroolsSoftKeywords.TRAIT ) ) {
// trait type declaration
// 'trait'
match( input,
DRL5Lexer.ID,
DroolsSoftKeywords.TRAIT,
null,
DroolsEditorType.KEYWORD ); // depends on control dependency: [if], data = [none]
if ( state.failed ) return null;
declaration = typeDeclaration( declare, true ); // depends on control dependency: [if], data = [none]
} else if ( helper.validateIdentifierKey( DroolsSoftKeywords.ENUM ) ) {
match( input,
DRL5Lexer.ID,
DroolsSoftKeywords.ENUM,
null,
DroolsEditorType.KEYWORD ); // depends on control dependency: [if], data = [none]
if ( state.failed ) return null;
declaration = enumDeclaration( declare ); // depends on control dependency: [if], data = [none]
} else {
// class type declaration
declaration = typeDeclaration( declare, false ); // depends on control dependency: [if], data = [none]
}
} catch ( RecognitionException re ) {
reportError( re );
}
return declaration;
} } |
public class class_name {
public void registered(ResteasyProviderFactory factory) {
System.out.println("registered - factory = " + factory);
ResourceConstructor constructor = this.resourceClass.getConstructor();
if (constructor == null) {
final Class<?> clazz = this.resourceClass.getClazz();
final Class<?> aClass = DI.getSubTypesWithoutInterfacesAndGeneratedOf(clazz).stream().findFirst().get();
constructor = ResourceBuilder.constructor(aClass);
}
//
if (constructor == null) {
throw new RuntimeException(Messages.MESSAGES.unableToFindPublicConstructorForClass(this.scannableClass.getName()));
} else {
this.constructorInjector = factory.getInjectorFactory().createConstructor(constructor , factory);
this.propertyInjector = factory.getInjectorFactory().createPropertyInjector(this.resourceClass , factory);
}
} } | public class class_name {
public void registered(ResteasyProviderFactory factory) {
System.out.println("registered - factory = " + factory);
ResourceConstructor constructor = this.resourceClass.getConstructor();
if (constructor == null) {
final Class<?> clazz = this.resourceClass.getClazz();
final Class<?> aClass = DI.getSubTypesWithoutInterfacesAndGeneratedOf(clazz).stream().findFirst().get();
constructor = ResourceBuilder.constructor(aClass);
}
//
if (constructor == null) {
throw new RuntimeException(Messages.MESSAGES.unableToFindPublicConstructorForClass(this.scannableClass.getName()));
} else {
this.constructorInjector = factory.getInjectorFactory().createConstructor(constructor , factory); // depends on control dependency: [if], data = [(constructor]
this.propertyInjector = factory.getInjectorFactory().createPropertyInjector(this.resourceClass , factory); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
static void putRecordMarkingAndSend(Channel channel, Xdr rpcRequest) {
// XDR header buffer
List<ByteBuffer> buffers = new LinkedList<>();
buffers.add(ByteBuffer.wrap(rpcRequest.getBuffer(), 0, rpcRequest.getOffset()));
// payload buffer
if (rpcRequest.getPayloads() != null) {
buffers.addAll(rpcRequest.getPayloads());
}
List<ByteBuffer> outBuffers = new ArrayList<>();
int bytesToWrite = 0;
int remainingBuffers = buffers.size();
boolean isLast = false;
for (ByteBuffer buffer : buffers) {
if (bytesToWrite + buffer.remaining() > MTU_SIZE) {
if (outBuffers.isEmpty()) {
LOG.error("too big single byte buffer {}", buffer.remaining());
throw new IllegalArgumentException(
String.format("too big single byte buffer %d", buffer.remaining()));
} else {
sendBuffers(channel, bytesToWrite, outBuffers, isLast);
bytesToWrite = 0;
outBuffers.clear();
}
}
outBuffers.add(buffer);
bytesToWrite += buffer.remaining();
remainingBuffers -= 1;
isLast = (remainingBuffers == 0);
}
// send out remaining buffers
if (!outBuffers.isEmpty()) {
sendBuffers(channel, bytesToWrite, outBuffers, true);
}
} } | public class class_name {
static void putRecordMarkingAndSend(Channel channel, Xdr rpcRequest) {
// XDR header buffer
List<ByteBuffer> buffers = new LinkedList<>();
buffers.add(ByteBuffer.wrap(rpcRequest.getBuffer(), 0, rpcRequest.getOffset()));
// payload buffer
if (rpcRequest.getPayloads() != null) {
buffers.addAll(rpcRequest.getPayloads()); // depends on control dependency: [if], data = [(rpcRequest.getPayloads()]
}
List<ByteBuffer> outBuffers = new ArrayList<>();
int bytesToWrite = 0;
int remainingBuffers = buffers.size();
boolean isLast = false;
for (ByteBuffer buffer : buffers) {
if (bytesToWrite + buffer.remaining() > MTU_SIZE) {
if (outBuffers.isEmpty()) {
LOG.error("too big single byte buffer {}", buffer.remaining()); // depends on control dependency: [if], data = [none]
throw new IllegalArgumentException(
String.format("too big single byte buffer %d", buffer.remaining()));
} else {
sendBuffers(channel, bytesToWrite, outBuffers, isLast); // depends on control dependency: [if], data = [none]
bytesToWrite = 0; // depends on control dependency: [if], data = [none]
outBuffers.clear(); // depends on control dependency: [if], data = [none]
}
}
outBuffers.add(buffer); // depends on control dependency: [for], data = [buffer]
bytesToWrite += buffer.remaining(); // depends on control dependency: [for], data = [buffer]
remainingBuffers -= 1; // depends on control dependency: [for], data = [none]
isLast = (remainingBuffers == 0); // depends on control dependency: [for], data = [none]
}
// send out remaining buffers
if (!outBuffers.isEmpty()) {
sendBuffers(channel, bytesToWrite, outBuffers, true); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
private void buildFunGroups(List<AttachedGroup> attachedGroups) {
Iterator<AttachedGroup> groupsIterator = attachedGroups.iterator();
while (groupsIterator.hasNext()) {
AttachedGroup attachedGroup = groupsIterator.next();
Iterator<Token> locationsIterator = attachedGroup.getLocations().iterator();
while (locationsIterator.hasNext()) {
Token locationToken = locationsIterator.next();
addFunGroup(attachedGroup.getName(), Integer.parseInt(locationToken.image) - 1);
}
}
} } | public class class_name {
private void buildFunGroups(List<AttachedGroup> attachedGroups) {
Iterator<AttachedGroup> groupsIterator = attachedGroups.iterator();
while (groupsIterator.hasNext()) {
AttachedGroup attachedGroup = groupsIterator.next();
Iterator<Token> locationsIterator = attachedGroup.getLocations().iterator();
while (locationsIterator.hasNext()) {
Token locationToken = locationsIterator.next();
addFunGroup(attachedGroup.getName(), Integer.parseInt(locationToken.image) - 1); // depends on control dependency: [while], data = [none]
}
}
} } |
public class class_name {
private void hostSubscribe(String eventName, boolean subscribe) {
if (globalEventDispatcher != null) {
try {
globalEventDispatcher.subscribeRemoteEvent(eventName, subscribe);
} catch (Throwable e) {
log.error(
"Error " + (subscribe ? "subscribing to" : "unsubscribing from") + " remote event '" + eventName + "'",
e);
}
}
} } | public class class_name {
private void hostSubscribe(String eventName, boolean subscribe) {
if (globalEventDispatcher != null) {
try {
globalEventDispatcher.subscribeRemoteEvent(eventName, subscribe); // depends on control dependency: [try], data = [none]
} catch (Throwable e) {
log.error(
"Error " + (subscribe ? "subscribing to" : "unsubscribing from") + " remote event '" + eventName + "'",
e);
} // depends on control dependency: [catch], data = [none]
}
} } |
public class class_name {
private void indent(TaskListener listener, int depth) {
for (int i = 0; i < depth; i++) {
listener.getLogger().print(' ');
}
} } | public class class_name {
private void indent(TaskListener listener, int depth) {
for (int i = 0; i < depth; i++) {
listener.getLogger().print(' '); // depends on control dependency: [for], data = [none]
}
} } |
public class class_name {
public Period multipliedBy(int scalar) {
if (this == ZERO || scalar == 1) {
return this;
}
return create(
Math.multiplyExact(years, scalar),
Math.multiplyExact(months, scalar),
Math.multiplyExact(days, scalar));
} } | public class class_name {
public Period multipliedBy(int scalar) {
if (this == ZERO || scalar == 1) {
return this; // depends on control dependency: [if], data = [none]
}
return create(
Math.multiplyExact(years, scalar),
Math.multiplyExact(months, scalar),
Math.multiplyExact(days, scalar));
} } |
public class class_name {
public static String createAuthenticationHash(final String userName, final char[] password) {
ByteArrayOutputStream bout = new ByteArrayOutputStream(userName.length() + 20 + password.length);
try {
bout.write(userName.getBytes(UTF_8_CHARSET));
bout.write(":mongo:".getBytes(UTF_8_CHARSET));
bout.write(new String(password).getBytes(UTF_8_CHARSET));
} catch (IOException ioe) {
throw new RuntimeException("impossible", ioe);
}
return hexMD5(bout.toByteArray());
} } | public class class_name {
public static String createAuthenticationHash(final String userName, final char[] password) {
ByteArrayOutputStream bout = new ByteArrayOutputStream(userName.length() + 20 + password.length);
try {
bout.write(userName.getBytes(UTF_8_CHARSET)); // depends on control dependency: [try], data = [none]
bout.write(":mongo:".getBytes(UTF_8_CHARSET)); // depends on control dependency: [try], data = [none]
bout.write(new String(password).getBytes(UTF_8_CHARSET)); // depends on control dependency: [try], data = [none]
} catch (IOException ioe) {
throw new RuntimeException("impossible", ioe);
} // depends on control dependency: [catch], data = [none]
return hexMD5(bout.toByteArray());
} } |
public class class_name {
public java.util.List<String> getDhcpOptionsIds() {
if (dhcpOptionsIds == null) {
dhcpOptionsIds = new com.amazonaws.internal.SdkInternalList<String>();
}
return dhcpOptionsIds;
} } | public class class_name {
public java.util.List<String> getDhcpOptionsIds() {
if (dhcpOptionsIds == null) {
dhcpOptionsIds = new com.amazonaws.internal.SdkInternalList<String>(); // depends on control dependency: [if], data = [none]
}
return dhcpOptionsIds;
} } |
public class class_name {
public void setData(TShortList list) {
this.buffer = CausticUtil.createByteBuffer(list.size() * DataType.SHORT.getByteSize());
final TShortIterator iterator = list.iterator();
while (iterator.hasNext()) {
buffer.putShort(iterator.next());
}
} } | public class class_name {
public void setData(TShortList list) {
this.buffer = CausticUtil.createByteBuffer(list.size() * DataType.SHORT.getByteSize());
final TShortIterator iterator = list.iterator();
while (iterator.hasNext()) {
buffer.putShort(iterator.next()); // depends on control dependency: [while], data = [none]
}
} } |
public class class_name {
@Override
public EClass getIfcConnectionPointGeometry() {
if (ifcConnectionPointGeometryEClass == null) {
ifcConnectionPointGeometryEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
.getEClassifiers().get(124);
}
return ifcConnectionPointGeometryEClass;
} } | public class class_name {
@Override
public EClass getIfcConnectionPointGeometry() {
if (ifcConnectionPointGeometryEClass == null) {
ifcConnectionPointGeometryEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
.getEClassifiers().get(124);
// depends on control dependency: [if], data = [none]
}
return ifcConnectionPointGeometryEClass;
} } |
public class class_name {
public static<T extends ImageGray<T>, FD extends TupleDesc> Homography2D_F64
computeTransform( T imageA , T imageB ,
DetectDescribePoint<T,FD> detDesc ,
AssociateDescription<FD> associate ,
ModelMatcher<Homography2D_F64,AssociatedPair> modelMatcher )
{
// get the length of the description
List<Point2D_F64> pointsA = new ArrayList<>();
FastQueue<FD> descA = UtilFeature.createQueue(detDesc,100);
List<Point2D_F64> pointsB = new ArrayList<>();
FastQueue<FD> descB = UtilFeature.createQueue(detDesc,100);
// extract feature locations and descriptions from each image
describeImage(imageA, detDesc, pointsA, descA);
describeImage(imageB, detDesc, pointsB, descB);
// Associate features between the two images
associate.setSource(descA);
associate.setDestination(descB);
associate.associate();
// create a list of AssociatedPairs that tell the model matcher how a feature moved
FastQueue<AssociatedIndex> matches = associate.getMatches();
List<AssociatedPair> pairs = new ArrayList<>();
for( int i = 0; i < matches.size(); i++ ) {
AssociatedIndex match = matches.get(i);
Point2D_F64 a = pointsA.get(match.src);
Point2D_F64 b = pointsB.get(match.dst);
pairs.add( new AssociatedPair(a,b,false));
}
// find the best fit model to describe the change between these images
if( !modelMatcher.process(pairs) )
throw new RuntimeException("Model Matcher failed!");
// return the found image transform
return modelMatcher.getModelParameters().copy();
} } | public class class_name {
public static<T extends ImageGray<T>, FD extends TupleDesc> Homography2D_F64
computeTransform( T imageA , T imageB ,
DetectDescribePoint<T,FD> detDesc ,
AssociateDescription<FD> associate ,
ModelMatcher<Homography2D_F64,AssociatedPair> modelMatcher )
{
// get the length of the description
List<Point2D_F64> pointsA = new ArrayList<>();
FastQueue<FD> descA = UtilFeature.createQueue(detDesc,100);
List<Point2D_F64> pointsB = new ArrayList<>();
FastQueue<FD> descB = UtilFeature.createQueue(detDesc,100);
// extract feature locations and descriptions from each image
describeImage(imageA, detDesc, pointsA, descA);
describeImage(imageB, detDesc, pointsB, descB);
// Associate features between the two images
associate.setSource(descA);
associate.setDestination(descB);
associate.associate();
// create a list of AssociatedPairs that tell the model matcher how a feature moved
FastQueue<AssociatedIndex> matches = associate.getMatches();
List<AssociatedPair> pairs = new ArrayList<>();
for( int i = 0; i < matches.size(); i++ ) {
AssociatedIndex match = matches.get(i);
Point2D_F64 a = pointsA.get(match.src);
Point2D_F64 b = pointsB.get(match.dst);
pairs.add( new AssociatedPair(a,b,false)); // depends on control dependency: [for], data = [none]
}
// find the best fit model to describe the change between these images
if( !modelMatcher.process(pairs) )
throw new RuntimeException("Model Matcher failed!");
// return the found image transform
return modelMatcher.getModelParameters().copy();
} } |
public class class_name {
@Override
public String toDot() {
StringBuilder dotBuilder = new StringBuilder();
dotBuilder.append("digraph G {\n");
for (int state = 0; state < d_stateOffsets.size(); ++state) {
for (int trans = d_stateOffsets.get(state); trans < transitionsUpperBound(state); ++trans)
dotBuilder.append(String.format("%d -> %d [label=\"%c\"]\n",
state, d_transitionTo.get(trans), d_transitionChars[trans]));
if (d_finalStates.get(state))
dotBuilder.append(String.format("%d [peripheries=2,label=\"%d (%d)\"];\n", state, state, d_stateNSuffixes.get(state)));
else
dotBuilder.append(String.format("%d [label=\"%d (%d)\"];\n", state, state, d_stateNSuffixes.get(state)));
}
dotBuilder.append("}");
return dotBuilder.toString();
} } | public class class_name {
@Override
public String toDot() {
StringBuilder dotBuilder = new StringBuilder();
dotBuilder.append("digraph G {\n");
for (int state = 0; state < d_stateOffsets.size(); ++state) {
for (int trans = d_stateOffsets.get(state); trans < transitionsUpperBound(state); ++trans)
dotBuilder.append(String.format("%d -> %d [label=\"%c\"]\n",
state, d_transitionTo.get(trans), d_transitionChars[trans]));
if (d_finalStates.get(state))
dotBuilder.append(String.format("%d [peripheries=2,label=\"%d (%d)\"];\n", state, state, d_stateNSuffixes.get(state)));
else
dotBuilder.append(String.format("%d [label=\"%d (%d)\"];\n", state, state, d_stateNSuffixes.get(state))); // depends on control dependency: [for], data = [state]
}
dotBuilder.append("}");
return dotBuilder.toString();
} } |
public class class_name {
@SuppressWarnings("unchecked")
public static <T> ProClass<T> load(String className, String feature, ErrorHandler loadFailure) {
try {
return new ProClass<T>((Class<T>) ProClass.class.getClassLoader().loadClass(className));
} catch (ClassNotFoundException e) {
loadFailure.handle("Cannot load pro" + className + " in VoltDB community edition to support the feature "
+ feature + '.', e);
}
return (ProClass<T>) NO_CLASS;
} } | public class class_name {
@SuppressWarnings("unchecked")
public static <T> ProClass<T> load(String className, String feature, ErrorHandler loadFailure) {
try {
return new ProClass<T>((Class<T>) ProClass.class.getClassLoader().loadClass(className)); // depends on control dependency: [try], data = [none]
} catch (ClassNotFoundException e) {
loadFailure.handle("Cannot load pro" + className + " in VoltDB community edition to support the feature "
+ feature + '.', e);
} // depends on control dependency: [catch], data = [none]
return (ProClass<T>) NO_CLASS;
} } |
public class class_name {
public boolean isMatching(CmsSiteMatcher matcher) {
boolean result = m_siteMatcherSites.get(matcher) != null;
if (!result) {
// try to match the workplace site
result = isWorkplaceRequest(matcher);
}
return result;
} } | public class class_name {
public boolean isMatching(CmsSiteMatcher matcher) {
boolean result = m_siteMatcherSites.get(matcher) != null;
if (!result) {
// try to match the workplace site
result = isWorkplaceRequest(matcher); // depends on control dependency: [if], data = [none]
}
return result;
} } |
public class class_name {
public boolean isWifiConnected() {
try {
if (connMan == null) {
connMan = (ConnectivityManager) context
.getSystemService(Context.CONNECTIVITY_SERVICE);
}
NetworkInfo wifiNetInfo = connMan.getNetworkInfo(TYPE_WIFI);
NetworkInfo wimaxNetInfo = connMan.getNetworkInfo(TYPE_WIMAX);
if (wifiNetInfo == null && wimaxNetInfo == null) {
Log.e(TAG, "wifiNetworkInfo is null.");
return false;
}
if (wifiNetInfo != null && wifiNetInfo.isConnected()) {
return true;
}
if (wimaxNetInfo != null && wimaxNetInfo.isConnected()) {
return true;
}
} catch (Exception e) {
Log.e(TAG,
"Exception during isWifiConnected(). - "
+ e.getLocalizedMessage());
}
return false;
} } | public class class_name {
public boolean isWifiConnected() {
try {
if (connMan == null) {
connMan = (ConnectivityManager) context
.getSystemService(Context.CONNECTIVITY_SERVICE); // depends on control dependency: [if], data = [none]
}
NetworkInfo wifiNetInfo = connMan.getNetworkInfo(TYPE_WIFI);
NetworkInfo wimaxNetInfo = connMan.getNetworkInfo(TYPE_WIMAX);
if (wifiNetInfo == null && wimaxNetInfo == null) {
Log.e(TAG, "wifiNetworkInfo is null."); // depends on control dependency: [if], data = [none]
return false; // depends on control dependency: [if], data = [none]
}
if (wifiNetInfo != null && wifiNetInfo.isConnected()) {
return true; // depends on control dependency: [if], data = [none]
}
if (wimaxNetInfo != null && wimaxNetInfo.isConnected()) {
return true; // depends on control dependency: [if], data = [none]
}
} catch (Exception e) {
Log.e(TAG,
"Exception during isWifiConnected(). - "
+ e.getLocalizedMessage());
} // depends on control dependency: [catch], data = [none]
return false;
} } |
public class class_name {
public static void config(Config config) {
try {
provider.config(config);
} catch (Throwable t) {
provider = new DefaultLogProvider();
Log log = provider.getLogger(LogFactory.class.getName());
log.error("Fail on logger provider configuration. Reset logging system to default provider.");
log.dump("Logging configuration stack dump:", t);
}
} } | public class class_name {
public static void config(Config config) {
try {
provider.config(config);
// depends on control dependency: [try], data = [none]
} catch (Throwable t) {
provider = new DefaultLogProvider();
Log log = provider.getLogger(LogFactory.class.getName());
log.error("Fail on logger provider configuration. Reset logging system to default provider.");
log.dump("Logging configuration stack dump:", t);
}
// depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public Content getTagletOutput(Doc holder, TagletWriter writer) {
if (header == null || holder.tags(getName()).length == 0) {
return null;
}
return writer.simpleTagOutput(holder.tags(getName()), header);
} } | public class class_name {
public Content getTagletOutput(Doc holder, TagletWriter writer) {
if (header == null || holder.tags(getName()).length == 0) {
return null; // depends on control dependency: [if], data = [none]
}
return writer.simpleTagOutput(holder.tags(getName()), header);
} } |
public class class_name {
public void marshall(ReplicaSettingsUpdate replicaSettingsUpdate, ProtocolMarshaller protocolMarshaller) {
if (replicaSettingsUpdate == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(replicaSettingsUpdate.getRegionName(), REGIONNAME_BINDING);
protocolMarshaller.marshall(replicaSettingsUpdate.getReplicaProvisionedReadCapacityUnits(), REPLICAPROVISIONEDREADCAPACITYUNITS_BINDING);
protocolMarshaller.marshall(replicaSettingsUpdate.getReplicaProvisionedReadCapacityAutoScalingSettingsUpdate(),
REPLICAPROVISIONEDREADCAPACITYAUTOSCALINGSETTINGSUPDATE_BINDING);
protocolMarshaller
.marshall(replicaSettingsUpdate.getReplicaGlobalSecondaryIndexSettingsUpdate(), REPLICAGLOBALSECONDARYINDEXSETTINGSUPDATE_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(ReplicaSettingsUpdate replicaSettingsUpdate, ProtocolMarshaller protocolMarshaller) {
if (replicaSettingsUpdate == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(replicaSettingsUpdate.getRegionName(), REGIONNAME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(replicaSettingsUpdate.getReplicaProvisionedReadCapacityUnits(), REPLICAPROVISIONEDREADCAPACITYUNITS_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(replicaSettingsUpdate.getReplicaProvisionedReadCapacityAutoScalingSettingsUpdate(),
REPLICAPROVISIONEDREADCAPACITYAUTOSCALINGSETTINGSUPDATE_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller
.marshall(replicaSettingsUpdate.getReplicaGlobalSecondaryIndexSettingsUpdate(), REPLICAGLOBALSECONDARYINDEXSETTINGSUPDATE_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
static final void checkSketchAndMemoryFlags(final Sketch sketch) {
final Memory mem = sketch.getMemory();
if (mem == null) { return; }
final int flags = PreambleUtil.extractFlags(mem);
if (((flags & COMPACT_FLAG_MASK) > 0) ^ sketch.isCompact()) {
throw new SketchesArgumentException("Possible corruption: "
+ "Memory Compact Flag inconsistent with Sketch");
}
if (((flags & ORDERED_FLAG_MASK) > 0) ^ sketch.isOrdered()) {
throw new SketchesArgumentException("Possible corruption: "
+ "Memory Ordered Flag inconsistent with Sketch");
}
} } | public class class_name {
static final void checkSketchAndMemoryFlags(final Sketch sketch) {
final Memory mem = sketch.getMemory();
if (mem == null) { return; } // depends on control dependency: [if], data = [none]
final int flags = PreambleUtil.extractFlags(mem);
if (((flags & COMPACT_FLAG_MASK) > 0) ^ sketch.isCompact()) {
throw new SketchesArgumentException("Possible corruption: "
+ "Memory Compact Flag inconsistent with Sketch");
}
if (((flags & ORDERED_FLAG_MASK) > 0) ^ sketch.isOrdered()) {
throw new SketchesArgumentException("Possible corruption: "
+ "Memory Ordered Flag inconsistent with Sketch");
}
} } |
public class class_name {
@Override
public void cacheResult(List<CommerceCurrency> commerceCurrencies) {
for (CommerceCurrency commerceCurrency : commerceCurrencies) {
if (entityCache.getResult(
CommerceCurrencyModelImpl.ENTITY_CACHE_ENABLED,
CommerceCurrencyImpl.class,
commerceCurrency.getPrimaryKey()) == null) {
cacheResult(commerceCurrency);
}
else {
commerceCurrency.resetOriginalValues();
}
}
} } | public class class_name {
@Override
public void cacheResult(List<CommerceCurrency> commerceCurrencies) {
for (CommerceCurrency commerceCurrency : commerceCurrencies) {
if (entityCache.getResult(
CommerceCurrencyModelImpl.ENTITY_CACHE_ENABLED,
CommerceCurrencyImpl.class,
commerceCurrency.getPrimaryKey()) == null) {
cacheResult(commerceCurrency); // depends on control dependency: [if], data = [none]
}
else {
commerceCurrency.resetOriginalValues(); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
public void resolveBeanClass(ItemRule itemRule) throws IllegalRuleException {
Iterator<Token[]> it = ItemRule.tokenIterator(itemRule);
if (it != null) {
while (it.hasNext()) {
Token[] tokens = it.next();
if (tokens != null) {
for (Token token : tokens) {
resolveBeanClass(token);
}
}
}
}
} } | public class class_name {
public void resolveBeanClass(ItemRule itemRule) throws IllegalRuleException {
Iterator<Token[]> it = ItemRule.tokenIterator(itemRule);
if (it != null) {
while (it.hasNext()) {
Token[] tokens = it.next();
if (tokens != null) {
for (Token token : tokens) {
resolveBeanClass(token); // depends on control dependency: [for], data = [token]
}
}
}
}
} } |
public class class_name {
String withPath(String[] path) {
StringBuilder sb = new StringBuilder();
if (path.length > 0) {
sb.append(" (with path ");
for (int i = 0; i < path.length - 1; i++) {
sb.append(path[i]);
sb.append(".");
}
sb.append(path[path.length - 1]);
sb.append(") ");
}
return sb.toString();
} } | public class class_name {
String withPath(String[] path) {
StringBuilder sb = new StringBuilder();
if (path.length > 0) {
sb.append(" (with path "); // depends on control dependency: [if], data = [none]
for (int i = 0; i < path.length - 1; i++) {
sb.append(path[i]); // depends on control dependency: [for], data = [i]
sb.append("."); // depends on control dependency: [for], data = [none]
}
sb.append(path[path.length - 1]); // depends on control dependency: [if], data = [none]
sb.append(") "); // depends on control dependency: [if], data = [none]
}
return sb.toString();
} } |
public class class_name {
public boolean delete(String key) {
Jedis jedis = null;
try {
jedis = this.jedisPool.getResource();
jedis.del(SafeEncoder.encode(key));
logger.info("delete key:" + key);
return true;
} catch (Exception e) {
logger.error(e.getMessage(), e);
this.jedisPool.returnBrokenResource(jedis);
} finally {
if (jedis != null) {
this.jedisPool.returnResource(jedis);
}
}
return false;
} } | public class class_name {
public boolean delete(String key) {
Jedis jedis = null;
try {
jedis = this.jedisPool.getResource(); // depends on control dependency: [try], data = [none]
jedis.del(SafeEncoder.encode(key)); // depends on control dependency: [try], data = [none]
logger.info("delete key:" + key); // depends on control dependency: [try], data = [none]
return true; // depends on control dependency: [try], data = [none]
} catch (Exception e) {
logger.error(e.getMessage(), e);
this.jedisPool.returnBrokenResource(jedis);
} finally { // depends on control dependency: [catch], data = [none]
if (jedis != null) {
this.jedisPool.returnResource(jedis); // depends on control dependency: [if], data = [(jedis]
}
}
return false;
} } |
public class class_name {
private static String setStandardHELMFourthSection(List<AnnotationNotation> annotations) {
StringBuilder sb = new StringBuilder();
for (AnnotationNotation annotation : annotations) {
sb.append(annotation.toHELM2() + "|");
}
if (sb.length() > 1) {
sb.setLength(sb.length() - 1);
}
return sb.toString();
} } | public class class_name {
private static String setStandardHELMFourthSection(List<AnnotationNotation> annotations) {
StringBuilder sb = new StringBuilder();
for (AnnotationNotation annotation : annotations) {
sb.append(annotation.toHELM2() + "|");
// depends on control dependency: [for], data = [annotation]
}
if (sb.length() > 1) {
sb.setLength(sb.length() - 1);
// depends on control dependency: [if], data = [(sb.length()]
}
return sb.toString();
} } |
public class class_name {
public Object retrieve(K key, int timeout) {
Object retval = null;
if (isCachingEnabled()) {
CachedObject<V> co = getCachedObject(key);
if (co == null || isCachedObjectExpired(co)) {
misses++;
co = new CachedObject<V>();
co.setBeingRetrieved();
this.storeCachedObject(key, co);
} else if (co.getObject() != null) {
hits++;
retval = co.getObject();
} else {
//the timeout for retrieving an object is used instead of the cache timeout
co = getCachedObjectOnceRetrievedByOtherThread(key, timeout);
if (co == null) {
//this could happen on a rare occasion and may not lead to problems
delayedMisses++;
} else if (co.getObject() == null) { // still null
delayedMisses++;
if (co.isExpired(timeout) && co.isBeingRetrieved()) {
// prolongate retrieval state if cached object is not a designated null
co.setBeingRetrieved();
}
} else {
delayedHits++;
retval = co.getObject();
}
}
}
return retval;
} } | public class class_name {
public Object retrieve(K key, int timeout) {
Object retval = null;
if (isCachingEnabled()) {
CachedObject<V> co = getCachedObject(key);
if (co == null || isCachedObjectExpired(co)) {
misses++; // depends on control dependency: [if], data = [none]
co = new CachedObject<V>(); // depends on control dependency: [if], data = [none]
co.setBeingRetrieved(); // depends on control dependency: [if], data = [none]
this.storeCachedObject(key, co); // depends on control dependency: [if], data = [none]
} else if (co.getObject() != null) {
hits++; // depends on control dependency: [if], data = [none]
retval = co.getObject(); // depends on control dependency: [if], data = [none]
} else {
//the timeout for retrieving an object is used instead of the cache timeout
co = getCachedObjectOnceRetrievedByOtherThread(key, timeout); // depends on control dependency: [if], data = [none]
if (co == null) {
//this could happen on a rare occasion and may not lead to problems
delayedMisses++; // depends on control dependency: [if], data = [none]
} else if (co.getObject() == null) { // still null
delayedMisses++; // depends on control dependency: [if], data = [none]
if (co.isExpired(timeout) && co.isBeingRetrieved()) {
// prolongate retrieval state if cached object is not a designated null
co.setBeingRetrieved(); // depends on control dependency: [if], data = [none]
}
} else {
delayedHits++; // depends on control dependency: [if], data = [none]
retval = co.getObject(); // depends on control dependency: [if], data = [none]
}
}
}
return retval;
} } |
public class class_name {
@Override
public List<FileNotifyInformation> watch () throws CIFSException {
if ( !this.handle.isValid() ) {
throw new SmbException("Watch was broken by tree disconnect");
}
try ( SmbTreeHandleImpl th = this.handle.getTree() ) {
CommonServerMessageBlockRequest req;
NotifyResponse resp = null;
if ( th.isSMB2() ) {
Smb2ChangeNotifyRequest r = new Smb2ChangeNotifyRequest(th.getConfig(), this.handle.getFileId());
r.setCompletionFilter(this.filter);
r.setNotifyFlags(this.recursive ? Smb2ChangeNotifyRequest.SMB2_WATCH_TREE : 0);
req = r;
}
else {
if ( !th.hasCapability(SmbConstants.CAP_NT_SMBS) ) {
throw new SmbUnsupportedOperationException("Not supported without CAP_NT_SMBS");
}
/*
* NtTrans Notify Change Request / Response
*/
req = new NtTransNotifyChange(th.getConfig(), this.handle.getFid(), this.filter, this.recursive);
resp = new NtTransNotifyChangeResponse(th.getConfig());
}
if ( log.isTraceEnabled() ) {
log.trace("Sending NtTransNotifyChange for " + this.handle);
}
try {
resp = th.send(req, resp, RequestParam.NO_TIMEOUT, RequestParam.NO_RETRY);
}
catch ( SmbException e ) {
if ( e.getNtStatus() == 0xC0000120 ) {
// cancelled
log.debug("Request was cancelled", e);
return null;
}
throw e;
}
if ( log.isTraceEnabled() ) {
log.trace("Returned from NtTransNotifyChange " + resp.getErrorCode());
}
if ( !resp.isReceived() ) {
throw new CIFSException("Did not receive response");
}
if ( resp.getErrorCode() == 0x10B ) {
this.handle.markClosed();
}
if ( resp.getErrorCode() == 0x10C ) {
resp.getNotifyInformation().clear();
}
return resp.getNotifyInformation();
}
} } | public class class_name {
@Override
public List<FileNotifyInformation> watch () throws CIFSException {
if ( !this.handle.isValid() ) {
throw new SmbException("Watch was broken by tree disconnect");
}
try ( SmbTreeHandleImpl th = this.handle.getTree() ) {
CommonServerMessageBlockRequest req;
NotifyResponse resp = null;
if ( th.isSMB2() ) {
Smb2ChangeNotifyRequest r = new Smb2ChangeNotifyRequest(th.getConfig(), this.handle.getFileId());
r.setCompletionFilter(this.filter);
r.setNotifyFlags(this.recursive ? Smb2ChangeNotifyRequest.SMB2_WATCH_TREE : 0);
req = r;
}
else {
if ( !th.hasCapability(SmbConstants.CAP_NT_SMBS) ) {
throw new SmbUnsupportedOperationException("Not supported without CAP_NT_SMBS");
}
/*
* NtTrans Notify Change Request / Response
*/
req = new NtTransNotifyChange(th.getConfig(), this.handle.getFid(), this.filter, this.recursive);
resp = new NtTransNotifyChangeResponse(th.getConfig());
}
if ( log.isTraceEnabled() ) {
log.trace("Sending NtTransNotifyChange for " + this.handle);
}
try {
resp = th.send(req, resp, RequestParam.NO_TIMEOUT, RequestParam.NO_RETRY);
}
catch ( SmbException e ) {
if ( e.getNtStatus() == 0xC0000120 ) {
// cancelled
log.debug("Request was cancelled", e); // depends on control dependency: [if], data = [none]
return null; // depends on control dependency: [if], data = [none]
}
throw e;
}
if ( log.isTraceEnabled() ) {
log.trace("Returned from NtTransNotifyChange " + resp.getErrorCode());
}
if ( !resp.isReceived() ) {
throw new CIFSException("Did not receive response");
}
if ( resp.getErrorCode() == 0x10B ) {
this.handle.markClosed();
}
if ( resp.getErrorCode() == 0x10C ) {
resp.getNotifyInformation().clear();
}
return resp.getNotifyInformation();
}
} } |
public class class_name {
public void setPreTransProgram( final String program) {
if ( null == program) {
format.getHeader().createEntry( PRETRANSPROG, DEFAULTSCRIPTPROG);
} else if ( 0 == program.length()){
format.getHeader().createEntry( PRETRANSPROG, DEFAULTSCRIPTPROG);
} else {
format.getHeader().createEntry( PRETRANSPROG, program);
}
} } | public class class_name {
public void setPreTransProgram( final String program) {
if ( null == program) {
format.getHeader().createEntry( PRETRANSPROG, DEFAULTSCRIPTPROG); // depends on control dependency: [if], data = [none]
} else if ( 0 == program.length()){
format.getHeader().createEntry( PRETRANSPROG, DEFAULTSCRIPTPROG); // depends on control dependency: [if], data = [none]
} else {
format.getHeader().createEntry( PRETRANSPROG, program); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static String join(final String[] seq, final String delimiter) {
if (null == seq || seq.length < 1) return "";
Assert.notNull(delimiter);
StringBuilder aim = new StringBuilder();
for (int i = 0; i < seq.length; i++) {
if (aim.length() > 0) aim.append(delimiter);
aim.append(seq[i]);
}
return aim.toString();
} } | public class class_name {
public static String join(final String[] seq, final String delimiter) {
if (null == seq || seq.length < 1) return "";
Assert.notNull(delimiter);
StringBuilder aim = new StringBuilder();
for (int i = 0; i < seq.length; i++) {
if (aim.length() > 0) aim.append(delimiter);
aim.append(seq[i]); // depends on control dependency: [for], data = [i]
}
return aim.toString();
} } |
public class class_name {
@Override
public Promise subscribe(String channel) {
try {
// Create topic
ProjectTopicName topicName = ProjectTopicName.of(projectID, channel);
Topic topic = null;
try {
topic = topicAdmin.getTopic(topicName);
} catch (NotFoundException notFound) {
}
if (topic == null) {
topic = topicAdmin.createTopic(topicName);
logger.info("Topic \"" + topic.getName() + "\" created successfully.");
}
// Create subscription
String nodeSubscription;
if (channel.endsWith('.' + nodeID)) {
nodeSubscription = channel;
} else {
nodeSubscription = channel + '-' + nodeID;
}
ProjectSubscriptionName subscriptionName = ProjectSubscriptionName.of(projectID, nodeSubscription);
Subscription subscription = null;
try {
subscription = subscriptionAdmin.getSubscription(subscriptionName);
} catch (NotFoundException notFound) {
}
if (subscription == null) {
subscription = subscriptionAdmin.createSubscription(subscriptionName, topicName,
PushConfig.getDefaultInstance(), ackDeadlineSeconds);
logger.info("Subscription \"" + subscription.getName() + "\" created successfully.");
}
// Create subscriber
synchronized (subscribers) {
if (!subscribers.containsKey(nodeSubscription)) {
Subscriber.Builder builder = Subscriber.newBuilder(subscriptionName, (message, consumer) -> {
// Message received
try {
// We are running in a netty executor's pool,
// do not create new task.
processReceivedMessage(channel, message.getData().toByteArray());
} finally {
consumer.ack();
}
});
if (channelProvider != null) {
builder.setChannelProvider(channelProvider);
}
if (credentialsProvider != null) {
builder.setCredentialsProvider(credentialsProvider);
}
if (executorProvider != null) {
builder.setExecutorProvider(executorProvider);
builder.setSystemExecutorProvider(executorProvider);
} else {
builder.setExecutorProvider(defaultExecutorProvider);
builder.setSystemExecutorProvider(defaultExecutorProvider);
}
if (headerProvider != null) {
builder.setHeaderProvider(headerProvider);
}
if (maxAckExtensionPeriod != null) {
builder.setMaxAckExtensionPeriod(maxAckExtensionPeriod);
}
if (parallelPullCount > 0) {
builder.setParallelPullCount(parallelPullCount);
}
if (flowControlSettings != null) {
builder.setFlowControlSettings(flowControlSettings);
}
Subscriber subscriber = builder.build();
subscriber.startAsync();
subscribers.put(nodeSubscription, subscriber);
logger.info(
"Subscriber created for subscription \"" + subscriber.getSubscriptionNameString() + "\".");
}
}
} catch (Exception cause) {
return Promise.reject(cause);
}
return Promise.resolve();
} } | public class class_name {
@Override
public Promise subscribe(String channel) {
try {
// Create topic
ProjectTopicName topicName = ProjectTopicName.of(projectID, channel);
Topic topic = null;
try {
topic = topicAdmin.getTopic(topicName);
// depends on control dependency: [try], data = [none]
} catch (NotFoundException notFound) {
}
// depends on control dependency: [catch], data = [none]
if (topic == null) {
topic = topicAdmin.createTopic(topicName);
// depends on control dependency: [if], data = [(topic]
logger.info("Topic \"" + topic.getName() + "\" created successfully.");
// depends on control dependency: [if], data = [none]
}
// Create subscription
String nodeSubscription;
if (channel.endsWith('.' + nodeID)) {
nodeSubscription = channel;
// depends on control dependency: [if], data = [none]
} else {
nodeSubscription = channel + '-' + nodeID;
// depends on control dependency: [if], data = [none]
}
ProjectSubscriptionName subscriptionName = ProjectSubscriptionName.of(projectID, nodeSubscription);
Subscription subscription = null;
try {
subscription = subscriptionAdmin.getSubscription(subscriptionName);
// depends on control dependency: [try], data = [none]
} catch (NotFoundException notFound) {
}
// depends on control dependency: [catch], data = [none]
if (subscription == null) {
subscription = subscriptionAdmin.createSubscription(subscriptionName, topicName,
PushConfig.getDefaultInstance(), ackDeadlineSeconds);
// depends on control dependency: [if], data = [none]
logger.info("Subscription \"" + subscription.getName() + "\" created successfully.");
// depends on control dependency: [if], data = [none]
}
// Create subscriber
synchronized (subscribers) {
// depends on control dependency: [try], data = [none]
if (!subscribers.containsKey(nodeSubscription)) {
Subscriber.Builder builder = Subscriber.newBuilder(subscriptionName, (message, consumer) -> {
// Message received
try {
// We are running in a netty executor's pool,
// do not create new task.
processReceivedMessage(channel, message.getData().toByteArray());
// depends on control dependency: [try], data = [none]
} finally {
consumer.ack();
}
});
if (channelProvider != null) {
builder.setChannelProvider(channelProvider);
// depends on control dependency: [if], data = [(channelProvider]
}
if (credentialsProvider != null) {
builder.setCredentialsProvider(credentialsProvider);
// depends on control dependency: [if], data = [(credentialsProvider]
}
if (executorProvider != null) {
builder.setExecutorProvider(executorProvider);
// depends on control dependency: [if], data = [(executorProvider]
builder.setSystemExecutorProvider(executorProvider);
// depends on control dependency: [if], data = [(executorProvider]
} else {
builder.setExecutorProvider(defaultExecutorProvider);
// depends on control dependency: [if], data = [none]
builder.setSystemExecutorProvider(defaultExecutorProvider);
// depends on control dependency: [if], data = [none]
}
if (headerProvider != null) {
builder.setHeaderProvider(headerProvider);
// depends on control dependency: [if], data = [(headerProvider]
}
if (maxAckExtensionPeriod != null) {
builder.setMaxAckExtensionPeriod(maxAckExtensionPeriod);
// depends on control dependency: [if], data = [(maxAckExtensionPeriod]
}
if (parallelPullCount > 0) {
builder.setParallelPullCount(parallelPullCount);
// depends on control dependency: [if], data = [(parallelPullCount]
}
if (flowControlSettings != null) {
builder.setFlowControlSettings(flowControlSettings);
// depends on control dependency: [if], data = [(flowControlSettings]
}
Subscriber subscriber = builder.build();
subscriber.startAsync();
subscribers.put(nodeSubscription, subscriber);
logger.info(
"Subscriber created for subscription \"" + subscriber.getSubscriptionNameString() + "\".");
}
}
} catch (Exception cause) {
return Promise.reject(cause);
}
// depends on control dependency: [catch], data = [none]
return Promise.resolve();
} } |
public class class_name {
public Content getPreQualifiedClassLink(LinkInfoImpl.Kind context,
ClassDoc cd, boolean isStrong) {
ContentBuilder classlink = new ContentBuilder();
PackageDoc pd = cd.containingPackage();
if (pd != null && ! configuration.shouldExcludeQualifier(pd.name())) {
classlink.addContent(getPkgName(cd));
}
classlink.addContent(getLink(new LinkInfoImpl(configuration,
context, cd).label(cd.name()).strong(isStrong)));
return classlink;
} } | public class class_name {
public Content getPreQualifiedClassLink(LinkInfoImpl.Kind context,
ClassDoc cd, boolean isStrong) {
ContentBuilder classlink = new ContentBuilder();
PackageDoc pd = cd.containingPackage();
if (pd != null && ! configuration.shouldExcludeQualifier(pd.name())) {
classlink.addContent(getPkgName(cd)); // depends on control dependency: [if], data = [none]
}
classlink.addContent(getLink(new LinkInfoImpl(configuration,
context, cd).label(cd.name()).strong(isStrong)));
return classlink;
} } |
public class class_name {
synchronized void setJobPriority(JobID jobId, JobPriority priority) {
JobInProgress job = jobs.get(jobId);
if (job != null) {
synchronized (taskScheduler) {
JobStatus oldStatus = (JobStatus)job.getStatus().clone();
job.setPriority(priority);
JobStatus newStatus = (JobStatus)job.getStatus().clone();
JobStatusChangeEvent event =
new JobStatusChangeEvent(job, EventType.PRIORITY_CHANGED, oldStatus,
newStatus);
updateJobInProgressListeners(event);
}
} else {
LOG.warn("Trying to change the priority of an unknown job: " + jobId);
}
} } | public class class_name {
synchronized void setJobPriority(JobID jobId, JobPriority priority) {
JobInProgress job = jobs.get(jobId);
if (job != null) {
synchronized (taskScheduler) { // depends on control dependency: [if], data = [none]
JobStatus oldStatus = (JobStatus)job.getStatus().clone();
job.setPriority(priority);
JobStatus newStatus = (JobStatus)job.getStatus().clone();
JobStatusChangeEvent event =
new JobStatusChangeEvent(job, EventType.PRIORITY_CHANGED, oldStatus,
newStatus);
updateJobInProgressListeners(event);
}
} else {
LOG.warn("Trying to change the priority of an unknown job: " + jobId); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
protected MessageContext decodeSoapRequest(final HttpServletRequest request) {
try {
val decoder = new HTTPSOAP11Decoder();
decoder.setParserPool(samlProfileHandlerConfigurationContext.getOpenSamlConfigBean().getParserPool());
decoder.setHttpServletRequest(request);
val binding = new BindingDescriptor();
binding.setId(getClass().getName());
binding.setShortName(getClass().getName());
binding.setSignatureCapable(true);
binding.setSynchronous(true);
decoder.setBindingDescriptor(binding);
decoder.initialize();
decoder.decode();
return decoder.getMessageContext();
} catch (final Exception e) {
LOGGER.error(e.getMessage(), e);
}
return null;
} } | public class class_name {
protected MessageContext decodeSoapRequest(final HttpServletRequest request) {
try {
val decoder = new HTTPSOAP11Decoder();
decoder.setParserPool(samlProfileHandlerConfigurationContext.getOpenSamlConfigBean().getParserPool()); // depends on control dependency: [try], data = [none]
decoder.setHttpServletRequest(request); // depends on control dependency: [try], data = [none]
val binding = new BindingDescriptor();
binding.setId(getClass().getName()); // depends on control dependency: [try], data = [none]
binding.setShortName(getClass().getName()); // depends on control dependency: [try], data = [none]
binding.setSignatureCapable(true); // depends on control dependency: [try], data = [none]
binding.setSynchronous(true); // depends on control dependency: [try], data = [none]
decoder.setBindingDescriptor(binding); // depends on control dependency: [try], data = [none]
decoder.initialize(); // depends on control dependency: [try], data = [none]
decoder.decode(); // depends on control dependency: [try], data = [none]
return decoder.getMessageContext(); // depends on control dependency: [try], data = [none]
} catch (final Exception e) {
LOGGER.error(e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
return null;
} } |
public class class_name {
@SuppressWarnings("unchecked")
private Map<String, Object> _getRenderFlashMap(FacesContext context)
{
// Note that we don't have to synchronize here, because it is no problem
// if we create more SubKeyMaps with the same subkey, because they are
// totally equal and point to the same entries in the SessionMap.
Map<String, Object> requestMap = context.getExternalContext().getRequestMap();
Map<String, Object> map = (Map<String, Object>) requestMap.get(FLASH_RENDER_MAP);
if (map == null)
{
String token = (String) requestMap.get(FLASH_RENDER_MAP_TOKEN);
String fullToken = FLASH_SESSION_MAP_SUBKEY_PREFIX + SEPARATOR_CHAR + token + SEPARATOR_CHAR;
map = _createSubKeyMap(context, fullToken);
requestMap.put(FLASH_RENDER_MAP, map);
}
return map;
} } | public class class_name {
@SuppressWarnings("unchecked")
private Map<String, Object> _getRenderFlashMap(FacesContext context)
{
// Note that we don't have to synchronize here, because it is no problem
// if we create more SubKeyMaps with the same subkey, because they are
// totally equal and point to the same entries in the SessionMap.
Map<String, Object> requestMap = context.getExternalContext().getRequestMap();
Map<String, Object> map = (Map<String, Object>) requestMap.get(FLASH_RENDER_MAP);
if (map == null)
{
String token = (String) requestMap.get(FLASH_RENDER_MAP_TOKEN);
String fullToken = FLASH_SESSION_MAP_SUBKEY_PREFIX + SEPARATOR_CHAR + token + SEPARATOR_CHAR;
map = _createSubKeyMap(context, fullToken); // depends on control dependency: [if], data = [none]
requestMap.put(FLASH_RENDER_MAP, map); // depends on control dependency: [if], data = [none]
}
return map;
} } |
public class class_name {
public void setReplicaAvailabilityZones(java.util.Collection<String> replicaAvailabilityZones) {
if (replicaAvailabilityZones == null) {
this.replicaAvailabilityZones = null;
return;
}
this.replicaAvailabilityZones = new com.amazonaws.internal.SdkInternalList<String>(replicaAvailabilityZones);
} } | public class class_name {
public void setReplicaAvailabilityZones(java.util.Collection<String> replicaAvailabilityZones) {
if (replicaAvailabilityZones == null) {
this.replicaAvailabilityZones = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.replicaAvailabilityZones = new com.amazonaws.internal.SdkInternalList<String>(replicaAvailabilityZones);
} } |
public class class_name {
@Override
public EClass getIfcPHMeasure() {
if (ifcPHMeasureEClass == null) {
ifcPHMeasureEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers()
.get(848);
}
return ifcPHMeasureEClass;
} } | public class class_name {
@Override
public EClass getIfcPHMeasure() {
if (ifcPHMeasureEClass == null) {
ifcPHMeasureEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers()
.get(848);
// depends on control dependency: [if], data = [none]
}
return ifcPHMeasureEClass;
} } |
public class class_name {
@Override
public ValueWrapper putIfAbsent(Object key, Object value) {
Assert.notNull(key, "key parameter is mandatory");
Assert.isAssignable(String.class, key.getClass());
ValueWrapper valueWrapper = get(key);
if (valueWrapper == null) {
try {
this.memcachedClientIF.add((String) key, this.expiration, value).get();
}
catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
catch (ExecutionException e) {
throw new IllegalArgumentException("Error writing key" + key, e);
}
return null;
}
else {
return valueWrapper;
}
} } | public class class_name {
@Override
public ValueWrapper putIfAbsent(Object key, Object value) {
Assert.notNull(key, "key parameter is mandatory");
Assert.isAssignable(String.class, key.getClass());
ValueWrapper valueWrapper = get(key);
if (valueWrapper == null) {
try {
this.memcachedClientIF.add((String) key, this.expiration, value).get(); // depends on control dependency: [try], data = [none]
}
catch (InterruptedException e) {
Thread.currentThread().interrupt();
} // depends on control dependency: [catch], data = [none]
catch (ExecutionException e) {
throw new IllegalArgumentException("Error writing key" + key, e);
} // depends on control dependency: [catch], data = [none]
return null; // depends on control dependency: [if], data = [none]
}
else {
return valueWrapper; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static final DoubleMatrix1D replaceValues(DoubleMatrix1D v, double oldValue, double newValue) {
if(v == null){
return null;
}
DoubleFactory1D F1 = (v instanceof SparseDoubleMatrix1D)? DoubleFactory1D.sparse : DoubleFactory1D.dense;
DoubleMatrix1D ret = F1.make(v.size());
for (int i = 0; i < v.size(); i++) {
double vi = v.getQuick(i);
if (Double.compare(oldValue, vi) != 0) {
// no substitution
ret.setQuick(i, vi);
} else {
ret.setQuick(i, newValue);
}
}
return ret;
} } | public class class_name {
public static final DoubleMatrix1D replaceValues(DoubleMatrix1D v, double oldValue, double newValue) {
if(v == null){
return null;
// depends on control dependency: [if], data = [none]
}
DoubleFactory1D F1 = (v instanceof SparseDoubleMatrix1D)? DoubleFactory1D.sparse : DoubleFactory1D.dense;
DoubleMatrix1D ret = F1.make(v.size());
for (int i = 0; i < v.size(); i++) {
double vi = v.getQuick(i);
if (Double.compare(oldValue, vi) != 0) {
// no substitution
ret.setQuick(i, vi);
// depends on control dependency: [if], data = [none]
} else {
ret.setQuick(i, newValue);
// depends on control dependency: [if], data = [none]
}
}
return ret;
} } |
public class class_name {
public static Repository newRepository() {
try {
MapRepositoryBuilder builder = new MapRepositoryBuilder();
return builder.build();
} catch (RepositoryException e) {
// Not expected.
throw new RuntimeException(e);
}
} } | public class class_name {
public static Repository newRepository() {
try {
MapRepositoryBuilder builder = new MapRepositoryBuilder();
return builder.build();
// depends on control dependency: [try], data = [none]
} catch (RepositoryException e) {
// Not expected.
throw new RuntimeException(e);
}
// depends on control dependency: [catch], data = [none]
} } |
public class class_name {
@Override
public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
try {
final byte[] fileContent = IOUtils.toByteArray(new FileInputStream(dependency.getActualFile()));
final ScannerFacade scanner = new ScannerFacade(jsRepository);
final List<JsLibraryResult> results = scanner.scanScript(dependency.getActualFile().getAbsolutePath(), fileContent, 0);
if (results.size() > 0) {
for (JsLibraryResult libraryResult : results) {
final JsLibrary lib = libraryResult.getLibrary();
dependency.setName(lib.getName());
dependency.setVersion(libraryResult.getDetectedVersion());
try {
final PackageURL purl = PackageURLBuilder.aPackageURL().withType("javascript")
.withName(lib.getName()).withVersion(libraryResult.getDetectedVersion()).build();
dependency.addSoftwareIdentifier(new PurlIdentifier(purl, Confidence.HIGHEST));
} catch (MalformedPackageURLException ex) {
LOGGER.debug("Unable to build package url for retireJS", ex);
final GenericIdentifier id = new GenericIdentifier("javascript:" + lib.getName() + "@"
+ libraryResult.getDetectedVersion(), Confidence.HIGHEST);
dependency.addSoftwareIdentifier(id);
}
dependency.addEvidence(EvidenceType.VERSION, "file", "version", libraryResult.getDetectedVersion(), Confidence.HIGH);
dependency.addEvidence(EvidenceType.PRODUCT, "file", "name", libraryResult.getLibrary().getName(), Confidence.HIGH);
final List<Vulnerability> vulns = new ArrayList<>();
final JsVulnerability jsVuln = libraryResult.getVuln();
if (jsVuln.getIdentifiers().containsKey("CVE") || jsVuln.getIdentifiers().containsKey("osvdb")) {
/* CVEs and OSVDB are an array of Strings - each one a unique vulnerability.
* So the JsVulnerability we are operating on may actually be representing
* multiple vulnerabilities. */
//TODO - can we refactor this to avoid russian doll syndrome (i.e. nesting)?
//CSOFF: NestedForDepth
for (Map.Entry<String, List<String>> entry : jsVuln.getIdentifiers().entrySet()) {
final String key = entry.getKey();
final List<String> value = entry.getValue();
if ("CVE".equals(key)) {
for (String cve : value) {
Vulnerability vuln = engine.getDatabase().getVulnerability(StringUtils.trim(cve));
if (vuln == null) {
/* The CVE does not exist in the database and is likely in a
* reserved state. Create a new one without adding it to the
* database and populate it as best as possible. */
vuln = new Vulnerability();
vuln.setName(cve);
vuln.setUnscoredSeverity(jsVuln.getSeverity());
vuln.setSource(Vulnerability.Source.RETIREJS);
}
for (String info : jsVuln.getInfo()) {
vuln.addReference("info", "info", info);
}
vulns.add(vuln);
}
} else if ("osvdb".equals(key)) {
//todo - convert to map/collect
value.stream().forEach((osvdb) -> {
final Vulnerability vuln = new Vulnerability();
vuln.setName(osvdb);
vuln.setSource(Vulnerability.Source.RETIREJS);
vuln.setUnscoredSeverity(jsVuln.getSeverity());
jsVuln.getInfo().forEach((info) -> {
vuln.addReference("info", "info", info);
});
vulns.add(vuln);
});
}
dependency.addVulnerabilities(vulns);
}
//CSON: NestedForDepth
} else {
final Vulnerability individualVuln = new Vulnerability();
/* ISSUE, BUG, etc are all individual vulnerabilities. The result of this
* iteration will be one vulnerability. */
for (Map.Entry<String, List<String>> entry : jsVuln.getIdentifiers().entrySet()) {
final String key = entry.getKey();
final List<String> value = entry.getValue();
// CSOFF: NeedBraces
if (null != key) {
switch (key) {
case "issue":
individualVuln.setName(libraryResult.getLibrary().getName() + " issue: " + value.get(0));
individualVuln.addReference(key, key, value.get(0));
break;
case "bug":
individualVuln.setName(libraryResult.getLibrary().getName() + " bug: " + value.get(0));
individualVuln.addReference(key, key, value.get(0));
break;
case "summary":
if (null == individualVuln.getName()) {
individualVuln.setName(value.get(0));
}
individualVuln.setDescription(value.get(0));
break;
case "release":
individualVuln.addReference(key, key, value.get(0));
break;
default:
break;
}
}
// CSON: NeedBraces
individualVuln.setSource(Vulnerability.Source.RETIREJS);
individualVuln.setUnscoredSeverity(jsVuln.getSeverity());
for (String info : jsVuln.getInfo()) {
individualVuln.addReference("info", "info", info);
}
}
dependency.addVulnerability(individualVuln);
}
}
} else if (getSettings().getBoolean(Settings.KEYS.ANALYZER_RETIREJS_FILTER_NON_VULNERABLE, false)) {
engine.removeDependency(dependency);
}
} catch (IOException | DatabaseException e) {
throw new AnalysisException(e);
}
} } | public class class_name {
@Override
public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
try {
final byte[] fileContent = IOUtils.toByteArray(new FileInputStream(dependency.getActualFile()));
final ScannerFacade scanner = new ScannerFacade(jsRepository);
final List<JsLibraryResult> results = scanner.scanScript(dependency.getActualFile().getAbsolutePath(), fileContent, 0);
if (results.size() > 0) {
for (JsLibraryResult libraryResult : results) {
final JsLibrary lib = libraryResult.getLibrary();
dependency.setName(lib.getName());
dependency.setVersion(libraryResult.getDetectedVersion());
try {
final PackageURL purl = PackageURLBuilder.aPackageURL().withType("javascript")
.withName(lib.getName()).withVersion(libraryResult.getDetectedVersion()).build();
dependency.addSoftwareIdentifier(new PurlIdentifier(purl, Confidence.HIGHEST)); // depends on control dependency: [try], data = [none]
} catch (MalformedPackageURLException ex) {
LOGGER.debug("Unable to build package url for retireJS", ex);
final GenericIdentifier id = new GenericIdentifier("javascript:" + lib.getName() + "@"
+ libraryResult.getDetectedVersion(), Confidence.HIGHEST);
dependency.addSoftwareIdentifier(id);
} // depends on control dependency: [catch], data = [none]
dependency.addEvidence(EvidenceType.VERSION, "file", "version", libraryResult.getDetectedVersion(), Confidence.HIGH);
dependency.addEvidence(EvidenceType.PRODUCT, "file", "name", libraryResult.getLibrary().getName(), Confidence.HIGH);
final List<Vulnerability> vulns = new ArrayList<>();
final JsVulnerability jsVuln = libraryResult.getVuln();
if (jsVuln.getIdentifiers().containsKey("CVE") || jsVuln.getIdentifiers().containsKey("osvdb")) {
/* CVEs and OSVDB are an array of Strings - each one a unique vulnerability.
* So the JsVulnerability we are operating on may actually be representing
* multiple vulnerabilities. */
//TODO - can we refactor this to avoid russian doll syndrome (i.e. nesting)?
//CSOFF: NestedForDepth
for (Map.Entry<String, List<String>> entry : jsVuln.getIdentifiers().entrySet()) {
final String key = entry.getKey();
final List<String> value = entry.getValue();
if ("CVE".equals(key)) {
for (String cve : value) {
Vulnerability vuln = engine.getDatabase().getVulnerability(StringUtils.trim(cve));
if (vuln == null) {
/* The CVE does not exist in the database and is likely in a
* reserved state. Create a new one without adding it to the
* database and populate it as best as possible. */
vuln = new Vulnerability(); // depends on control dependency: [if], data = [none]
vuln.setName(cve); // depends on control dependency: [if], data = [none]
vuln.setUnscoredSeverity(jsVuln.getSeverity()); // depends on control dependency: [if], data = [none]
vuln.setSource(Vulnerability.Source.RETIREJS); // depends on control dependency: [if], data = [none]
}
for (String info : jsVuln.getInfo()) {
vuln.addReference("info", "info", info); // depends on control dependency: [for], data = [info]
}
vulns.add(vuln); // depends on control dependency: [for], data = [none]
}
} else if ("osvdb".equals(key)) {
//todo - convert to map/collect
value.stream().forEach((osvdb) -> {
final Vulnerability vuln = new Vulnerability(); // depends on control dependency: [if], data = [none]
vuln.setName(osvdb); // depends on control dependency: [if], data = [none]
vuln.setSource(Vulnerability.Source.RETIREJS); // depends on control dependency: [if], data = [none]
vuln.setUnscoredSeverity(jsVuln.getSeverity()); // depends on control dependency: [if], data = [none]
jsVuln.getInfo().forEach((info) -> {
vuln.addReference("info", "info", info); // depends on control dependency: [if], data = [none]
});
vulns.add(vuln); // depends on control dependency: [for], data = [none]
});
}
dependency.addVulnerabilities(vulns);
}
//CSON: NestedForDepth
} else {
final Vulnerability individualVuln = new Vulnerability();
/* ISSUE, BUG, etc are all individual vulnerabilities. The result of this
* iteration will be one vulnerability. */
for (Map.Entry<String, List<String>> entry : jsVuln.getIdentifiers().entrySet()) {
final String key = entry.getKey();
final List<String> value = entry.getValue();
// CSOFF: NeedBraces
if (null != key) {
switch (key) {
case "issue":
individualVuln.setName(libraryResult.getLibrary().getName() + " issue: " + value.get(0));
individualVuln.addReference(key, key, value.get(0));
break;
case "bug":
individualVuln.setName(libraryResult.getLibrary().getName() + " bug: " + value.get(0));
individualVuln.addReference(key, key, value.get(0));
break;
case "summary":
if (null == individualVuln.getName()) {
individualVuln.setName(value.get(0));
}
individualVuln.setDescription(value.get(0));
break;
case "release":
individualVuln.addReference(key, key, value.get(0));
break;
default:
break;
}
}
// CSON: NeedBraces
individualVuln.setSource(Vulnerability.Source.RETIREJS);
individualVuln.setUnscoredSeverity(jsVuln.getSeverity());
for (String info : jsVuln.getInfo()) {
individualVuln.addReference("info", "info", info);
}
}
dependency.addVulnerability(individualVuln);
}
}
} else if (getSettings().getBoolean(Settings.KEYS.ANALYZER_RETIREJS_FILTER_NON_VULNERABLE, false)) {
engine.removeDependency(dependency);
}
} catch (IOException | DatabaseException e) {
throw new AnalysisException(e);
}
} } |
public class class_name {
protected void releaseManagedObjectContext() {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
Tr.debug(tc, "releaseManagedObjectContext : " + ivEjbManagedObjectContext);
if (ivEjbManagedObjectContext != null) {
ivEjbManagedObjectContext.release();
}
} } | public class class_name {
protected void releaseManagedObjectContext() {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
Tr.debug(tc, "releaseManagedObjectContext : " + ivEjbManagedObjectContext);
if (ivEjbManagedObjectContext != null) {
ivEjbManagedObjectContext.release(); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public byte[] getAuthenticatedAttributeBytes(byte secondDigest[], Calendar signingTime, byte[] ocsp) {
try {
return getAuthenticatedAttributeSet(secondDigest, signingTime, ocsp).getEncoded(ASN1Encoding.DER);
}
catch (Exception e) {
throw new ExceptionConverter(e);
}
} } | public class class_name {
public byte[] getAuthenticatedAttributeBytes(byte secondDigest[], Calendar signingTime, byte[] ocsp) {
try {
return getAuthenticatedAttributeSet(secondDigest, signingTime, ocsp).getEncoded(ASN1Encoding.DER); // depends on control dependency: [try], data = [none]
}
catch (Exception e) {
throw new ExceptionConverter(e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public void setMessage(String data) throws HttpMalformedHeaderException {
clear();
try {
if (!this.parse(data)) {
mMalformedHeader = true;
}
} catch (Exception e) {
mMalformedHeader = true;
}
if (mMalformedHeader) {
throw new HttpMalformedHeaderException();
}
} } | public class class_name {
public void setMessage(String data) throws HttpMalformedHeaderException {
clear();
try {
if (!this.parse(data)) {
mMalformedHeader = true;
// depends on control dependency: [if], data = [none]
}
} catch (Exception e) {
mMalformedHeader = true;
}
if (mMalformedHeader) {
throw new HttpMalformedHeaderException();
}
} } |
public class class_name {
private void invalidateWeekFields()
{
if (stamp[WEEK_OF_MONTH] != COMPUTED &&
stamp[WEEK_OF_YEAR] != COMPUTED) {
return;
}
// We have to check the new values of these fields after changing
// firstDayOfWeek and/or minimalDaysInFirstWeek. If the field values
// have been changed, then set the new values. (4822110)
Calendar cal = (Calendar) clone();
cal.setLenient(true);
cal.clear(WEEK_OF_MONTH);
cal.clear(WEEK_OF_YEAR);
if (stamp[WEEK_OF_MONTH] == COMPUTED) {
int weekOfMonth = cal.get(WEEK_OF_MONTH);
if (fields[WEEK_OF_MONTH] != weekOfMonth) {
fields[WEEK_OF_MONTH] = weekOfMonth;
}
}
if (stamp[WEEK_OF_YEAR] == COMPUTED) {
int weekOfYear = cal.get(WEEK_OF_YEAR);
if (fields[WEEK_OF_YEAR] != weekOfYear) {
fields[WEEK_OF_YEAR] = weekOfYear;
}
}
} } | public class class_name {
private void invalidateWeekFields()
{
if (stamp[WEEK_OF_MONTH] != COMPUTED &&
stamp[WEEK_OF_YEAR] != COMPUTED) {
return; // depends on control dependency: [if], data = [none]
}
// We have to check the new values of these fields after changing
// firstDayOfWeek and/or minimalDaysInFirstWeek. If the field values
// have been changed, then set the new values. (4822110)
Calendar cal = (Calendar) clone();
cal.setLenient(true);
cal.clear(WEEK_OF_MONTH);
cal.clear(WEEK_OF_YEAR);
if (stamp[WEEK_OF_MONTH] == COMPUTED) {
int weekOfMonth = cal.get(WEEK_OF_MONTH);
if (fields[WEEK_OF_MONTH] != weekOfMonth) {
fields[WEEK_OF_MONTH] = weekOfMonth; // depends on control dependency: [if], data = [none]
}
}
if (stamp[WEEK_OF_YEAR] == COMPUTED) {
int weekOfYear = cal.get(WEEK_OF_YEAR);
if (fields[WEEK_OF_YEAR] != weekOfYear) {
fields[WEEK_OF_YEAR] = weekOfYear; // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
public void setAllowedPrincipals(java.util.Collection<AllowedPrincipal> allowedPrincipals) {
if (allowedPrincipals == null) {
this.allowedPrincipals = null;
return;
}
this.allowedPrincipals = new com.amazonaws.internal.SdkInternalList<AllowedPrincipal>(allowedPrincipals);
} } | public class class_name {
public void setAllowedPrincipals(java.util.Collection<AllowedPrincipal> allowedPrincipals) {
if (allowedPrincipals == null) {
this.allowedPrincipals = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.allowedPrincipals = new com.amazonaws.internal.SdkInternalList<AllowedPrincipal>(allowedPrincipals);
} } |
public class class_name {
@Override
public void registerFactory(String name, Class<? extends ChannelFactory> factory) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "registerFactory: " + name + "; " + factory);
}
synchronized (this.factories) {
if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
Class<? extends ChannelFactory> prevFactory = this.factories.get(name);
if (null != prevFactory && factory != prevFactory) {
Tr.event(tc, "WARNING: overlaying existing factory: " + prevFactory);
}
}
this.factories.put(name, factory);
} // end-sync
// now that we have a new factory type, tell ChannelUtils to process
// any delayed config that might be waiting for it
ChannelUtils.loadConfig(null);
} } | public class class_name {
@Override
public void registerFactory(String name, Class<? extends ChannelFactory> factory) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "registerFactory: " + name + "; " + factory); // depends on control dependency: [if], data = [none]
}
synchronized (this.factories) {
if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
Class<? extends ChannelFactory> prevFactory = this.factories.get(name); // depends on control dependency: [if], data = [none]
if (null != prevFactory && factory != prevFactory) {
Tr.event(tc, "WARNING: overlaying existing factory: " + prevFactory); // depends on control dependency: [if], data = [none]
}
}
this.factories.put(name, factory);
} // end-sync
// now that we have a new factory type, tell ChannelUtils to process
// any delayed config that might be waiting for it
ChannelUtils.loadConfig(null);
} } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.