code
stringlengths 130
281k
| code_dependency
stringlengths 182
306k
|
|---|---|
public class class_name {
    /**
     * Cancels a remote port forwarding previously started on the given bind
     * address and port.
     *
     * @param bindAddress      address the remote forwarding was bound to
     * @param bindPort         port the remote forwarding was bound to
     * @param killActiveTunnels when {@code true}, any active tunnels for this
     *                          forwarding are stopped before the forwarding
     *                          itself is cancelled
     * @throws SshException if no forwarding was registered under the key (and
     *                      no tunnels were killed), or if the stored
     *                      destination string has no {@code host:port} form
     */
    public void cancelRemoteForwarding(String bindAddress, int bindPort,
            boolean killActiveTunnels) throws SshException {
        String key = generateKey(bindAddress, bindPort);
        boolean killedTunnels = false;
        if (killActiveTunnels) {
            try {
                ActiveTunnel[] tunnels = getRemoteForwardingTunnels(
                        bindAddress, bindPort);
                if (tunnels != null) {
                    for (ActiveTunnel tunnel : tunnels) {
                        killedTunnels = true;
                        tunnel.stop();
                    }
                }
            } catch (IOException ignored) {
                // Best effort: a tunnel that fails to stop must not prevent
                // the forwarding itself from being cancelled.
            }
            incomingtunnels.remove(key);
        }
        if (!remoteforwardings.containsKey(key)) {
            // Tolerate a missing registration when we actually stopped
            // tunnels above; otherwise the caller asked to cancel a
            // forwarding that was never started.
            if (killActiveTunnels && killedTunnels) {
                return;
            }
            throw new SshException("Remote forwarding has not been started on "
                    + key, SshException.FORWARDING_ERROR);
        }
        // Check to see whether this is local or remote.
        // NOTE(review): when ssh is null the entry stays in remoteforwardings
        // and listeners are not notified - confirm this is intentional for
        // the local case.
        if (ssh == null)
            return;
        ssh.cancelRemoteForwarding(bindAddress, bindPort);
        String destination = (String) remoteforwardings.get(key);
        int idx = destination.indexOf(":");
        if (idx == -1) {
            throw new SshException(
                    "Invalid port reference in remote forwarding key!",
                    SshException.INTERNAL_ERROR);
        }
        String hostToConnect = destination.substring(0, idx);
        int portToConnect = Integer.parseInt(destination.substring(idx + 1));
        // Notify every registered listener that the forwarding has stopped.
        for (int i = 0; i < clientlisteners.size(); i++) {
            if (clientlisteners.elementAt(i) != null) {
                ((ForwardingClientListener) clientlisteners.elementAt(i))
                        .forwardingStopped(
                                ForwardingClientListener.REMOTE_FORWARDING,
                                key, hostToConnect, portToConnect);
            }
        }
        remoteforwardings.remove(key);
    }
}
|
public class class_name {
public void cancelRemoteForwarding(String bindAddress, int bindPort,
boolean killActiveTunnels) throws SshException {
String key = generateKey(bindAddress, bindPort);
boolean killedTunnels = false;
if (killActiveTunnels) {
try {
ActiveTunnel[] tunnels = getRemoteForwardingTunnels(
bindAddress, bindPort);
if (tunnels != null) {
for (int i = 0; i < tunnels.length; i++) {
killedTunnels = true; // depends on control dependency: [for], data = [none]
tunnels[i].stop(); // depends on control dependency: [for], data = [i]
}
}
} catch (IOException ex) {
} // depends on control dependency: [catch], data = [none]
incomingtunnels.remove(key);
}
if (!remoteforwardings.containsKey(key)) {
if (killActiveTunnels && killedTunnels) {
return; // depends on control dependency: [if], data = [none]
}
throw new SshException("Remote forwarding has not been started on "
+ key, SshException.FORWARDING_ERROR);
}
// Check to see whether this is local or remote
if (ssh == null)
return;
ssh.cancelRemoteForwarding(bindAddress, bindPort);
String destination = (String) remoteforwardings.get(key);
int idx = destination.indexOf(":");
String hostToConnect;
int portToConnect;
if (idx == -1) {
throw new SshException(
"Invalid port reference in remote forwarding key!",
SshException.INTERNAL_ERROR);
}
hostToConnect = destination.substring(0, idx);
portToConnect = Integer.parseInt(destination.substring(idx + 1));
for (int i = 0; i < clientlisteners.size(); i++) {
if (clientlisteners.elementAt(i) != null) {
((ForwardingClientListener) clientlisteners.elementAt(i))
.forwardingStopped(
ForwardingClientListener.REMOTE_FORWARDING,
key, hostToConnect, portToConnect);
}
}
remoteforwardings.remove(key);
} }
|
public class class_name {
// Maps a single budget period DTO onto the generated S2S
// BudgetYear1DataType XML bean. Returns an empty bean (factory default)
// when periodInfo is null. Total costs = direct costs plus indirect
// costs when indirect costs are present, otherwise direct costs alone.
private BudgetYear1DataType getBudgetYear1DataType(
BudgetPeriodDto periodInfo) {
BudgetYear1DataType budgetYear = BudgetYear1DataType.Factory
.newInstance();
if (periodInfo != null) {
// Period boundaries, converted to the calendar type the XML bean expects.
budgetYear.setBudgetPeriodStartDate(s2SDateTimeService
.convertDateToCalendar(periodInfo.getStartDate()));
budgetYear.setBudgetPeriodEndDate(s2SDateTimeService
.convertDateToCalendar(periodInfo.getEndDate()));
BudgetPeriod.Enum budgetPeriod = BudgetPeriod.Enum
.forInt(periodInfo.getBudgetPeriod());
budgetYear.setBudgetPeriod(budgetPeriod);
budgetYear.setKeyPersons(getKeyPersons(periodInfo));
budgetYear.setOtherPersonnel(getOtherPersonnel(periodInfo));
// Total compensation is optional on the DTO.
if (periodInfo.getTotalCompensation() != null) {
budgetYear.setTotalCompensation(periodInfo
.getTotalCompensation().bigDecimalValue());
}
budgetYear.setEquipment(getEquipment(periodInfo));
budgetYear.setTravel(getTravel(periodInfo));
budgetYear
.setParticipantTraineeSupportCosts(getParticipantTraineeSupportCosts(periodInfo));
budgetYear.setOtherDirectCosts(getOtherDirectCosts(periodInfo));
// NOTE(review): getDirectCostsTotal() is dereferenced unconditionally
// here and below - presumably never null for a populated period; confirm.
budgetYear.setDirectCosts(periodInfo.getDirectCostsTotal()
.bigDecimalValue());
IndirectCosts indirectCosts = getIndirectCosts(periodInfo);
if (indirectCosts != null) {
// Total = direct + indirect when indirect costs exist.
budgetYear.setIndirectCosts(indirectCosts);
budgetYear.setTotalCosts(periodInfo.getDirectCostsTotal().bigDecimalValue().add(indirectCosts.getTotalIndirectCosts()));
}else{
// No indirect costs: total equals the direct costs alone.
budgetYear.setTotalCosts(periodInfo.getDirectCostsTotal().bigDecimalValue());
}
budgetYear.setCognizantFederalAgency(periodInfo
.getCognizantFedAgency());
}
return budgetYear;
} }
|
public class class_name {
private BudgetYear1DataType getBudgetYear1DataType(
BudgetPeriodDto periodInfo) {
BudgetYear1DataType budgetYear = BudgetYear1DataType.Factory
.newInstance();
if (periodInfo != null) {
budgetYear.setBudgetPeriodStartDate(s2SDateTimeService
.convertDateToCalendar(periodInfo.getStartDate())); // depends on control dependency: [if], data = [none]
budgetYear.setBudgetPeriodEndDate(s2SDateTimeService
.convertDateToCalendar(periodInfo.getEndDate())); // depends on control dependency: [if], data = [none]
BudgetPeriod.Enum budgetPeriod = BudgetPeriod.Enum
.forInt(periodInfo.getBudgetPeriod());
budgetYear.setBudgetPeriod(budgetPeriod); // depends on control dependency: [if], data = [none]
budgetYear.setKeyPersons(getKeyPersons(periodInfo)); // depends on control dependency: [if], data = [(periodInfo]
budgetYear.setOtherPersonnel(getOtherPersonnel(periodInfo)); // depends on control dependency: [if], data = [(periodInfo]
if (periodInfo.getTotalCompensation() != null) {
budgetYear.setTotalCompensation(periodInfo
.getTotalCompensation().bigDecimalValue()); // depends on control dependency: [if], data = [none]
}
budgetYear.setEquipment(getEquipment(periodInfo)); // depends on control dependency: [if], data = [(periodInfo]
budgetYear.setTravel(getTravel(periodInfo)); // depends on control dependency: [if], data = [(periodInfo]
budgetYear
.setParticipantTraineeSupportCosts(getParticipantTraineeSupportCosts(periodInfo)); // depends on control dependency: [if], data = [none]
budgetYear.setOtherDirectCosts(getOtherDirectCosts(periodInfo)); // depends on control dependency: [if], data = [(periodInfo]
budgetYear.setDirectCosts(periodInfo.getDirectCostsTotal()
.bigDecimalValue()); // depends on control dependency: [if], data = [none]
IndirectCosts indirectCosts = getIndirectCosts(periodInfo);
if (indirectCosts != null) {
budgetYear.setIndirectCosts(indirectCosts); // depends on control dependency: [if], data = [(indirectCosts]
budgetYear.setTotalCosts(periodInfo.getDirectCostsTotal().bigDecimalValue().add(indirectCosts.getTotalIndirectCosts())); // depends on control dependency: [if], data = [(indirectCosts]
}else{
budgetYear.setTotalCosts(periodInfo.getDirectCostsTotal().bigDecimalValue()); // depends on control dependency: [if], data = [none]
}
budgetYear.setCognizantFederalAgency(periodInfo
.getCognizantFedAgency()); // depends on control dependency: [if], data = [none]
}
return budgetYear;
} }
|
public class class_name {
    /**
     * Sorts the supplied list in place using natural ordering and returns the
     * same list instance for call chaining. Dispatches to the most efficient
     * sort available for the concrete list type.
     *
     * @param list the list to sort in place
     * @return the same {@code list} instance, sorted
     */
    public static <T extends Comparable<? super T>, L extends List<T>> L sortThis(L list)
    {
        // Eclipse-Collections lists know how to sort themselves.
        if (list instanceof MutableList<?>)
        {
            ((MutableList<T>) list).sortThis();
            return list;
        }
        // Plain ArrayLists get the specialized in-place sort.
        if (list instanceof ArrayList)
        {
            ArrayListIterate.sortThis((ArrayList<T>) list);
            return list;
        }
        // Anything else falls back to Collections.sort; a list of zero or
        // one elements is already sorted, so skip the call entirely.
        if (list.size() > 1)
        {
            Collections.sort(list);
        }
        return list;
    }
}
|
public class class_name {
public static <T extends Comparable<? super T>, L extends List<T>> L sortThis(L list)
{
if (list instanceof MutableList<?>)
{
((MutableList<T>) list).sortThis(); // depends on control dependency: [if], data = [)]
}
else if (list instanceof ArrayList)
{
ArrayListIterate.sortThis((ArrayList<T>) list); // depends on control dependency: [if], data = [none]
}
else
{
if (list.size() > 1)
{
Collections.sort(list); // depends on control dependency: [if], data = [none]
}
}
return list;
} }
|
public class class_name {
    /**
     * Serializes the input-panel configuration as an {@code <input>} XML
     * fragment onto the given builder, and records validation problems
     * (missing encoding, missing archives, bad paths or start offsets,
     * disabled 7zip support) in {@code errors}.
     *
     * @param builder receives the generated XML (CRLF line endings, tabs)
     * @param errors  collects warnings and errors found while validating
     */
    @Override
    public void toXML(final StringBuilder builder,
            final ConfigVerification errors)
    {
        SurrogateModes surMode = controller.getSurrogates();
        String wikiEncoding = encodingField.getText();
        if (wikiEncoding.length() == 0) {
            errors.add(new ConfigItem(ConfigItemTypes.WARNING,
                    ConfigErrorKeys.MISSING_VALUE,
                    "The CharacterEncoding was not set."));
        }
        builder.append("\t<input>\r\n");
        builder.append("\t\t<MODE_SURROGATES>" + surMode
                + "</MODE_SURROGATES>\r\n");
        builder.append("\t\t<WIKIPEDIA_ENCODING>" + wikiEncoding
                + "</WIKIPEDIA_ENCODING>\r\n");
        ArchiveRegistry reg = controller.getArchives();
        int size = reg.getRowCount();
        if (size == 0) {
            errors.add(new ConfigItem(ConfigItemTypes.WARNING,
                    ConfigErrorKeys.MISSING_VALUE,
                    "No source file has been set."));
        }
        for (int i = 0; i < size; i++) {
            ArchiveDescription archive = reg.get(i);
            InputType type = archive.getType();
            switch (type) {
            case XML:
                break;
            case BZIP2:
                // bzip is always enabled - nothing to check here
                break;
            case SEVENZIP:
                if (!controller.is7ZipEnabled()) {
                    // Fixed typo in the error message: "SevenUip" -> "SevenZip".
                    errors.add(new ConfigItem(ConfigItemTypes.ERROR,
                            ConfigErrorKeys.ILLEGAL_INPUT_FILE,
                            "The SevenZip mode is not activated"));
                }
                break;
            }
            String archivePath = archive.getPath();
            if (archivePath.length() == 0) {
                errors.add(new ConfigItem(ConfigItemTypes.ERROR,
                        ConfigErrorKeys.PATH_NOT_SET,
                        "The archive path is missing"));
            }
            long start = archive.getStartPosition();
            if (start < 0) {
                errors.add(new ConfigItem(ConfigItemTypes.ERROR,
                        ConfigErrorKeys.VALUE_OUT_OF_RANGE,
                        "The archive start value should be at least 0"));
            }
            builder.append("\t\t<archive>\r\n");
            builder.append("\t\t\t<type>" + type + "</type>\r\n");
            builder.append("\t\t\t<path>\"" + archivePath + "\"</path>\r\n");
            builder.append("\t\t\t<start>" + start + "</start>\r\n");
            builder.append("\t\t</archive>\r\n");
        }
        builder.append("\t</input>\r\n");
    }
}
|
public class class_name {
@Override
public void toXML(final StringBuilder builder,
final ConfigVerification errors)
{
SurrogateModes surMode = controller.getSurrogates();
String wikiEncoding = encodingField.getText();
if (wikiEncoding.length() == 0) {
errors.add(new ConfigItem(ConfigItemTypes.WARNING,
ConfigErrorKeys.MISSING_VALUE,
"The CharacterEncoding was not set.")); // depends on control dependency: [if], data = [none]
}
builder.append("\t<input>\r\n");
builder.append("\t\t<MODE_SURROGATES>" + surMode
+ "</MODE_SURROGATES>\r\n");
builder.append("\t\t<WIKIPEDIA_ENCODING>" + wikiEncoding
+ "</WIKIPEDIA_ENCODING>\r\n");
ArchiveRegistry reg = controller.getArchives();
int size = reg.getRowCount();
ArchiveDescription archive;
InputType type;
String archivePath;
long start;
if(size==0){
errors.add(new ConfigItem(ConfigItemTypes.WARNING,
ConfigErrorKeys.MISSING_VALUE,
"No source file has been set.")); // depends on control dependency: [if], data = [none]
}
for (int i = 0; i < size; i++) {
archive = reg.get(i); // depends on control dependency: [for], data = [i]
type = archive.getType(); // depends on control dependency: [for], data = [none]
switch (type) {
case XML:
break;
case BZIP2:
//bzip is always enabled - nothing to check here
break;
case SEVENZIP:
if (!controller.is7ZipEnabled()) {
errors.add(new ConfigItem(ConfigItemTypes.ERROR,
ConfigErrorKeys.ILLEGAL_INPUT_FILE,
"The SevenUip mode is not " + "activated")); // depends on control dependency: [if], data = [none]
}
break;
}
archivePath = archive.getPath(); // depends on control dependency: [for], data = [none]
if (archivePath.length() == 0) {
errors.add(new ConfigItem(ConfigItemTypes.ERROR,
ConfigErrorKeys.PATH_NOT_SET,
"The archive path is missing")); // depends on control dependency: [if], data = [none]
}
start = archive.getStartPosition(); // depends on control dependency: [for], data = [none]
if (start < 0) {
errors.add(new ConfigItem(ConfigItemTypes.ERROR,
ConfigErrorKeys.VALUE_OUT_OF_RANGE,
"The archive start value should be at least 0")); // depends on control dependency: [if], data = [none]
}
builder.append("\t\t<archive>\r\n"); // depends on control dependency: [for], data = [none]
builder.append("\t\t\t<type>" + type + "</type>\r\n"); // depends on control dependency: [for], data = [none]
builder.append("\t\t\t<path>\"" + archivePath + "\"</path>\r\n"); // depends on control dependency: [for], data = [none]
builder.append("\t\t\t<start>" + start + "</start>\r\n"); // depends on control dependency: [for], data = [none]
builder.append("\t\t</archive>\r\n"); // depends on control dependency: [for], data = [none]
}
builder.append("\t</input>\r\n");
} }
|
public class class_name {
// Builds a "selectOneByExampleWithBLOBs" statement (base + BLOB columns,
// optional order-by, "limit 1") and stashes it for later injection; then
// delegates to the superclass hook.
@Override
public boolean sqlMapSelectByExampleWithBLOBsElementGenerated(XmlElement element, IntrospectedTable introspectedTable) {
// Build the select statement element.
XmlElement selectOneWithBLOBsElement = new XmlElement("select");
// Add the generated-node comment. Required: when overwriting, see
// XmlFileMergerJaxp.isGeneratedNode - it checks the comment for one of
// the OLD_ELEMENT_TAGS markers, e.g. @mbg.generated.
commentGenerator.addComment(selectOneWithBLOBsElement);
// Set the statement id.
selectOneWithBLOBsElement.addAttribute(new Attribute("id", METHOD_SELECT_ONE_BY_EXAMPLE_WITH_BLOBS));
// Set the result map (the WithBLOBs variant).
selectOneWithBLOBsElement.addAttribute(new Attribute("resultMap", introspectedTable.getResultMapWithBLOBsId()));
// Set the parameter type (the Example class).
selectOneWithBLOBsElement.addAttribute(new Attribute("parameterType", introspectedTable.getExampleType()));
// Append the query SQL.
selectOneWithBLOBsElement.addElement(new TextElement("select"));
StringBuilder sb = new StringBuilder();
// Optional QUERYID marker column, only when a query id is configured.
if (stringHasValue(introspectedTable.getSelectByExampleQueryId())) {
sb.append('\'');
sb.append(introspectedTable.getSelectByExampleQueryId());
sb.append("' as QUERYID,");
selectOneWithBLOBsElement.addElement(new TextElement(sb.toString()));
}
selectOneWithBLOBsElement.addElement(XmlElementGeneratorTools.getBaseColumnListElement(introspectedTable));
selectOneWithBLOBsElement.addElement(new TextElement(","));
selectOneWithBLOBsElement.addElement(XmlElementGeneratorTools.getBlobColumnListElement(introspectedTable));
sb.setLength(0);
sb.append("from ");
sb.append(introspectedTable.getAliasedFullyQualifiedTableNameAtRuntime());
selectOneWithBLOBsElement.addElement(new TextElement(sb.toString()));
selectOneWithBLOBsElement.addElement(XmlElementGeneratorTools.getExampleIncludeElement(introspectedTable));
// Conditional order-by clause.
XmlElement ifElement1 = new XmlElement("if");
ifElement1.addAttribute(new Attribute("test", "orderByClause != null")); //$NON-NLS-2$
ifElement1.addElement(new TextElement("order by ${orderByClause}"));
selectOneWithBLOBsElement.addElement(ifElement1);
// Fetch only a single row.
selectOneWithBLOBsElement.addElement(new TextElement("limit 1"));
// Keep the element so it can be added to the mapper later.
this.selectOneByExampleWithBLOBsEle = selectOneWithBLOBsElement;
return super.sqlMapSelectByExampleWithBLOBsElementGenerated(element, introspectedTable);
} }
|
public class class_name {
@Override
public boolean sqlMapSelectByExampleWithBLOBsElementGenerated(XmlElement element, IntrospectedTable introspectedTable) {
// 生成查询语句
XmlElement selectOneWithBLOBsElement = new XmlElement("select");
// 添加注释(!!!必须添加注释,overwrite覆盖生成时,@see XmlFileMergerJaxp.isGeneratedNode会去判断注释中是否存在OLD_ELEMENT_TAGS中的一点,例子:@mbg.generated)
commentGenerator.addComment(selectOneWithBLOBsElement);
// 添加ID
selectOneWithBLOBsElement.addAttribute(new Attribute("id", METHOD_SELECT_ONE_BY_EXAMPLE_WITH_BLOBS));
// 添加返回类型
selectOneWithBLOBsElement.addAttribute(new Attribute("resultMap", introspectedTable.getResultMapWithBLOBsId()));
// 添加参数类型
selectOneWithBLOBsElement.addAttribute(new Attribute("parameterType", introspectedTable.getExampleType()));
// 添加查询SQL
selectOneWithBLOBsElement.addElement(new TextElement("select"));
StringBuilder sb = new StringBuilder();
if (stringHasValue(introspectedTable.getSelectByExampleQueryId())) {
sb.append('\''); // depends on control dependency: [if], data = [none]
sb.append(introspectedTable.getSelectByExampleQueryId()); // depends on control dependency: [if], data = [none]
sb.append("' as QUERYID,"); // depends on control dependency: [if], data = [none]
selectOneWithBLOBsElement.addElement(new TextElement(sb.toString())); // depends on control dependency: [if], data = [none]
}
selectOneWithBLOBsElement.addElement(XmlElementGeneratorTools.getBaseColumnListElement(introspectedTable));
selectOneWithBLOBsElement.addElement(new TextElement(","));
selectOneWithBLOBsElement.addElement(XmlElementGeneratorTools.getBlobColumnListElement(introspectedTable));
sb.setLength(0);
sb.append("from ");
sb.append(introspectedTable.getAliasedFullyQualifiedTableNameAtRuntime());
selectOneWithBLOBsElement.addElement(new TextElement(sb.toString()));
selectOneWithBLOBsElement.addElement(XmlElementGeneratorTools.getExampleIncludeElement(introspectedTable));
XmlElement ifElement1 = new XmlElement("if");
ifElement1.addAttribute(new Attribute("test", "orderByClause != null")); //$NON-NLS-2$
ifElement1.addElement(new TextElement("order by ${orderByClause}"));
selectOneWithBLOBsElement.addElement(ifElement1);
// 只查询一条
selectOneWithBLOBsElement.addElement(new TextElement("limit 1"));
this.selectOneByExampleWithBLOBsEle = selectOneWithBLOBsElement;
return super.sqlMapSelectByExampleWithBLOBsElementGenerated(element, introspectedTable);
} }
|
public class class_name {
    /**
     * Iterates the given rows and materializes one entity per row, populating
     * the id (plain or embeddable), the selected scalar and embeddable
     * attributes, and collecting association foreign keys into a per-row
     * relation map. Rows with relations are wrapped in an EnhanceEntity.
     *
     * Removed dead code from the original: unused locals idColumnName,
     * fields and fieldIter, plus the commented-out eligibleToFetch/
     * readEmbeddable fragments.
     *
     * @param key             the id used to initialize each entity (may be null)
     * @param entityMetadata  metadata for the entity class being populated
     * @param metaModel       JPA metamodel used for attribute/embeddable lookups
     * @param schemaTable     store-side table definition for field metadata
     * @param rowsIter        rows to scroll through
     * @param relationMap     reassigned per row (the incoming value is unused)
     * @param columnsToSelect restricts population to these columns when non-empty
     * @return list of entities, or EnhanceEntity wrappers when relations exist
     * @throws InstantiationException if an embeddable cannot be instantiated
     * @throws IllegalAccessException if an embeddable constructor is inaccessible
     */
    private List scrollAndPopulate(Object key, EntityMetadata entityMetadata, MetamodelImpl metaModel,
            Table schemaTable, Iterator<Row> rowsIter, Map<String, Object> relationMap, List<String> columnsToSelect)
            throws InstantiationException, IllegalAccessException
    {
        List results = new ArrayList();
        Object entity = null;
        EntityType entityType = metaModel.entity(entityMetadata.getEntityClazz());
        while (rowsIter.hasNext())
        {
            relationMap = new HashMap<String, Object>();
            entity = initializeEntity(key, entityMetadata);
            Row row = rowsIter.next();
            FieldDef fieldMetadata = null;
            FieldValue value = null;
            // Populate the identifier first; embeddable ids need special handling.
            if (!metaModel.isEmbeddable(entityMetadata.getIdAttribute().getBindableJavaType()))
            {
                populateId(entityMetadata, schemaTable, entity, row);
            }
            else
            {
                onEmbeddableId(entityMetadata, metaModel, schemaTable, entity, row);
            }
            Set<Attribute> attributes = entityType.getAttributes();
            for (Attribute attribute : attributes)
            {
                String jpaColumnName = ((AbstractAttribute) attribute).getJPAColumnName();
                // Skip the id attribute (already populated) and columns not selected.
                if (eligibleToFetch(columnsToSelect, jpaColumnName)
                        && !attribute.getName().equals(entityMetadata.getIdAttribute().getName()))
                {
                    if (metaModel.isEmbeddable(((AbstractAttribute) attribute).getBindableJavaType()))
                    {
                        // Copy each embedded column into a fresh embeddable instance.
                        EmbeddableType embeddableId = metaModel
                                .embeddable(((AbstractAttribute) attribute).getBindableJavaType());
                        Set<Attribute> embeddedAttributes = embeddableId.getAttributes();
                        Object embeddedObject = ((AbstractAttribute) attribute).getBindableJavaType().newInstance();
                        for (Attribute embeddedAttrib : embeddedAttributes)
                        {
                            String embeddedColumnName = ((AbstractAttribute) embeddedAttrib).getJPAColumnName();
                            fieldMetadata = schemaTable.getField(embeddedColumnName);
                            value = row.get(embeddedColumnName);
                            NoSqlDBUtils.get(fieldMetadata, value, embeddedObject,
                                    (Field) embeddedAttrib.getJavaMember());
                        }
                        PropertyAccessorHelper.set(entity, (Field) attribute.getJavaMember(), embeddedObject);
                    }
                    else
                    {
                        fieldMetadata = schemaTable.getField(jpaColumnName);
                        value = row.get(jpaColumnName);
                        if (!attribute.isAssociation() && value != null)
                        {
                            NoSqlDBUtils.get(fieldMetadata, value, entity, (Field) attribute.getJavaMember());
                        }
                        else if (attribute.isAssociation() && value != null)
                        {
                            // Record the foreign-key value so the relation can be
                            // resolved later (many-to-many is handled elsewhere).
                            Relation relation = entityMetadata.getRelation(attribute.getName());
                            if (relation != null)
                            {
                                EntityMetadata associationMetadata = KunderaMetadataManager
                                        .getEntityMetadata(kunderaMetadata, relation.getTargetEntity());
                                if (!relation.getType().equals(ForeignKey.MANY_TO_MANY))
                                {
                                    relationMap.put(jpaColumnName, NoSqlDBUtils.get(fieldMetadata, value,
                                            (Field) associationMetadata.getIdAttribute().getJavaMember()));
                                }
                            }
                        }
                    }
                }
            }
            if (entity != null)
            {
                // Wrap in EnhanceEntity only when relations were collected.
                results.add(
                        relationMap.isEmpty() ? entity
                                : new EnhanceEntity(entity,
                                        key != null ? key : PropertyAccessorHelper.getId(entity, entityMetadata),
                                        relationMap));
            }
        }
        return results;
    }
}
|
public class class_name {
private List scrollAndPopulate(Object key, EntityMetadata entityMetadata, MetamodelImpl metaModel,
Table schemaTable, Iterator<Row> rowsIter, Map<String, Object> relationMap, List<String> columnsToSelect)
throws InstantiationException, IllegalAccessException
{
List results = new ArrayList();
Object entity = null;
EntityType entityType = metaModel.entity(entityMetadata.getEntityClazz());
// here
while (rowsIter.hasNext())
{
relationMap = new HashMap<String, Object>();
entity = initializeEntity(key, entityMetadata);
Row row = rowsIter.next();
List<String> fields = row.getTable().getFields();
FieldDef fieldMetadata = null;
FieldValue value = null;
String idColumnName = ((AbstractAttribute) entityMetadata.getIdAttribute()).getJPAColumnName();
if (/* eligibleToFetch(columnsToSelect, idColumnName) && */!metaModel
.isEmbeddable(entityMetadata.getIdAttribute().getBindableJavaType()))
{
populateId(entityMetadata, schemaTable, entity, row);
}
else
{
onEmbeddableId(entityMetadata, metaModel, schemaTable, entity, row);
}
Iterator<String> fieldIter = fields.iterator();
Set<Attribute> attributes = entityType.getAttributes();
for (Attribute attribute : attributes)
{
String jpaColumnName = ((AbstractAttribute) attribute).getJPAColumnName();
if (eligibleToFetch(columnsToSelect, jpaColumnName)
&& !attribute.getName().equals(entityMetadata.getIdAttribute().getName()))
{
if (metaModel.isEmbeddable(((AbstractAttribute) attribute).getBindableJavaType()))
{
// readEmbeddable(value, columnsToSelect,
// entityMetadata, metaModel, schemaTable, value,
// attribute);
EmbeddableType embeddableId = metaModel
.embeddable(((AbstractAttribute) attribute).getBindableJavaType());
Set<Attribute> embeddedAttributes = embeddableId.getAttributes();
Object embeddedObject = ((AbstractAttribute) attribute).getBindableJavaType().newInstance();
for (Attribute embeddedAttrib : embeddedAttributes)
{
String embeddedColumnName = ((AbstractAttribute) embeddedAttrib).getJPAColumnName();
fieldMetadata = schemaTable.getField(embeddedColumnName); // depends on control dependency: [for], data = [none]
value = row.get(embeddedColumnName); // depends on control dependency: [for], data = [none]
NoSqlDBUtils.get(fieldMetadata, value, embeddedObject,
(Field) embeddedAttrib.getJavaMember()); // depends on control dependency: [for], data = [none]
}
PropertyAccessorHelper.set(entity, (Field) attribute.getJavaMember(), embeddedObject); // depends on control dependency: [if], data = [none]
}
else
{
fieldMetadata = schemaTable.getField(jpaColumnName); // depends on control dependency: [if], data = [none]
value = row.get(jpaColumnName); // depends on control dependency: [if], data = [none]
if (!attribute.isAssociation() && value != null)
{
NoSqlDBUtils.get(fieldMetadata, value, entity, (Field) attribute.getJavaMember()); // depends on control dependency: [if], data = [none]
}
else if (attribute.isAssociation() && value != null)
{
Relation relation = entityMetadata.getRelation(attribute.getName());
if (relation != null)
{
EntityMetadata associationMetadata = KunderaMetadataManager
.getEntityMetadata(kunderaMetadata, relation.getTargetEntity());
if (!relation.getType().equals(ForeignKey.MANY_TO_MANY))
{
relationMap.put(jpaColumnName, NoSqlDBUtils.get(fieldMetadata, value,
(Field) associationMetadata.getIdAttribute().getJavaMember())); // depends on control dependency: [if], data = [none]
}
}
}
}
}
}
if (entity != null)
{
results.add(
relationMap.isEmpty() ? entity
: new EnhanceEntity(entity,
key != null ? key : PropertyAccessorHelper.getId(entity, entityMetadata),
relationMap));
}
}
return results;
} }
|
public class class_name {
    /**
     * Sets the order in which accessed screens are committed. Silently
     * ignores any value other than {@code FORWARD_ORDERING} or
     * {@code REVERSE_ORDERING}, and does nothing when the screens are
     * already held in the requested order.
     *
     * @param order one of {@code FORWARD_ORDERING} or {@code REVERSE_ORDERING}
     */
    public void setCommitOrder(String order)
    {
        // Check that the specified order matches one of the ordering constants
        if (!order.equals(FORWARD_ORDERING) && !order.equals(REVERSE_ORDERING))
        {
            return;
        }
        // Only do work when the requested ordering differs from the current
        // stack type (LifoStack = reverse order, FifoStack = forward order).
        if (order.equals(FORWARD_ORDERING) && (accessedScreens instanceof LifoStack))
        {
            // Copy the screens into a forward ordered stack
            accessedScreens = new FifoStack(accessedScreens);
        }
        else if (order.equals(REVERSE_ORDERING) && (accessedScreens instanceof FifoStack))
        {
            // Copy the screens into a reverse ordered stack.
            // Fixed: the original tested "instanceof LifoStack" here, which
            // re-copied an already-reverse-ordered stack and could never
            // convert a forward-ordered (FifoStack) stack to reverse order.
            accessedScreens = new LifoStack(accessedScreens);
        }
    }
}
|
public class class_name {
public void setCommitOrder(String order)
{
// Check that the specified order matches one of the ordering constants
if (!order.equals(FORWARD_ORDERING) && !order.equals(REVERSE_ORDERING))
{
return; // depends on control dependency: [if], data = [none]
}
// Check that the new ordering is different from the existing one so that some work needs to be done to change
// it
if (order.equals(FORWARD_ORDERING) && (accessedScreens instanceof LifoStack))
{
// Copy the screens into a forward ordered stack
accessedScreens = new FifoStack(accessedScreens); // depends on control dependency: [if], data = [none]
}
else if (order.equals(REVERSE_ORDERING) && (accessedScreens instanceof LifoStack))
{
// Copy the screens into a reverse ordered stack
accessedScreens = new LifoStack(accessedScreens); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
    /**
     * Appends the given activations to this result's activation list,
     * lazily creating the backing list on first use.
     *
     * @param activationList activations to append
     * @return this result object, to allow method-call chaining
     */
    public DescribeActivationsResult withActivationList(Activation... activationList) {
        // Lazily allocate the backing list, presized to the varargs length.
        if (this.activationList == null) {
            setActivationList(new com.amazonaws.internal.SdkInternalList<Activation>(activationList.length));
        }
        for (int i = 0; i < activationList.length; i++) {
            this.activationList.add(activationList[i]);
        }
        return this;
    }
}
|
public class class_name {
public DescribeActivationsResult withActivationList(Activation... activationList) {
if (this.activationList == null) {
setActivationList(new com.amazonaws.internal.SdkInternalList<Activation>(activationList.length)); // depends on control dependency: [if], data = [none]
}
for (Activation ele : activationList) {
this.activationList.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} }
|
public class class_name {
    /**
     * Assigns a new unique ID to this calendar, keeping the parent project
     * file's calendar ID map in sync: the previous mapping (if any) is
     * removed before the new one is registered.
     *
     * @param uniqueID the new unique ID for this calendar
     */
    @Override public void setUniqueID(Integer uniqueID)
    {
        ProjectFile parent = getParentFile();
        Integer previousID = m_uniqueID;
        // Drop the stale mapping before registering the replacement.
        if (previousID != null)
        {
            parent.getCalendars().unmapUniqueID(previousID);
        }
        parent.getCalendars().mapUniqueID(uniqueID, this);
        m_uniqueID = uniqueID;
    }
}
|
public class class_name {
@Override public void setUniqueID(Integer uniqueID)
{
ProjectFile parent = getParentFile();
if (m_uniqueID != null)
{
parent.getCalendars().unmapUniqueID(m_uniqueID); // depends on control dependency: [if], data = [(m_uniqueID]
}
parent.getCalendars().mapUniqueID(uniqueID, this);
m_uniqueID = uniqueID;
} }
|
public class class_name {
    /**
     * Validates a runtime vertex and returns a list of human-readable issue
     * descriptions. Starts from the generic element checks, then adds
     * name-specific problems: a null name, an empty name, or a name
     * containing whitespace.
     *
     * @param vertex the vertex to validate
     * @return a mutable list of issue messages; empty when the vertex is valid
     */
    public static List<String> hasIssues(Vertex.RuntimeVertex vertex) {
        List<String> issues = new ArrayList<>(ElementChecker.hasIssues(vertex));
        // A null name short-circuits the remaining name checks.
        if (vertex.getName() == null) {
            issues.add("Name of vertex cannot be null");
            return issues;
        }
        if (vertex.getName().isEmpty()) {
            issues.add("Name of vertex cannot be an empty string");
        }
        if (CharMatcher.whitespace().matchesAnyOf(vertex.getName())) {
            issues.add("Name of vertex cannot have any white spaces.");
        }
        return issues;
    }
}
|
public class class_name {
static public List<String> hasIssues(Vertex.RuntimeVertex vertex) {
List<String> issues = new ArrayList<>(ElementChecker.hasIssues(vertex));
if (vertex.getName() == null) {
issues.add("Name of vertex cannot be null"); // depends on control dependency: [if], data = [none]
} else {
if (vertex.getName().isEmpty()) {
issues.add("Name of vertex cannot be an empty string"); // depends on control dependency: [if], data = [none]
}
if (CharMatcher.whitespace().matchesAnyOf(vertex.getName())) {
issues.add("Name of vertex cannot have any white spaces."); // depends on control dependency: [if], data = [none]
}
}
return issues;
} }
|
public class class_name {
// Transitions this message endpoint factory toward deactivation when its
// application stops. Under the state lock: ACTIVE and ACTIVATING both move
// to DEACTIVATE_PENDING (ACTIVATING additionally wakes waiters);
// DEACTIVATE_PENDING just wakes waiters; INACTIVE is traced and ignored;
// DEACTIVATING should be impossible here and only produces an FFDC record.
@Override
public void applicationStopping(String appName)
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
{
Tr.entry(tc, "MEF.applicationStopping for application " + appName);
}
synchronized (ivStateLock)
{
if (ivState == ACTIVE_STATE)
{
// No thread is ever waiting for a notification while
// in the active state, so simply change state to
// indicate deactivate is pending.
ivState = DEACTIVATE_PENDING_STATE;
}
else if (ivState == ACTIVATING_STATE)
{
// Change state to indicate deactivate is pending
// and notify any thread that is waiting for the activate
// to complete.
ivState = DEACTIVATE_PENDING_STATE;
ivStateLock.notifyAll();
}
else if (ivState == DEACTIVATE_PENDING_STATE)
{
// activateendpoint threw an exception and changed
// state to deactivate pending. So we need to notify
// threads that are waiting for the activate
// to complete (e.g. threads blocked by createEndpoint
// during the activation of an endpoint).
ivStateLock.notifyAll();
}
else if (ivState == INACTIVE_STATE)
{
// This is possible, but not likely. For example, we could create this
// MEF object and initialized it to INACTIVE_STATE. An exception occurs
// before we make it to the activateEndpoint method for this MEF. If
// that happens, then it is possible to get to this method while in
// the inactive state. So simply trace fact that this MEF was called
// in inactive state and nothing was actually done.
if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled())
{
Tr.event(tc, "MEF.applicationStopping for application " + appName
+ " was called for an inactive endpoint.");
}
}
else
{
// The only other state is DEACTIVATING_STATE. This method should NEVER
// be called while in the deactivating state. Runtime framework should call
// this method before it ever attempts to stop the modules of an application.
// Thus, we should not enter deactivating state until after this method is called.
// Create FFDC log file for this problem and trace occurrence of this event.
// No reason to throw the exception since we created it just for the purpose
// of doing the FFDC.
String msg = "Internal programming error - applicationStopping called for application \""
+ appName + "\" while in deactivating state. This should NEVER occur.";
IllegalStateException ex = new IllegalStateException(msg);
FFDCFilter.processException(ex, CLASS_NAME + ".applicationStopping", "1208", this);
if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled())
{
Tr.event(tc, msg);
}
}
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
{
Tr.exit(tc, "MEF.applicationStopping for application " + appName);
}
} }
|
public class class_name {
// Lifecycle callback: moves this message-endpoint toward deactivation and wakes any threads waiting on ivStateLock.
// NOTE(review): dependency-annotated duplicate of the preceding block; the trailing
// "// depends on control dependency: ..." markers are dataset labels — keep them attached to their statements.
@Override
public void applicationStopping(String appName)
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
{
Tr.entry(tc, "MEF.applicationStopping for application " + appName); // depends on control dependency: [if], data = [none]
}
synchronized (ivStateLock)
{
if (ivState == ACTIVE_STATE)
{
// No thread is ever waiting for a notification while
// in the active state, so simply change state to
// indicate deactivate is pending.
ivState = DEACTIVATE_PENDING_STATE; // depends on control dependency: [if], data = [none]
}
else if (ivState == ACTIVATING_STATE)
{
// Change state to indicate deactivate is pending
// and notify any thread that is waiting for the activate
// to complete.
ivState = DEACTIVATE_PENDING_STATE; // depends on control dependency: [if], data = [none]
ivStateLock.notifyAll(); // depends on control dependency: [if], data = [none]
}
else if (ivState == DEACTIVATE_PENDING_STATE)
{
// activateendpoint threw an exception and changed
// state to deactivate pending. So we need to notify
// threads that are waiting for the activate
// to complete (e.g. threads blocked by createEndpoint
// during the activation of an endpoint).
ivStateLock.notifyAll(); // depends on control dependency: [if], data = [none]
}
else if (ivState == INACTIVE_STATE)
{
// This is possible, but not likely. For example, we could create this
// MEF object and initialized it to INACTIVE_STATE. An exception occurs
// before we make it to the activateEndpoint method for this MEF. If
// that happens, then it is possible to get to this method while in
// the inactive state. So simply trace fact that this MEF was called
// in inactive state and nothing was actually done.
if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled())
{
Tr.event(tc, "MEF.applicationStopping for application " + appName
+ " was called for an inactive endpoint."); // depends on control dependency: [if], data = [none]
}
}
else
{
// The only other state is DEACTIVATING_STATE. This method should NEVER
// be called while in the deactivating state. Runtime framework should call
// this method before it ever attempts to stop the modules of an application.
// Thus, we should not enter deactivating state until after this method is called.
// Create FFDC log file for this problem and trace occurrence of this event.
// No reason to throw the exception since we created it just for the purpose
// of doing the FFDC.
String msg = "Internal programming error - applicationStopping called for application \"" // depends on control dependency: [if], data = [none]
+ appName + "\" while in deactivating state. This should NEVER occur.";
IllegalStateException ex = new IllegalStateException(msg);
FFDCFilter.processException(ex, CLASS_NAME + ".applicationStopping", "1208", this); // depends on control dependency: [if], data = [none]
if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled())
{
Tr.event(tc, msg); // depends on control dependency: [if], data = [none]
}
}
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
{
Tr.exit(tc, "MEF.applicationStopping for application " + appName); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
public static boolean toBoolean(Object obj) {
if (obj != null) {
if (obj instanceof Boolean) {
return (Boolean)obj;
} else {
return toBoolean(obj.toString());
}
} else {
return false;
}
} }
|
public class class_name {
// Coerces an arbitrary object to boolean (null -> false; non-Boolean values go through toString()).
// NOTE(review): dependency-annotated duplicate of the preceding block; the trailing
// "// depends on control dependency: ..." markers are dataset labels — keep them attached to their statements.
public static boolean toBoolean(Object obj) {
if (obj != null) {
if (obj instanceof Boolean) {
return (Boolean)obj; // depends on control dependency: [if], data = [none]
} else {
return toBoolean(obj.toString()); // depends on control dependency: [if], data = [none]
}
} else {
return false; // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
    /**
     * Writes out the non-inherited detail page entries, substituting
     * {@code newId} for entries that have no id of their own.
     *
     * @param infos detail page entries to process
     * @param newId fallback id used when an entry's own id is null
     */
    private void writeDetailPageInfos(List<CmsDetailPageInfo> infos, CmsUUID newId) {
        int index = 0;
        for (CmsDetailPageInfo info : infos) {
            if (!info.isInherited()) {
                // Inherited entries are skipped and do not consume an index slot.
                CmsUUID effectiveId = info.getId();
                writeValue(info.getType(), (effectiveId != null) ? effectiveId : newId, index);
                index++;
            }
        }
    }
}
|
public class class_name {
// Writes non-inherited detail page entries, substituting newId for entries with a missing id.
// NOTE(review): dependency-annotated duplicate of the preceding block; the trailing
// "// depends on control dependency: ..." markers are dataset labels — keep them attached to their statements.
private void writeDetailPageInfos(List<CmsDetailPageInfo> infos, CmsUUID newId) {
int i = 0;
for (CmsDetailPageInfo info : infos) {
if (info.isInherited()) {
continue;
}
CmsUUID id = info.getId();
if (id == null) {
id = newId; // depends on control dependency: [if], data = [none]
}
writeValue(info.getType(), id, i); // depends on control dependency: [for], data = [info]
i += 1; // depends on control dependency: [for], data = [none]
}
} }
|
public class class_name {
    /**
     * Appends a column default value to {@code sb}, escaping it when the
     * field type requires an escaped default.
     *
     * @param sb           target buffer
     * @param fieldType    field metadata deciding whether escaping is needed
     * @param defaultValue value to append (appended verbatim when no escaping is required)
     */
    private void appendDefaultValue(StringBuilder sb, FieldType fieldType, Object defaultValue) {
        if (!fieldType.isEscapedDefaultValue()) {
            sb.append(defaultValue);
            return;
        }
        appendEscapedWord(sb, defaultValue.toString());
    }
}
|
public class class_name {
// Appends a column default value, escaping it when the field type requires.
// NOTE(review): dependency-annotated duplicate of the preceding block; the trailing
// "// depends on control dependency: ..." markers are dataset labels — keep them attached to their statements.
private void appendDefaultValue(StringBuilder sb, FieldType fieldType, Object defaultValue) {
if (fieldType.isEscapedDefaultValue()) {
appendEscapedWord(sb, defaultValue.toString()); // depends on control dependency: [if], data = [none]
} else {
sb.append(defaultValue); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
    /**
     * Polls {@code getText(true)} until it returns non-empty text different from
     * {@code excludeText}, for up to {@code seconds} seconds (5 polls/second).
     *
     * <p>Fix: the final comparison previously called {@code excludeText.equals(text)}
     * and threw a NullPointerException when {@code excludeText} was null, even though
     * every earlier comparison in this method tolerates a null {@code excludeText}.
     * The return is now null-safe with identical behavior for non-null arguments.
     *
     * @param seconds     maximum time to wait; 0 means a single immediate check
     * @param excludeText text that does not count as a rendered result, may be null
     * @return the rendered text, or null when nothing (or only {@code excludeText}) appeared
     */
    public String waitTextToRender(int seconds, String excludeText) {
        String text = null;
        if (seconds == 0 && ((text = getText(true)) != null && text.length() > 0 && !text.equals(excludeText))) {
            return text;
        }
        for (int i = 0, count = 5 * seconds; i < count; i++) {
            text = getText(true);
            if (text != null && text.length() > 0 && !text.equals(excludeText)) {
                return text;
            }
            if (i == 0) {
                // log only the first time around
                LOGGER.debug("waitTextToRender");
            }
            Utils.sleep(200);
        }
        LOGGER.warn("No text was found for Element after " + seconds + " sec; " + this);
        // Null-safe: excludeText may legitimately be null (earlier comparisons tolerate it).
        return (text == null || text.equals(excludeText)) ? null : text;
    }
}
|
public class class_name {
// Polls getText() until non-empty text differing from excludeText appears, for up to `seconds` seconds.
// NOTE(review): dependency-annotated duplicate of the preceding block; the trailing
// "// depends on control dependency: ..." markers are dataset labels — keep them attached to their statements.
public String waitTextToRender(int seconds, String excludeText) {
String text = null;
if (seconds == 0 && ((text = getText(true)) != null && text.length() > 0 && !text.equals(excludeText))) {
return text; // depends on control dependency: [if], data = [none]
}
for (int i = 0, count = 5 * seconds; i < count; i++) {
text = getText(true); // depends on control dependency: [for], data = [none]
if (text != null && text.length() > 0 && !text.equals(excludeText)) {
return text; // depends on control dependency: [if], data = [none]
}
if (i == 0) {
// log only first time
LOGGER.debug("waitTextToRender"); // depends on control dependency: [if], data = [none]
}
Utils.sleep(200); // depends on control dependency: [for], data = [none]
}
LOGGER.warn("No text was found for Element after " + seconds + " sec; " + this);
return excludeText.equals(text) ? null : text;
} }
|
public class class_name {
    /**
     * Returns the modification time of the readiness signal for the given view
     * constraints, or -1 when no signal file exists.
     *
     * @param viewConstraints constraints identifying the view
     * @return signal modification timestamp in milliseconds, or -1 if absent
     * @throws DatasetIOException when the signal path cannot be accessed
     */
    public long getReadyTimestamp(Constraints viewConstraints) {
        String normalizedConstraints = getNormalizedConstraints(viewConstraints);
        Path signalPath = new Path(signalDirectory, normalizedConstraints);
        try {
            return rootFileSystem.getFileStatus(signalPath).getModificationTime();
        } catch (final FileNotFoundException notReady) {
            // No signal file yet: the view is simply not ready.
            return -1;
        } catch (IOException e) {
            throw new DatasetIOException("Could not access signal path: " + signalPath, e);
        }
    }
}
|
public class class_name {
// Returns the readiness-signal file's modification time, or -1 when the signal path doesn't exist.
// NOTE(review): dependency-annotated duplicate of the preceding block; the trailing
// "// depends on control dependency: ..." markers are dataset labels — keep them attached to their statements.
public long getReadyTimestamp(Constraints viewConstraints) {
String normalizedConstraints = getNormalizedConstraints(viewConstraints);
Path signalPath = new Path(signalDirectory, normalizedConstraints);
// check if the signal exists
try {
try {
FileStatus signalStatus = rootFileSystem.getFileStatus(signalPath);
return signalStatus.getModificationTime(); // depends on control dependency: [try], data = [none]
} catch (final FileNotFoundException ex) {
// empty, will be thrown when the signal path doesn't exist
} // depends on control dependency: [catch], data = [none]
return -1; // depends on control dependency: [try], data = [none]
} catch (IOException e) {
throw new DatasetIOException("Could not access signal path: " + signalPath, e);
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
    /**
     * Seeks the current connection's subscriber stream to the given position.
     * Silently does nothing when the connection is not stream-capable or the
     * resolved stream is not a subscriber stream; reports NS_SEEK_FAILED when
     * the stream does not support seeking.
     *
     * @param position target position to seek to
     */
    public void seek(int position) {
        log.trace("seek - position:{}", position);
        IConnection conn = Red5.getConnectionLocal();
        if (!(conn instanceof IStreamCapableConnection)) {
            return;
        }
        IStreamCapableConnection streamConn = (IStreamCapableConnection) conn;
        Number streamId = conn.getStreamId();
        IClientStream stream = streamConn.getStreamById(streamId);
        if (!(stream instanceof ISubscriberStream)) {
            // instanceof is false for null, so this also covers a missing stream
            return;
        }
        ISubscriberStream subscriberStream = (ISubscriberStream) stream;
        try {
            subscriberStream.seek(position);
        } catch (OperationNotSupportedException err) {
            sendNSFailed(streamConn, StatusCodes.NS_SEEK_FAILED, "The stream doesn't support seeking.", stream.getName(), streamId);
        }
    }
}
|
public class class_name {
// Seeks the current connection's subscriber stream to `position`; reports NS_SEEK_FAILED when unsupported.
// NOTE(review): dependency-annotated duplicate of the preceding block; the trailing
// "// depends on control dependency: ..." markers are dataset labels — keep them attached to their statements.
public void seek(int position) {
log.trace("seek - position:{}", position);
IConnection conn = Red5.getConnectionLocal();
if (conn instanceof IStreamCapableConnection) {
IStreamCapableConnection streamConn = (IStreamCapableConnection) conn;
Number streamId = conn.getStreamId();
IClientStream stream = streamConn.getStreamById(streamId);
if (stream != null && stream instanceof ISubscriberStream) {
ISubscriberStream subscriberStream = (ISubscriberStream) stream;
try {
subscriberStream.seek(position);
// depends on control dependency: [try], data = [none]
} catch (OperationNotSupportedException err) {
sendNSFailed(streamConn, StatusCodes.NS_SEEK_FAILED, "The stream doesn't support seeking.", stream.getName(), streamId);
}
// depends on control dependency: [catch], data = [none]
}
}
} }
|
public class class_name {
// Selects log segments eligible for (minor) compaction: already-compacted segments, or full segments
// whose entries are below the minor index and which are followed by a segment containing at least one
// committed entry. Of those, only segments whose released-entry ratio scaled by the segment version
// meets storage.compactionThreshold() are returned.
// NOTE(review): assumes manager.segments() is non-empty — the unconditional iterator.next() below
// would otherwise throw NoSuchElementException; confirm against SegmentManager's invariants.
private Iterable<Segment> getCompactableSegments(Storage storage, SegmentManager manager) {
List<Segment> segments = new ArrayList<>(manager.segments().size());
Iterator<Segment> iterator = manager.segments().iterator();
Segment segment = iterator.next();
// The last segment is intentionally never considered (the loop inspects `segment` only while a
// `nextSegment` exists), so a non-empty tail always remains in the log.
while (iterator.hasNext()) {
Segment nextSegment = iterator.next();
// Segments that have already been compacted are eligible for compaction. For uncompacted segments, the segment must be full, consist
// of entries less than the minorIndex, and a later segment with at least one committed entry must exist in the log. This ensures that
// a non-empty entry always remains at the end of the log.
if (segment.isCompacted() || (segment.isFull() && segment.lastIndex() < compactor.minorIndex() && nextSegment.firstIndex() <= manager.commitIndex() && !nextSegment.isEmpty())) {
// Calculate the percentage of entries that have been released in the segment.
double compactablePercentage = segment.releaseCount() / (double) segment.count();
// If the percentage of entries released times the segment version meets the compaction threshold,
// add the segment to the segments list for compaction.
if (compactablePercentage * segment.descriptor().version() >= storage.compactionThreshold()) {
segments.add(segment);
}
}
segment = nextSegment;
}
return segments;
} }
|
public class class_name {
// Selects segments eligible for compaction based on release ratio and the storage compaction threshold.
// NOTE(review): dependency-annotated duplicate of the preceding block; the trailing
// "// depends on control dependency: ..." markers are dataset labels — keep them attached to their statements.
private Iterable<Segment> getCompactableSegments(Storage storage, SegmentManager manager) {
List<Segment> segments = new ArrayList<>(manager.segments().size());
Iterator<Segment> iterator = manager.segments().iterator();
Segment segment = iterator.next();
while (iterator.hasNext()) {
Segment nextSegment = iterator.next();
// Segments that have already been compacted are eligible for compaction. For uncompacted segments, the segment must be full, consist
// of entries less than the minorIndex, and a later segment with at least one committed entry must exist in the log. This ensures that
// a non-empty entry always remains at the end of the log.
if (segment.isCompacted() || (segment.isFull() && segment.lastIndex() < compactor.minorIndex() && nextSegment.firstIndex() <= manager.commitIndex() && !nextSegment.isEmpty())) {
// Calculate the percentage of entries that have been released in the segment.
double compactablePercentage = segment.releaseCount() / (double) segment.count();
// If the percentage of entries released times the segment version meets the compaction threshold,
// add the segment to the segments list for compaction.
if (compactablePercentage * segment.descriptor().version() >= storage.compactionThreshold()) {
segments.add(segment); // depends on control dependency: [if], data = [none]
}
}
segment = nextSegment; // depends on control dependency: [while], data = [none]
}
return segments;
} }
|
public class class_name {
// Loads a page of pipeline history with minimal per-instance data. Returns null — with `result`
// populated as 404 or 403 — when the pipeline does not exist or the user lacks view permission.
// Each returned instance is enriched with material revisions, placeholder stages, can-run status
// and stage-operate permission.
public PipelineInstanceModels loadMinimalData(String pipelineName, Pagination pagination, Username username, OperationResult result) {
if (!goConfigService.currentCruiseConfig().hasPipelineNamed(new CaseInsensitiveString(pipelineName))) {
result.notFound("Not Found", "Pipeline " + pipelineName + " not found", HealthStateType.general(HealthStateScope.GLOBAL));
return null;
}
if (!securityService.hasViewPermissionForPipeline(username, pipelineName)) {
result.forbidden("Forbidden", NOT_AUTHORIZED_TO_VIEW_PIPELINE, HealthStateType.general(HealthStateScope.forPipeline(pipelineName)));
return null;
}
PipelineInstanceModels history = pipelineDao.loadHistory(pipelineName, pagination.getPageSize(), pagination.getOffset());
for (PipelineInstanceModel pipelineInstanceModel : history) {
populateMaterialRevisionsOnBuildCause(pipelineInstanceModel);
populatePlaceHolderStages(pipelineInstanceModel);
populateCanRunStatus(username, pipelineInstanceModel);
populateStageOperatePermission(pipelineInstanceModel, username);
}
return history;
} }
|
public class class_name {
// Loads a page of pipeline history after existence and view-permission checks (null + populated result on failure).
// NOTE(review): dependency-annotated duplicate of the preceding block; the trailing
// "// depends on control dependency: ..." markers are dataset labels — keep them attached to their statements.
public PipelineInstanceModels loadMinimalData(String pipelineName, Pagination pagination, Username username, OperationResult result) {
if (!goConfigService.currentCruiseConfig().hasPipelineNamed(new CaseInsensitiveString(pipelineName))) {
result.notFound("Not Found", "Pipeline " + pipelineName + " not found", HealthStateType.general(HealthStateScope.GLOBAL)); // depends on control dependency: [if], data = [none]
return null; // depends on control dependency: [if], data = [none]
}
if (!securityService.hasViewPermissionForPipeline(username, pipelineName)) {
result.forbidden("Forbidden", NOT_AUTHORIZED_TO_VIEW_PIPELINE, HealthStateType.general(HealthStateScope.forPipeline(pipelineName))); // depends on control dependency: [if], data = [none]
return null; // depends on control dependency: [if], data = [none]
}
PipelineInstanceModels history = pipelineDao.loadHistory(pipelineName, pagination.getPageSize(), pagination.getOffset());
for (PipelineInstanceModel pipelineInstanceModel : history) {
populateMaterialRevisionsOnBuildCause(pipelineInstanceModel); // depends on control dependency: [for], data = [pipelineInstanceModel]
populatePlaceHolderStages(pipelineInstanceModel); // depends on control dependency: [for], data = [pipelineInstanceModel]
populateCanRunStatus(username, pipelineInstanceModel); // depends on control dependency: [for], data = [pipelineInstanceModel]
populateStageOperatePermission(pipelineInstanceModel, username); // depends on control dependency: [for], data = [pipelineInstanceModel]
}
return history;
} }
|
public class class_name {
// Starts the background socket receive loop on receiverExecutor. Idempotent: a no-op when a
// receiver task has already been submitted. The loop exits on selector wake-up without a ready
// key, on thread interruption, or on any I/O failure (which also triggers disconnect()).
private void startReceiver()
{
if (receiver != null)
{
return;
}
receiver = receiverExecutor.submit(new Runnable()
{
@Override
public void run()
{
// Read messages off the socket until interrupted, closed, or an I/O error occurs.
while (true)
{
try
{
if (readSelector.select() != 1 || Thread.interrupted())
{
break;
}
readSelector.selectedKeys().clear();
for (String value : packet.read(socket))
{
LOGGER.trace("Received message: {}", value);
// Intercepted messages are consumed by the interceptor and never queued.
if (interceptor != null && interceptor.intercept(value))
{
continue;
}
/*
* If the client stops waiting for a response, it
* will increment this value. To keep things in
* sync, those skipped responses need to be thrown
* away when they arrive.
*/
if (skippedResponses.get() > 0)
{
LOGGER.trace("Discarding skipped message: {}", value);
skippedResponses.decrementAndGet();
continue;
}
queue.add(value);
}
}
catch (InterruptedIOException e)
{
LOGGER.info("Receiver interrupted");
break;
}
catch (ClosedByInterruptException e)
{
LOGGER.info("Receiver closed by interrupt");
break;
}
catch (IOException e)
{
LOGGER.error("Connection error", e);
disconnect();
break;
}
}
}
});
} }
|
public class class_name {
// Starts the background socket receive loop (idempotent: no-op when already running).
// NOTE(review): dependency-annotated duplicate of the preceding block; the trailing
// "// depends on control dependency: ..." markers are dataset labels — keep them attached to their statements.
private void startReceiver()
{
if (receiver != null)
{
return; // depends on control dependency: [if], data = [none]
}
receiver = receiverExecutor.submit(new Runnable()
{
@Override
public void run()
{
while (true)
{
try
{
if (readSelector.select() != 1 || Thread.interrupted())
{
break;
}
readSelector.selectedKeys().clear(); // depends on control dependency: [try], data = [none]
for (String value : packet.read(socket))
{
LOGGER.trace("Received message: {}", value); // depends on control dependency: [for], data = [value]
if (interceptor != null && interceptor.intercept(value))
{
continue;
}
/*
* If the client stops waiting for a response, it
* will increment this value. To keep things in
* sync, those skipped responses need to be thrown
* away when they arrive.
*/
if (skippedResponses.get() > 0)
{
LOGGER.trace("Discarding skipped message: {}", value); // depends on control dependency: [if], data = [none]
skippedResponses.decrementAndGet(); // depends on control dependency: [if], data = [none]
continue;
}
queue.add(value); // depends on control dependency: [for], data = [value]
}
}
catch (InterruptedIOException e)
{
LOGGER.info("Receiver interrupted");
break;
} // depends on control dependency: [catch], data = [none]
catch (ClosedByInterruptException e)
{
LOGGER.info("Receiver closed by interrupt");
break;
} // depends on control dependency: [catch], data = [none]
catch (IOException e)
{
LOGGER.error("Connection error", e);
disconnect();
break;
} // depends on control dependency: [catch], data = [none]
}
}
});
} }
|
public class class_name {
    /**
     * Builds a token map that extracts the text of {@code Fragment} token
     * values whose tag is one of {@code tags}; every other token maps to null.
     *
     * @param tags accepted fragment tags (empty accepts none)
     * @return a string-valued token map over matching fragments
     */
    static TokenMap<String> fromFragment(final Object... tags) {
        return new TokenMap<String>() {
            @Override public String map(final Token token) {
                final Object value = token.value();
                if (!(value instanceof Fragment)) {
                    return null;
                }
                final Fragment fragment = (Fragment) value;
                return Objects.in(fragment.tag(), tags) ? fragment.text() : null;
            }
            @Override public String toString() {
                switch (tags.length) {
                    case 0:
                        return "";
                    case 1:
                        return String.valueOf(tags[0]);
                    default:
                        return "[" + Strings.join(", ", tags) + "]";
                }
            }
        };
    }
}
|
public class class_name {
// Token map extracting the text of Fragment token values whose tag is in `tags` (null otherwise).
// NOTE(review): dependency-annotated duplicate of the preceding block; the trailing
// "// depends on control dependency: ..." markers are dataset labels — keep them attached to their statements.
static TokenMap<String> fromFragment(final Object... tags) {
return new TokenMap<String>() {
@Override public String map(final Token token) {
final Object val = token.value();
if (val instanceof Fragment) {
Fragment c = (Fragment) val;
if (!Objects.in(c.tag(), tags)) return null;
return c.text(); // depends on control dependency: [if], data = [none]
}
else return null;
}
@Override public String toString() {
if (tags.length == 0) return "";
if (tags.length == 1) return String.valueOf(tags[0]);
return "[" + Strings.join(", ", tags) + "]";
}
};
} }
|
public class class_name {
    /**
     * Appends the filtered, character-normalized text to the internal buffer,
     * collapsing consecutive spaces (including a space already at the end of
     * the buffer) and never growing past {@code maxTextLength} when that limit
     * is non-zero.
     *
     * @param text text to append
     * @return this object, for chaining
     */
    @Override
    public TextObject append(CharSequence text) {
        if (maxTextLength > 0 && stringBuilder.length() >= maxTextLength) {
            return this;
        }
        text = textFilter.filter(text);
        // This logic cannot live in a TextFilter: the filter could neither stop
        // early at the length limit nor see the buffer's current last character
        // (needed to avoid appending a second consecutive space).
        char previous = stringBuilder.length() == 0 ? 0 : stringBuilder.charAt(stringBuilder.length() - 1);
        for (int i = 0; i < text.length(); i++) {
            if (maxTextLength != 0 && stringBuilder.length() >= maxTextLength) {
                break;
            }
            char current = CharNormalizer.normalize(text.charAt(i));
            if (current != ' ' || previous != ' ') {
                stringBuilder.append(current);
            }
            previous = current;
        }
        return this;
    }
}
|
public class class_name {
// Appends filtered, normalized text, collapsing runs of spaces and honoring maxTextLength.
// NOTE(review): dependency-annotated duplicate of the preceding block; the trailing
// "// depends on control dependency: ..." markers are dataset labels — keep them attached to their statements.
@Override
public TextObject append(CharSequence text) {
if (maxTextLength>0 && stringBuilder.length()>=maxTextLength) return this;
text = textFilter.filter(text);
//unfortunately this code can't be put into a TextFilter because:
//1) the limit could not be detected early, a lot of work would be done to waste time and memory
//2) the last character of the existing string builder could not be seen. if it is a space, we don't want
// to add yet another space.
char pre = stringBuilder.length()==0 ? 0 : stringBuilder.charAt(stringBuilder.length()-1);
for (int i=0; i<text.length() && (maxTextLength==0 || stringBuilder.length()<maxTextLength); i++) {
char c = CharNormalizer.normalize(text.charAt(i));
if (c != ' ' || pre != ' ') {
stringBuilder.append(c); // depends on control dependency: [if], data = [(c]
}
pre = c; // depends on control dependency: [for], data = [none]
}
return this;
} }
|
public class class_name {
// Resets this matrix to a left-handed perspective projection built from the vertical field of view,
// aspect ratio and near/far planes. zZeroToOne toggles between the two NDC depth-range conventions
// supported by this API (see the library's setPerspectiveLH documentation). Positive-infinite zFar
// or zNear values are special-cased with an epsilon so the projection stays numerically stable.
public Matrix4f setPerspectiveLH(float fovy, float aspect, float zNear, float zFar, boolean zZeroToOne) {
MemUtil.INSTANCE.zero(this);
float h = (float) Math.tan(fovy * 0.5f);
this._m00(1.0f / (h * aspect));
this._m11(1.0f / h);
boolean farInf = zFar > 0 && Float.isInfinite(zFar);
boolean nearInf = zNear > 0 && Float.isInfinite(zNear);
if (farInf) {
// See: "Infinite Projection Matrix" (http://www.terathon.com/gdc07_lengyel.pdf)
float e = 1E-6f;
this._m22(1.0f - e);
this._m32((e - (zZeroToOne ? 1.0f : 2.0f)) * zNear);
} else if (nearInf) {
float e = 1E-6f;
this._m22((zZeroToOne ? 0.0f : 1.0f) - e);
this._m32(((zZeroToOne ? 1.0f : 2.0f) - e) * zFar);
} else {
this._m22((zZeroToOne ? zFar : zFar + zNear) / (zFar - zNear));
this._m32((zZeroToOne ? zFar : zFar + zFar) * zNear / (zNear - zFar));
}
// m23 = 1 marks the left-handed perspective divide (w takes +z).
this._m23(1.0f);
_properties(PROPERTY_PERSPECTIVE);
return this;
} }
|
public class class_name {
// Left-handed perspective projection with optional infinite near/far planes.
// NOTE(review): dependency-annotated duplicate of the preceding block; the trailing
// "// depends on control dependency: ..." markers are dataset labels — keep them attached to their statements.
public Matrix4f setPerspectiveLH(float fovy, float aspect, float zNear, float zFar, boolean zZeroToOne) {
MemUtil.INSTANCE.zero(this);
float h = (float) Math.tan(fovy * 0.5f);
this._m00(1.0f / (h * aspect));
this._m11(1.0f / h);
boolean farInf = zFar > 0 && Float.isInfinite(zFar);
boolean nearInf = zNear > 0 && Float.isInfinite(zNear);
if (farInf) {
// See: "Infinite Projection Matrix" (http://www.terathon.com/gdc07_lengyel.pdf)
float e = 1E-6f;
this._m22(1.0f - e); // depends on control dependency: [if], data = [none]
this._m32((e - (zZeroToOne ? 1.0f : 2.0f)) * zNear); // depends on control dependency: [if], data = [none]
} else if (nearInf) {
float e = 1E-6f;
this._m22((zZeroToOne ? 0.0f : 1.0f) - e); // depends on control dependency: [if], data = [none]
this._m32(((zZeroToOne ? 1.0f : 2.0f) - e) * zFar); // depends on control dependency: [if], data = [none]
} else {
this._m22((zZeroToOne ? zFar : zFar + zNear) / (zFar - zNear)); // depends on control dependency: [if], data = [none]
this._m32((zZeroToOne ? zFar : zFar + zFar) * zNear / (zNear - zFar)); // depends on control dependency: [if], data = [none]
}
this._m23(1.0f);
_properties(PROPERTY_PERSPECTIVE);
return this;
} }
|
public class class_name {
    /**
     * Cleans both pipe directions: discards partially written outgoing
     * messages (flushing anything already complete upstream) and drains any
     * half-read incoming message.
     */
    private void cleanPipes()
    {
        assert (pipe != null);
        // Drop half-processed outgoing messages; push pending complete ones upstream.
        pipe.rollback();
        pipe.flush();
        // Pull frames until the partially read incoming message is fully consumed.
        while (incompleteIn) {
            if (pullMsg() == null) {
                // A null frame while a message is still incomplete would be a protocol error.
                assert (!incompleteIn);
                break;
            }
        }
    }
}
|
public class class_name {
// Rolls back/flushes the out pipe and drains any half-read incoming message.
// NOTE(review): dependency-annotated duplicate of the preceding block; the trailing
// "// depends on control dependency: ..." markers are dataset labels — keep them attached to their statements.
private void cleanPipes()
{
assert (pipe != null);
// Get rid of half-processed messages in the out pipe. Flush any
// unflushed messages upstream.
pipe.rollback();
pipe.flush();
// Remove any half-read message from the in pipe.
while (incompleteIn) {
Msg msg = pullMsg();
if (msg == null) {
assert (!incompleteIn); // depends on control dependency: [if], data = [none]
break;
}
// msg.close ();
}
} }
|
public class class_name {
    /**
     * Streams a local file to the HTTP response, or sets status 404 when the
     * file does not exist.
     *
     * <p>Fixes: (1) the Content-Disposition header was previously set AFTER the
     * body had been written and flushed — response headers must be set before
     * the response is committed, so the download header never took effect;
     * (2) the return value of {@code InputStream.read} was ignored — a read may
     * return fewer bytes than requested, yet the full buffer was written,
     * potentially emitting stale/zero bytes. The copy loop now writes exactly
     * the bytes read.
     *
     * @param response     target HTTP response
     * @param local        path of the local file to send
     * @param isDownloaded when true, mark the response as an attachment download
     * @throws IOException on any I/O failure while reading or writing
     */
    public static void responseFile(HttpServletResponse response, String local, boolean isDownloaded) throws IOException {
        if (!Checker.isExists(local)) {
            response.setStatus(404);
            return;
        }
        File file = new File(local);
        if (isDownloaded) {
            // Header must precede any body output, otherwise it is silently dropped.
            String fn = new String(file.getName().getBytes(StandardCharsets.UTF_8), StandardCharsets.ISO_8859_1);
            response.setHeader("Content-Disposition", "attachment;filename=" + fn);
        }
        try (FileInputStream in = new FileInputStream(file); ServletOutputStream os = response.getOutputStream()) {
            byte[] buffer = new byte[8192];
            int bytesRead;
            while ((bytesRead = in.read(buffer)) != -1) {
                os.write(buffer, 0, bytesRead);
            }
            os.flush();
        }
    }
}
|
public class class_name {
// Streams a local file to the HTTP response (404 when missing); optionally marks it as a download.
// NOTE(review): dependency-annotated duplicate of the preceding block; the trailing
// "// depends on control dependency: ..." markers are dataset labels — keep them attached to their statements.
public static void responseFile(HttpServletResponse response, String local, boolean isDownloaded) throws IOException {
if (Checker.isExists(local)) {
File file = new File(local);
try (FileInputStream in = new FileInputStream(file); ServletOutputStream os = response.getOutputStream()) {
byte[] b;
while (in.available() > 0) {
b = in.available() > 1024 ? new byte[1024] : new byte[in.available()]; // depends on control dependency: [while], data = [none]
in.read(b, 0, b.length); // depends on control dependency: [while], data = [none]
os.write(b, 0, b.length); // depends on control dependency: [while], data = [none]
}
os.flush();
}
if (isDownloaded) {
String fn = new String(file.getName().getBytes(StandardCharsets.UTF_8), StandardCharsets.ISO_8859_1);
response.setHeader("Content-Disposition", "attachment;filename=" + fn);
}
} else {
response.setStatus(404);
}
} }
|
public class class_name {
    /**
     * Applies the configured button text color to whichever dialog buttons
     * currently exist. A no-op when no color has been configured.
     */
    private void adaptButtonTextColor() {
        if (buttonTextColor == null) {
            return;
        }
        if (positiveButton != null) {
            positiveButton.setTextColor(buttonTextColor);
        }
        if (neutralButton != null) {
            neutralButton.setTextColor(buttonTextColor);
        }
        if (negativeButton != null) {
            negativeButton.setTextColor(buttonTextColor);
        }
    }
}
|
public class class_name {
// Applies buttonTextColor to whichever dialog buttons exist.
// NOTE(review): dependency-annotated duplicate of the preceding block; the trailing
// "// depends on control dependency: ..." markers are dataset labels — keep them attached to their statements.
private void adaptButtonTextColor() {
if (buttonTextColor != null) {
if (positiveButton != null) {
positiveButton.setTextColor(buttonTextColor); // depends on control dependency: [if], data = [none]
}
if (neutralButton != null) {
neutralButton.setTextColor(buttonTextColor); // depends on control dependency: [if], data = [none]
}
if (negativeButton != null) {
negativeButton.setTextColor(buttonTextColor); // depends on control dependency: [if], data = [none]
}
}
} }
|
public class class_name {
    /**
     * Copies {@code m2} into {@code m1} at the row/column positions selected
     * by {@code r} and {@code c}: {@code m1[r[i]][c[j]] = m2[i][j]}.
     *
     * @param m1 destination matrix, written in place
     * @param r  destination row indices (one per row of m2)
     * @param c  destination column indices (one per column of m2)
     * @param m2 source values
     */
    public static void setMatrix(final double[][] m1, final int[] r, final int[] c, final double[][] m2) {
        for (int i = 0; i < r.length; i++) {
            for (int j = 0; j < c.length; j++) {
                m1[r[i]][c[j]] = m2[i][j];
            }
        }
    }
}
|
public class class_name {
// Copies m2 into m1 at the row/column positions given by r and c.
// NOTE(review): dependency-annotated duplicate of the preceding block; the trailing
// "// depends on control dependency: ..." markers are dataset labels — keep them attached to their statements.
public static void setMatrix(final double[][] m1, final int[] r, final int[] c, final double[][] m2) {
for(int i = 0; i < r.length; i++) {
final double[] row1 = m1[r[i]], row2 = m2[i];
for(int j = 0; j < c.length; j++) {
row1[c[j]] = row2[j]; // depends on control dependency: [for], data = [j]
}
}
} }
|
public class class_name {
static public String
backslashEscape(String s, String wrt)
{
if(wrt == null)
wrt = BACKSLASHESCAPE;
StringBuilder escaped = new StringBuilder();
for(int i = 0; i < s.length(); i++) {
char c = s.charAt(i);
if(c < ' ' || c == 127) {
escaped.append('\\');
switch (c) {
case '\r':
c = 'r';
break;
case '\n':
c = 'n';
break;
case '\t':
c = 't';
break;
case '\f':
c = 'f';
break;
default:
escaped.append('x');
escaped.append(Escape.toHex((int) c));
continue; /* since this is a string */
}
} else if(c == '\\' || wrt.indexOf(c) >= 0)
escaped.append('\\');
escaped.append(c);
}
return escaped.toString();
} }
|
public class class_name {
// Backslash-escapes control characters and characters listed in wrt (default BACKSLASHESCAPE when null).
// NOTE(review): dependency-annotated duplicate of the preceding block; the trailing
// "// depends on control dependency: ..." markers are dataset labels — keep them attached to their statements.
static public String
backslashEscape(String s, String wrt)
{
if(wrt == null)
wrt = BACKSLASHESCAPE;
StringBuilder escaped = new StringBuilder();
for(int i = 0; i < s.length(); i++) {
char c = s.charAt(i);
if(c < ' ' || c == 127) {
escaped.append('\\'); // depends on control dependency: [if], data = [none]
switch (c) {
case '\r':
c = 'r';
break;
case '\n':
c = 'n';
break;
case '\t':
c = 't';
break;
case '\f':
c = 'f';
break;
default:
escaped.append('x');
escaped.append(Escape.toHex((int) c));
continue; /* since this is a string */
}
} else if(c == '\\' || wrt.indexOf(c) >= 0)
escaped.append('\\');
escaped.append(c); // depends on control dependency: [for], data = [none]
}
return escaped.toString();
} }
|
public class class_name {
    /**
     * Registers a layer as a member once: both the layer's reference and the
     * layer itself are recorded, and duplicates are ignored.
     *
     * @param layer layer to register
     */
    public void addMember(PdfLayer layer) {
        if (layers.contains(layer)) {
            return;
        }
        members.add(layer.getRef());
        layers.add(layer);
    }
}
|
public class class_name {
// Adds a layer (and its reference) once, ignoring duplicates.
// NOTE(review): dependency-annotated duplicate of the preceding block; the trailing
// "// depends on control dependency: ..." markers are dataset labels — keep them attached to their statements.
public void addMember(PdfLayer layer) {
if (!layers.contains(layer)) {
members.add(layer.getRef()); // depends on control dependency: [if], data = [none]
layers.add(layer); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
    /**
     * Locates the flushed operation that failed with an optimistic locking
     * conflict, derived from the batch executor's successful-result count.
     *
     * @param operationsToFlush operations submitted in the failed flush, in order
     * @param cause             failure raised by the flush
     * @return the failing operation when it is an optimistic locking conflict, otherwise null
     */
    private DbOperation hasOptimisticLockingException(List<DbOperation> operationsToFlush, Throwable cause) {
        BatchExecutorException batchException = ExceptionUtil.findBatchExecutorException(cause);
        if (batchException == null) {
            return null;
        }
        // The first operation without a successful result is the one that failed.
        int failedIndex = batchException.getSuccessfulBatchResults().size();
        if (failedIndex >= operationsToFlush.size()) {
            return null;
        }
        DbOperation failedOperation = operationsToFlush.get(failedIndex);
        return isOptimisticLockingException(failedOperation, cause) ? failedOperation : null;
    }
}
|
public class class_name {
// Returns the flushed operation that failed with an optimistic locking conflict, else null.
// NOTE(review): dependency-annotated duplicate of the preceding block; the trailing
// "// depends on control dependency: ..." markers are dataset labels — keep them attached to their statements.
private DbOperation hasOptimisticLockingException(List<DbOperation> operationsToFlush, Throwable cause) {
BatchExecutorException batchExecutorException = ExceptionUtil.findBatchExecutorException(cause);
if (batchExecutorException != null) {
int failedOperationIndex = batchExecutorException.getSuccessfulBatchResults().size();
if (failedOperationIndex < operationsToFlush.size()) {
DbOperation failedOperation = operationsToFlush.get(failedOperationIndex);
if (isOptimisticLockingException(failedOperation, cause)) {
return failedOperation; // depends on control dependency: [if], data = [none]
}
}
}
return null;
} }
|
public class class_name {
    /**
     * Prints a Java array-initializer literal for the given strings, e.g.
     * {@code {"a","b"}}, escaping each element via {@code pj}. A null array
     * prints as {@code null}; an empty array prints as {@code {}}.
     *
     * @param ss strings to print, may be null
     * @return this stream, for chaining
     */
    public SBPrintStream toJavaStringInit(String[] ss) {
        if (ss == null) {
            return p("null");
        }
        p('{');
        for (int i = 0; i < ss.length; i++) {
            if (i > 0) {
                p(',');
            }
            p('"').pj(ss[i]).p('"');
        }
        return p('}');
    }
}
|
public class class_name {
// Prints a Java array-initializer literal for the given strings (null array prints "null").
// NOTE(review): dependency-annotated duplicate of the preceding block; the trailing
// "// depends on control dependency: ..." markers are dataset labels — keep them attached to their statements.
public SBPrintStream toJavaStringInit(String[] ss) {
if (ss == null) {
return p("null"); // depends on control dependency: [if], data = [none]
}
p('{');
for (int i = 0; i < ss.length - 1; i++) {
p('"').pj(ss[i]).p("\","); // depends on control dependency: [for], data = [i]
}
if (ss.length > 0) {
p('"').pj(ss[ss.length - 1]).p('"'); // depends on control dependency: [if], data = [none]
}
return p('}');
} }
|
public class class_name {
    /**
     * Visits an annotation: first its property, then its value, then each
     * annotation attached to it.
     *
     * @param node annotation to walk
     */
    @Override
    public void visit(@Nonnull OWLAnnotation node) {
        node.getProperty().accept(this);
        node.getValue().accept(this);
        for (OWLAnnotation nested : node.getAnnotations()) {
            nested.accept(this);
        }
    }
}
|
public class class_name {
// Visits an annotation's property, value, and nested annotations in turn.
// NOTE(review): dependency-annotated duplicate of the preceding block; the trailing
// "// depends on control dependency: ..." markers are dataset labels — keep them attached to their statements.
@Override
public void visit(@Nonnull OWLAnnotation node) {
node.getProperty().accept(this);
node.getValue().accept(this);
for (OWLAnnotation anno : node.getAnnotations()) {
anno.accept(this); // depends on control dependency: [for], data = [anno]
}
} }
|
public class class_name {
// Cache-aware finder: returns the CPDefinitionLink rows matching (CProductId, type), optionally
// paginated and ordered. Results are served from finderCache when valid, otherwise built via a
// JPQL query and written back to the cache.
// NOTE(review): looks like Liferay Service Builder-generated persistence code — confirm before
// hand-editing, as regeneration would overwrite changes.
@Override
public List<CPDefinitionLink> findByCP_T(long CProductId, String type,
int start, int end,
OrderByComparator<CPDefinitionLink> orderByComparator,
boolean retrieveFromCache) {
boolean pagination = true;
FinderPath finderPath = null;
Object[] finderArgs = null;
// Unpaginated, unordered lookups use the cheaper cache key without pagination args.
if ((start == QueryUtil.ALL_POS) && (end == QueryUtil.ALL_POS) &&
(orderByComparator == null)) {
pagination = false;
finderPath = FINDER_PATH_WITHOUT_PAGINATION_FIND_BY_CP_T;
finderArgs = new Object[] { CProductId, type };
}
else {
finderPath = FINDER_PATH_WITH_PAGINATION_FIND_BY_CP_T;
finderArgs = new Object[] {
CProductId, type,
start, end, orderByComparator
};
}
List<CPDefinitionLink> list = null;
if (retrieveFromCache) {
list = (List<CPDefinitionLink>)finderCache.getResult(finderPath,
finderArgs, this);
// Verify every cached row actually matches the finder arguments; otherwise treat as a miss.
if ((list != null) && !list.isEmpty()) {
for (CPDefinitionLink cpDefinitionLink : list) {
if ((CProductId != cpDefinitionLink.getCProductId()) ||
!Objects.equals(type, cpDefinitionLink.getType())) {
list = null;
break;
}
}
}
}
// Cache miss (or stale entry): build and execute the JPQL query.
if (list == null) {
StringBundler query = null;
if (orderByComparator != null) {
query = new StringBundler(4 +
(orderByComparator.getOrderByFields().length * 2));
}
else {
query = new StringBundler(4);
}
query.append(_SQL_SELECT_CPDEFINITIONLINK_WHERE);
query.append(_FINDER_COLUMN_CP_T_CPRODUCTID_2);
boolean bindType = false;
// null and empty-string types use dedicated SQL fragments; only real values are bound.
if (type == null) {
query.append(_FINDER_COLUMN_CP_T_TYPE_1);
}
else if (type.equals("")) {
query.append(_FINDER_COLUMN_CP_T_TYPE_3);
}
else {
bindType = true;
query.append(_FINDER_COLUMN_CP_T_TYPE_2);
}
if (orderByComparator != null) {
appendOrderByComparator(query, _ORDER_BY_ENTITY_ALIAS,
orderByComparator);
}
else
if (pagination) {
query.append(CPDefinitionLinkModelImpl.ORDER_BY_JPQL);
}
String sql = query.toString();
Session session = null;
try {
session = openSession();
Query q = session.createQuery(sql);
QueryPos qPos = QueryPos.getInstance(q);
qPos.add(CProductId);
if (bindType) {
qPos.add(type);
}
if (!pagination) {
list = (List<CPDefinitionLink>)QueryUtil.list(q,
getDialect(), start, end, false);
Collections.sort(list);
list = Collections.unmodifiableList(list);
}
else {
list = (List<CPDefinitionLink>)QueryUtil.list(q,
getDialect(), start, end);
}
cacheResult(list);
finderCache.putResult(finderPath, finderArgs, list);
}
catch (Exception e) {
// Drop the (possibly half-written) cache entry before propagating.
finderCache.removeResult(finderPath, finderArgs);
throw processException(e);
}
finally {
closeSession(session);
}
}
return list;
} }
|
public class class_name {
    /**
     * Returns an ordered range of {@code CPDefinitionLink} rows matching the
     * given CProduct ID and type, optionally served from the finder cache.
     *
     * @param CProductId the CProduct ID to match
     * @param type the link type to match; {@code null} and {@code ""} each
     *        select a dedicated SQL fragment instead of a bind parameter
     * @param start lower bound of the range ({@code QueryUtil.ALL_POS} for none)
     * @param end upper bound of the range ({@code QueryUtil.ALL_POS} for none)
     * @param orderByComparator optional ordering; when absent and paginating,
     *        the entity's default ORDER BY is appended
     * @param retrieveFromCache whether to consult the finder cache first
     * @return the matching links (never {@code null})
     */
    @Override
    public List<CPDefinitionLink> findByCP_T(long CProductId, String type,
        int start, int end,
        OrderByComparator<CPDefinitionLink> orderByComparator,
        boolean retrieveFromCache) {
        boolean pagination = true;
        FinderPath finderPath = null;
        Object[] finderArgs = null;
        // An unbounded, unordered query can use the cheaper finder path whose
        // cache key omits the pagination arguments.
        if ((start == QueryUtil.ALL_POS) && (end == QueryUtil.ALL_POS) &&
                (orderByComparator == null)) {
            pagination = false; // depends on control dependency: [if], data = [none]
            finderPath = FINDER_PATH_WITHOUT_PAGINATION_FIND_BY_CP_T; // depends on control dependency: [if], data = [none]
            finderArgs = new Object[] { CProductId, type }; // depends on control dependency: [if], data = [none]
        }
        else {
            finderPath = FINDER_PATH_WITH_PAGINATION_FIND_BY_CP_T; // depends on control dependency: [if], data = [none]
            finderArgs = new Object[] {
                    CProductId, type,
                    start, end, orderByComparator
                }; // depends on control dependency: [if], data = [none]
        }
        List<CPDefinitionLink> list = null;
        if (retrieveFromCache) {
            list = (List<CPDefinitionLink>)finderCache.getResult(finderPath,
                    finderArgs, this); // depends on control dependency: [if], data = [none]
            // Guard against stale cache entries: discard the cached list if any
            // element no longer matches the requested CProductId/type.
            if ((list != null) && !list.isEmpty()) {
                for (CPDefinitionLink cpDefinitionLink : list) {
                    if ((CProductId != cpDefinitionLink.getCProductId()) ||
                            !Objects.equals(type, cpDefinitionLink.getType())) {
                        list = null; // depends on control dependency: [if], data = [none]
                        break;
                    }
                }
            }
        }
        // Cache miss (or invalidated hit): build and run the JPQL query.
        if (list == null) {
            StringBundler query = null;
            if (orderByComparator != null) {
                query = new StringBundler(4 +
                        (orderByComparator.getOrderByFields().length * 2)); // depends on control dependency: [if], data = [none]
            }
            else {
                query = new StringBundler(4); // depends on control dependency: [if], data = [none]
            }
            query.append(_SQL_SELECT_CPDEFINITIONLINK_WHERE); // depends on control dependency: [if], data = [none]
            query.append(_FINDER_COLUMN_CP_T_CPRODUCTID_2); // depends on control dependency: [if], data = [none]
            boolean bindType = false;
            if (type == null) {
                query.append(_FINDER_COLUMN_CP_T_TYPE_1); // depends on control dependency: [if], data = [none]
            }
            else if (type.equals("")) {
                query.append(_FINDER_COLUMN_CP_T_TYPE_3); // depends on control dependency: [if], data = [none]
            }
            else {
                bindType = true; // depends on control dependency: [if], data = [none]
                query.append(_FINDER_COLUMN_CP_T_TYPE_2); // depends on control dependency: [if], data = [none]
            }
            if (orderByComparator != null) {
                appendOrderByComparator(query, _ORDER_BY_ENTITY_ALIAS,
                    orderByComparator); // depends on control dependency: [if], data = [none]
            }
            else
             if (pagination) {
                query.append(CPDefinitionLinkModelImpl.ORDER_BY_JPQL); // depends on control dependency: [if], data = [none]
            }
            String sql = query.toString();
            Session session = null;
            try {
                session = openSession(); // depends on control dependency: [try], data = [none]
                Query q = session.createQuery(sql);
                QueryPos qPos = QueryPos.getInstance(q);
                qPos.add(CProductId); // depends on control dependency: [try], data = [none]
                if (bindType) {
                    qPos.add(type); // depends on control dependency: [if], data = [none]
                }
                if (!pagination) {
                    // Unpaginated path: sort in memory and return an
                    // unmodifiable snapshot for safe caching.
                    list = (List<CPDefinitionLink>)QueryUtil.list(q,
                            getDialect(), start, end, false); // depends on control dependency: [if], data = [none]
                    Collections.sort(list); // depends on control dependency: [if], data = [none]
                    list = Collections.unmodifiableList(list); // depends on control dependency: [if], data = [none]
                }
                else {
                    list = (List<CPDefinitionLink>)QueryUtil.list(q,
                            getDialect(), start, end); // depends on control dependency: [if], data = [none]
                }
                cacheResult(list); // depends on control dependency: [try], data = [none]
                finderCache.putResult(finderPath, finderArgs, list); // depends on control dependency: [try], data = [none]
            }
            catch (Exception e) {
                // Drop the (possibly partial) cache entry before rethrowing.
                finderCache.removeResult(finderPath, finderArgs);
                throw processException(e);
            } // depends on control dependency: [catch], data = [none]
            finally {
                closeSession(session);
            }
        }
        return list;
    } }
|
public class class_name {
    /**
     * Merges the in-memory content spec held in {@code processorData} into its
     * persisted entity, creating a new entity first when {@code create} is set.
     * The merge updates type, locale, property tags, global tags, child nodes
     * and relationships, saving the entity twice (once after the child merge,
     * once with the log message after relationships are resolved).
     *
     * @param providerFactory source of the content spec/property tag/tag providers
     * @param processorData carries the spec, username and log message
     * @param create whether a missing entity should be created rather than
     *        treated as an error
     * @throws ProcessingException if the existing spec cannot be found (when
     *         not creating) or the save fails
     * @throws Exception propagated from the underlying providers
     */
    protected void mergeAndSaveContentSpec(final DataProviderFactory providerFactory, final ProcessorData processorData,
            boolean create) throws Exception {
        // Get the providers
        final ContentSpecProvider contentSpecProvider = providerFactory.getProvider(ContentSpecProvider.class);
        final PropertyTagProvider propertyTagProvider = providerFactory.getProvider(PropertyTagProvider.class);
        final TagProvider tagProvider = providerFactory.getProvider(TagProvider.class);
        // Create the temporary entity to store changes in and load the real entity if it exists.
        final ContentSpec contentSpec = processorData.getContentSpec();
        ContentSpecWrapper contentSpecEntity = null;
        if (contentSpec.getId() != null) {
            contentSpecEntity = contentSpecProvider.getContentSpec(contentSpec.getId());
        } else if (create) {
            contentSpecEntity = contentSpecProvider.newContentSpec();
            // setup the basic values
            // Fall back to the server default locale when the spec carries none.
            final LocaleWrapper locale = contentSpec.getLocale() != null ?
                    EntityUtilities.findLocaleFromString(serverSettings.getLocales(), contentSpec.getLocale())
                    : serverSettings.getDefaultLocale();
            contentSpecEntity.setLocale(locale);
            if (processorData.getUsername() != null) {
                // Add the added by property tag
                final UpdateableCollectionWrapper<PropertyTagInContentSpecWrapper> propertyTagCollection = propertyTagProvider
                        .newPropertyTagInContentSpecCollection(contentSpecEntity);
                // Create the new property tag
                final PropertyTagWrapper addedByProperty = propertyTagProvider.getPropertyTag(serverEntities.getAddedByPropertyTagId());
                final PropertyTagInContentSpecWrapper propertyTag = propertyTagProvider.newPropertyTagInContentSpec(addedByProperty,
                        contentSpecEntity);
                propertyTag.setValue(processorData.getUsername());
                propertyTagCollection.addNewItem(propertyTag);
                // Set the updated properties for the content spec
                contentSpecEntity.setProperties(propertyTagCollection);
            }
        } else {
            throw new ProcessingException("Unable to find the existing Content Specification");
        }
        // Check that the type still matches
        final int typeId = BookType.getBookTypeId(contentSpec.getBookType());
        if (contentSpecEntity.getType() == null || !contentSpecEntity.getType().equals(typeId)) {
            contentSpecEntity.setType(typeId);
        }
        // Snapshot the existing child nodes before any save mutates them.
        final ArrayList<CSNodeWrapper> contentSpecNodes = new ArrayList<CSNodeWrapper>();
        if (contentSpecEntity.getChildren() != null) {
            contentSpecNodes.addAll(contentSpecEntity.getChildren().getItems());
        }
        // Create the content spec entity so that we have a valid reference to add nodes to
        if (create) {
            contentSpecEntity = contentSpecProvider.createContentSpec(contentSpecEntity);
        }
        // Check that the content spec was updated/created successfully.
        if (contentSpecEntity == null) {
            throw new ProcessingException("Saving the updated Content Specification failed.");
        }
        contentSpec.setId(contentSpecEntity.getId());
        // Set the bug links last validated property
        if (processorData.isBugLinksReValidated()) {
            // Get the collection to use
            final UpdateableCollectionWrapper<PropertyTagInContentSpecWrapper> propertyTagCollection = contentSpecEntity.getProperties()
                    == null ? propertyTagProvider.newPropertyTagInContentSpecCollection(
                    contentSpecEntity) : contentSpecEntity.getProperties();
            // Check if the property already exists and if so remove it, and then create a new one to ensure it a revision is created
            for (final PropertyTagInContentSpecWrapper propertyTag : propertyTagCollection.getItems()) {
                if (propertyTag.getId().equals(serverEntities.getBugLinksLastValidatedPropertyTagId())) {
                    propertyTag.setValue(Long.toString(new Date().getTime()));
                    propertyTagCollection.remove(propertyTag);
                    propertyTagCollection.addRemoveItem(propertyTag);
                }
            }
            // Add the new tag
            final PropertyTagWrapper lastUpdatedProperty = propertyTagProvider.getPropertyTag(
                    serverEntities.getBugLinksLastValidatedPropertyTagId());
            final PropertyTagInContentSpecWrapper propertyTag = propertyTagProvider.newPropertyTagInContentSpec(lastUpdatedProperty,
                    contentSpecEntity);
            propertyTag.setValue(Long.toString(new Date().getTime()));
            propertyTagCollection.addNewItem(propertyTag);
            // Set the updated properties for the content spec
            contentSpecEntity.setProperties(propertyTagCollection);
        }
        // Add any global book tags
        mergeGlobalOptions(contentSpecEntity, contentSpec, tagProvider);
        // Get the list of transformable child nodes for processing
        final List<Node> nodes = getTransformableNodes(contentSpec.getNodes());
        nodes.addAll(getTransformableNodes(contentSpec.getBaseLevel().getChildNodes()));
        // Merge the base level and comments
        final Map<SpecNode, CSNodeWrapper> nodeMapping = new HashMap<SpecNode, CSNodeWrapper>();
        mergeChildren(nodes, contentSpecNodes, providerFactory, null, contentSpecEntity, nodeMapping);
        contentSpecProvider.updateContentSpec(contentSpecEntity);
        // Merge the relationships now all nodes have a mapping to a database node
        mergeRelationships(nodeMapping, providerFactory);
        contentSpecProvider.updateContentSpec(contentSpecEntity, processorData.getLogMessage());
    } }
|
public class class_name {
    /**
     * Merges the in-memory content spec held in {@code processorData} into its
     * persisted entity, creating a new entity first when {@code create} is set.
     * Updates type, locale, property tags, global tags, child nodes and
     * relationships, then saves the entity (twice: after the child merge and
     * again with the log message).
     *
     * @throws ProcessingException if the existing spec cannot be found (when
     *         not creating) or the save fails
     * @throws Exception propagated from the underlying providers
     */
    protected void mergeAndSaveContentSpec(final DataProviderFactory providerFactory, final ProcessorData processorData,
            boolean create) throws Exception {
        // Get the providers
        final ContentSpecProvider contentSpecProvider = providerFactory.getProvider(ContentSpecProvider.class);
        final PropertyTagProvider propertyTagProvider = providerFactory.getProvider(PropertyTagProvider.class);
        final TagProvider tagProvider = providerFactory.getProvider(TagProvider.class);
        // Create the temporary entity to store changes in and load the real entity if it exists.
        final ContentSpec contentSpec = processorData.getContentSpec();
        ContentSpecWrapper contentSpecEntity = null;
        if (contentSpec.getId() != null) {
            contentSpecEntity = contentSpecProvider.getContentSpec(contentSpec.getId());
        } else if (create) {
            contentSpecEntity = contentSpecProvider.newContentSpec();
            // setup the basic values
            final LocaleWrapper locale = contentSpec.getLocale() != null ?
                    EntityUtilities.findLocaleFromString(serverSettings.getLocales(), contentSpec.getLocale())
                    : serverSettings.getDefaultLocale();
            contentSpecEntity.setLocale(locale);
            if (processorData.getUsername() != null) {
                // Add the added by property tag
                final UpdateableCollectionWrapper<PropertyTagInContentSpecWrapper> propertyTagCollection = propertyTagProvider
                        .newPropertyTagInContentSpecCollection(contentSpecEntity);
                // Create the new property tag
                final PropertyTagWrapper addedByProperty = propertyTagProvider.getPropertyTag(serverEntities.getAddedByPropertyTagId());
                final PropertyTagInContentSpecWrapper propertyTag = propertyTagProvider.newPropertyTagInContentSpec(addedByProperty,
                        contentSpecEntity);
                propertyTag.setValue(processorData.getUsername()); // depends on control dependency: [if], data = [(processorData.getUsername()]
                propertyTagCollection.addNewItem(propertyTag); // depends on control dependency: [if], data = [none]
                // Set the updated properties for the content spec
                contentSpecEntity.setProperties(propertyTagCollection); // depends on control dependency: [if], data = [none]
            }
        } else {
            throw new ProcessingException("Unable to find the existing Content Specification");
        }
        // Check that the type still matches
        final int typeId = BookType.getBookTypeId(contentSpec.getBookType());
        if (contentSpecEntity.getType() == null || !contentSpecEntity.getType().equals(typeId)) {
            contentSpecEntity.setType(typeId);
        }
        // Snapshot the existing child nodes before any save mutates them.
        final ArrayList<CSNodeWrapper> contentSpecNodes = new ArrayList<CSNodeWrapper>();
        if (contentSpecEntity.getChildren() != null) {
            contentSpecNodes.addAll(contentSpecEntity.getChildren().getItems());
        }
        // Create the content spec entity so that we have a valid reference to add nodes to
        if (create) {
            contentSpecEntity = contentSpecProvider.createContentSpec(contentSpecEntity);
        }
        // Check that the content spec was updated/created successfully.
        if (contentSpecEntity == null) {
            throw new ProcessingException("Saving the updated Content Specification failed.");
        }
        contentSpec.setId(contentSpecEntity.getId());
        // Set the bug links last validated property
        if (processorData.isBugLinksReValidated()) {
            // Get the collection to use
            final UpdateableCollectionWrapper<PropertyTagInContentSpecWrapper> propertyTagCollection = contentSpecEntity.getProperties()
                    == null ? propertyTagProvider.newPropertyTagInContentSpecCollection(
                    contentSpecEntity) : contentSpecEntity.getProperties();
            // Check if the property already exists and if so remove it, and then create a new one to ensure it a revision is created
            for (final PropertyTagInContentSpecWrapper propertyTag : propertyTagCollection.getItems()) {
                if (propertyTag.getId().equals(serverEntities.getBugLinksLastValidatedPropertyTagId())) {
                    propertyTag.setValue(Long.toString(new Date().getTime()));
                    propertyTagCollection.remove(propertyTag);
                    propertyTagCollection.addRemoveItem(propertyTag);
                }
            }
            // Add the new tag
            final PropertyTagWrapper lastUpdatedProperty = propertyTagProvider.getPropertyTag(
                    serverEntities.getBugLinksLastValidatedPropertyTagId());
            final PropertyTagInContentSpecWrapper propertyTag = propertyTagProvider.newPropertyTagInContentSpec(lastUpdatedProperty,
                    contentSpecEntity);
            propertyTag.setValue(Long.toString(new Date().getTime()));
            propertyTagCollection.addNewItem(propertyTag);
            // Set the updated properties for the content spec
            contentSpecEntity.setProperties(propertyTagCollection);
        }
        // Add any global book tags
        mergeGlobalOptions(contentSpecEntity, contentSpec, tagProvider);
        // Get the list of transformable child nodes for processing
        final List<Node> nodes = getTransformableNodes(contentSpec.getNodes());
        nodes.addAll(getTransformableNodes(contentSpec.getBaseLevel().getChildNodes()));
        // Merge the base level and comments
        final Map<SpecNode, CSNodeWrapper> nodeMapping = new HashMap<SpecNode, CSNodeWrapper>();
        mergeChildren(nodes, contentSpecNodes, providerFactory, null, contentSpecEntity, nodeMapping);
        contentSpecProvider.updateContentSpec(contentSpecEntity);
        // Merge the relationships now all nodes have a mapping to a database node
        mergeRelationships(nodeMapping, providerFactory);
        contentSpecProvider.updateContentSpec(contentSpecEntity, processorData.getLogMessage());
    } }
|
public class class_name {
    /**
     * Returns the object reference for the static object identified by
     * {@code name} on the given CPU, deploying it on first use.
     * The per-CPU key is {@code name + cpuId.getNumber()}.
     *
     * @param name  the static object's name
     * @param cpuId the CPU the object lives on
     * @return the (possibly freshly created) object reference
     */
    protected synchronized Long getStaticId(String name, CPUResource cpuId)
    {
        final String key = name + cpuId.getNumber();
        if (!staticIds.containsKey(key))
        {
            // First request for this name/CPU pair: queue a deploy message and
            // remember the reference it was assigned.
            final RTDeployStaticMessage deployMessage = new RTDeployStaticMessage(name, cpuId);
            cachedStaticDeploys.add(deployMessage);
            staticIds.put(key, deployMessage.getObjectReference());
        }
        return staticIds.get(key);
    } }
|
public class class_name {
    /**
     * Returns the object reference for the static object identified by
     * {@code name} on the given CPU, deploying it on first use.
     *
     * @param name  the static object's name
     * @param cpuId the CPU the object lives on; its number is appended to the
     *        name to form the per-CPU cache key
     * @return the (possibly freshly created) object reference
     */
    protected synchronized Long getStaticId(String name, CPUResource cpuId)
    {
        String nameFinal = name + cpuId.getNumber();
        if (staticIds.containsKey(nameFinal))
        {
            // Already deployed for this CPU: reuse the cached reference.
            return staticIds.get(nameFinal); // depends on control dependency: [if], data = [none]
        } else
        {
            // First request: queue a deploy message and cache its reference.
            RTDeployStaticMessage deployMessage = new RTDeployStaticMessage(name, cpuId);
            cachedStaticDeploys.add(deployMessage); // depends on control dependency: [if], data = [none]
            staticIds.put(nameFinal, deployMessage.getObjectReference()); // depends on control dependency: [if], data = [none]
            return deployMessage.getObjectReference(); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Shuts down all pooled NGSessions and waits for them to terminate.
     * Under the lock, marks the pool done and drains both the working and the
     * idle pools; then signals every session and joins each one against a
     * shared overall timeout budget.
     *
     * @throws InterruptedException if interrupted while joining a session
     * @throws IllegalStateException if any session is still alive after the
     *         overall termination timeout has elapsed
     */
    void shutdown() throws InterruptedException {
        List<NGSession> allSessions;
        synchronized (lock) {
            done = true;
            allSessions =
                Stream.concat(workingPool.stream(), idlePool.stream()).collect(Collectors.toList());
            idlePool.clear();
            workingPool.clear();
        }
        for (NGSession session : allSessions) {
            session.shutdown();
        }
        // wait for all sessions to complete by either returning from waiting state or finishing their
        // nails
        long start = System.nanoTime();
        for (NGSession session : allSessions) {
            // Remaining budget: the overall timeout minus time already spent
            // joining earlier sessions.
            long timeout =
                NGConstants.SESSION_TERMINATION_TIMEOUT_MILLIS
                    - TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS);
            if (timeout < 1) {
                // Give all threads a chance to finish or pick up already finished threads
                // (Thread.join(0) would wait forever, so clamp to 1 ms).
                timeout = 1;
            }
            session.join(timeout);
            if (session.isAlive()) {
                throw new IllegalStateException(
                    "NGSession has not completed in "
                        + NGConstants.SESSION_TERMINATION_TIMEOUT_MILLIS
                        + " ms");
            }
        }
    } }
|
public class class_name {
    /**
     * Shuts down all pooled NGSessions and waits for them to terminate within
     * a shared overall timeout budget.
     *
     * @throws InterruptedException if interrupted while joining a session
     * @throws IllegalStateException if any session is still alive after the
     *         termination timeout
     */
    void shutdown() throws InterruptedException {
        List<NGSession> allSessions;
        synchronized (lock) {
            done = true;
            allSessions =
                Stream.concat(workingPool.stream(), idlePool.stream()).collect(Collectors.toList());
            idlePool.clear();
            workingPool.clear();
        }
        for (NGSession session : allSessions) {
            session.shutdown();
        }
        // wait for all sessions to complete by either returning from waiting state or finishing their
        // nails
        long start = System.nanoTime();
        for (NGSession session : allSessions) {
            // Remaining budget after joining earlier sessions.
            long timeout =
                NGConstants.SESSION_TERMINATION_TIMEOUT_MILLIS
                    - TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS);
            if (timeout < 1) {
                // Give all threads a chance to finish or pick up already finished threads
                timeout = 1; // depends on control dependency: [if], data = [none]
            }
            session.join(timeout);
            if (session.isAlive()) {
                throw new IllegalStateException(
                    "NGSession has not completed in "
                        + NGConstants.SESSION_TERMINATION_TIMEOUT_MILLIS
                        + " ms");
            }
        }
    } }
|
public class class_name {
    /**
     * Lays out the current month's day partitions as a calendar grid: a list
     * of weeks, each week a 7-element list starting on Sunday. Cells before
     * the first day of the month and after the last are {@code null}.
     *
     * @return the weeks of the selected month, each padded to 7 entries
     * @throws IllegalStateException if no valid month (0-11) is selected
     */
    public List<List<Partition<CaptureSearchResult>>> getCaptureCalendar() {
        if (month < 0 || month > 11)
            throw new IllegalStateException("invalid month");
        Partition<Partition<CaptureSearchResult>> curMonth = monthsByDay.get(month);
        List<Partition<CaptureSearchResult>> monthDays = curMonth.list();
        Calendar cal = Calendar.getInstance(calendarTimeZone);
        cal.setTime(curMonth.getStart());
        // DAY_OF_WEEK field has 1 for SUNDAY. Hence this makes week start Sunday.
        int skipDays = cal.get(Calendar.DAY_OF_WEEK) - 1;
        List<List<Partition<CaptureSearchResult>>> weeks = new ArrayList<List<Partition<CaptureSearchResult>>>();
        List<Partition<CaptureSearchResult>> week = new ArrayList<Partition<CaptureSearchResult>>(7);
        // Pad the first week with null cells up to the month's starting weekday.
        for (int i = 0; i < skipDays; i++) {
            week.add(null);
        }
        for (Partition<CaptureSearchResult> p : monthDays) {
            // A completed week was flushed below; start a fresh one.
            if (week == null)
                week = new ArrayList<Partition<CaptureSearchResult>>(7);
            week.add(p);
            if (week.size() == 7) {
                weeks.add(week);
                week = null;
            }
        }
        // Pad and flush a trailing partial week, if any.
        if (week != null) {
            while (week.size() < 7)
                week.add(null);
            weeks.add(week);
        }
        return weeks;
    } }
|
public class class_name {
    /**
     * Lays out the current month's day partitions as a calendar grid: a list
     * of weeks, each a 7-element list starting on Sunday, with {@code null}
     * padding before the first and after the last day of the month.
     *
     * @return the weeks of the selected month
     * @throws IllegalStateException if no valid month (0-11) is selected
     */
    public List<List<Partition<CaptureSearchResult>>> getCaptureCalendar() {
        if (month < 0 || month > 11)
            throw new IllegalStateException("invalid month");
        Partition<Partition<CaptureSearchResult>> curMonth = monthsByDay.get(month);
        List<Partition<CaptureSearchResult>> monthDays = curMonth.list();
        Calendar cal = Calendar.getInstance(calendarTimeZone);
        cal.setTime(curMonth.getStart());
        // DAY_OF_WEEK field has 1 for SUNDAY. Hence this makes week start Sunday.
        int skipDays = cal.get(Calendar.DAY_OF_WEEK) - 1;
        List<List<Partition<CaptureSearchResult>>> weeks = new ArrayList<List<Partition<CaptureSearchResult>>>();
        List<Partition<CaptureSearchResult>> week = new ArrayList<Partition<CaptureSearchResult>>(7);
        // Pad the first week up to the month's starting weekday.
        for (int i = 0; i < skipDays; i++) {
            week.add(null); // depends on control dependency: [for], data = [none]
        }
        for (Partition<CaptureSearchResult> p : monthDays) {
            if (week == null)
                week = new ArrayList<Partition<CaptureSearchResult>>(7);
            week.add(p); // depends on control dependency: [for], data = [p]
            if (week.size() == 7) {
                weeks.add(week); // depends on control dependency: [if], data = [none]
                week = null; // depends on control dependency: [if], data = [none]
            }
        }
        // Pad and flush a trailing partial week, if any.
        if (week != null) {
            while (week.size() < 7)
                week.add(null);
            weeks.add(week); // depends on control dependency: [if], data = [(week]
        }
        return weeks;
    } }
|
public class class_name {
    /**
     * Reads bytes from {@code buffer} into {@code byteBuffer} until an element
     * delimiter is reached: a space ends the element within the line, a CRLF
     * pair ends the line.
     *
     * @param buffer     the source buffer to consume bytes from
     * @param byteBuffer sink for the element's bytes
     * @return {@code true} when terminated by CRLF, {@code false} when
     *         terminated by a space
     * @throws IOException if writing to {@code byteBuffer} fails
     */
    static boolean readElement(ByteBuf buffer, OutputStream byteBuffer) throws IOException {
        while (true) {
            byte b = buffer.readByte();
            if (b == SP) { // Space ends the element.
                return false;
            }
            if (b == CR) {
                // Peek at the byte after the CR.
                b = buffer.readByte();
                if (b == LF) { // CRLF ends the line.
                    return true;
                }
                // Lone CR: the CR itself is discarded; only the byte that
                // followed it is kept (matches the original behavior).
                byteBuffer.write(b);
            } else {
                byteBuffer.write(b);
            }
        }
    } }
|
public class class_name {
    /**
     * Reads bytes from {@code buffer} into {@code byteBuffer} until a
     * delimiter: a space ends the element ({@code false}); CRLF ends the line
     * ({@code true}). NOTE(review): a CR not followed by LF is itself dropped
     * and only the following byte is written — confirm this is intended.
     *
     * @throws IOException if writing to {@code byteBuffer} fails
     */
    static boolean readElement(ByteBuf buffer, OutputStream byteBuffer) throws IOException {
        for (; ; ) {
            byte next = buffer.readByte();
            if (next == SP) { // Space
                return false; // depends on control dependency: [if], data = [none]
            } else if (next == CR) { // CR
                next = buffer.readByte(); // depends on control dependency: [if], data = [none]
                if (next == LF) { // LF
                    return true; // depends on control dependency: [if], data = [none]
                } else {
                    byteBuffer.write(next); // depends on control dependency: [if], data = [(next]
                }
            } else {
                byteBuffer.write(next); // depends on control dependency: [if], data = [(next]
            }
        }
    } }
|
public class class_name {
    /**
     * Replaces row {@code row} of the sparse matrix with the given values.
     *
     * @param row     index of the row to overwrite
     * @param columns the new row values; must contain exactly {@code cols} entries
     * @throws IllegalArgumentException if {@code columns} has the wrong length
     */
    public void setRow(int row, double[] columns) {
        if (columns.length != cols) {
            throw new IllegalArgumentException(
                "invalid number of columns: " + columns.length);
        }
        // Length was validated above, so the index tracks columns exactly.
        int col = 0;
        for (double value : columns) {
            sparseMatrix[row].set(col++, value);
        }
    } }
|
public class class_name {
    /**
     * Replaces row {@code row} of the sparse matrix with the given values.
     *
     * @param row     index of the row to overwrite
     * @param columns the new row values; must contain exactly {@code cols} entries
     * @throws IllegalArgumentException if {@code columns} has the wrong length
     */
    public void setRow(int row, double[] columns) {
        if (columns.length != cols) {
            throw new IllegalArgumentException(
                "invalid number of columns: " + columns.length);
        }
        for (int col = 0; col < cols; ++col) {
            sparseMatrix[row].set(col, columns[col]); // depends on control dependency: [for], data = [col]
        }
    } }
|
public class class_name {
    /**
     * Initializes the JDBC/Neo4j reader for the given partition.
     * Builds the job config from {@code config}, creates a fresh reader and
     * initializes it, wrapping any failure in a DeepGenericException.
     *
     * @param dp     the partition to read
     * @param config the deep job configuration to initialize from
     * @throws DeepGenericException if the reader cannot be initialized
     */
    @Override
    public void initIterator(Partition dp, S config) {
        jdbcNeo4JDeepJobConfig = initConfig(config, jdbcNeo4JDeepJobConfig);
        this.jdbcReader = new JdbcNeo4JReader(jdbcNeo4JDeepJobConfig);
        try {
            this.jdbcReader.init(dp);
        } catch(Exception e) {
            // Normalize any checked/unchecked failure into the framework's
            // exception type, preserving the cause.
            throw new DeepGenericException("Unable to initialize JdbcReader", e);
        }
    } }
|
public class class_name {
    /**
     * Initializes the JDBC/Neo4j reader for the given partition, wrapping any
     * failure in a DeepGenericException (cause preserved).
     *
     * @param dp     the partition to read
     * @param config the deep job configuration to initialize from
     * @throws DeepGenericException if the reader cannot be initialized
     */
    @Override
    public void initIterator(Partition dp, S config) {
        jdbcNeo4JDeepJobConfig = initConfig(config, jdbcNeo4JDeepJobConfig);
        this.jdbcReader = new JdbcNeo4JReader(jdbcNeo4JDeepJobConfig);
        try {
            this.jdbcReader.init(dp); // depends on control dependency: [try], data = [none]
        } catch(Exception e) {
            throw new DeepGenericException("Unable to initialize JdbcReader", e);
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Returns the CSS unit produced by this Less built-in function, keyed by
     * the function's name ({@code super.toString()}). Known functions map to a
     * fixed unit ("", "rad", "%") or delegate to one of their parameters; for
     * unknown functions the first parameter with a non-empty unit wins.
     *
     * @param formatter formatter used to evaluate parameter values/units
     * @return the unit string, possibly empty
     */
    @Override
    public String unit( CssFormatter formatter ) {
        switch( super.toString() ) {
            case "unit":
                // unit(x) strips the unit; unit(x, u) sets it to u.
                if( parameters.size() <= 1 ) {
                    return "";
                } else {
                    return get( 1 ).stringValue( formatter );
                }
            case "convert":
                return get( 1 ).stringValue( formatter );
            case "sin":
            case "cos":
            case "tan":
            case "length":
                return "";
            case "acos":
            case "asin":
            case "atan":
                return "rad";
            case "pow":
                return get( 0 ).unit( formatter );
            case "alpha":
            case "red":
            case "green":
            case "blue":
            case "rgba":
            case "rgb":
            case "argb":
            case "saturate":
            case "desaturate":
            case "greyscale":
            case "hsl":
            case "hsla":
            case "hue":
            case "spin":
            case "lighten":
            case "darken":
            case "fadein":
            case "fadeout":
            case "fade":
            case "hsv":
            case "hsva":
            case "hsvhue":
            case "contrast":
                // color values has no unit
                return "";
            case "saturation":
            case "lightness":
            case "hsvsaturation":
            case "hsvvalue":
            case "luma":
            case "luminance":
                return "%";
            case "if":
                // if(cond, a, b): unit follows the "true" branch argument.
                return get( 1 ).unit( formatter );
        }
        // Unknown function: inherit the first non-empty parameter unit.
        for( int i = 0; i < parameters.size(); i++ ) {
            String unit = parameters.get( i ).unit( formatter );
            if( !unit.isEmpty() ) {
                return unit;
            }
        }
        return "";
    } }
|
public class class_name {
    /**
     * Returns the CSS unit produced by this Less built-in function, keyed by
     * the function's name; unknown functions inherit the first non-empty
     * parameter unit.
     *
     * @param formatter formatter used to evaluate parameter values/units
     * @return the unit string, possibly empty
     */
    @Override
    public String unit( CssFormatter formatter ) {
        switch( super.toString() ) {
            case "unit":
                // unit(x) strips the unit; unit(x, u) sets it to u.
                if( parameters.size() <= 1 ) {
                    return ""; // depends on control dependency: [if], data = [none]
                } else {
                    return get( 1 ).stringValue( formatter ); // depends on control dependency: [if], data = [none]
                }
            case "convert":
                return get( 1 ).stringValue( formatter );
            case "sin":
            case "cos":
            case "tan":
            case "length":
                return "";
            case "acos":
            case "asin":
            case "atan":
                return "rad";
            case "pow":
                return get( 0 ).unit( formatter );
            case "alpha":
            case "red":
            case "green":
            case "blue":
            case "rgba":
            case "rgb":
            case "argb":
            case "saturate":
            case "desaturate":
            case "greyscale":
            case "hsl":
            case "hsla":
            case "hue":
            case "spin":
            case "lighten":
            case "darken":
            case "fadein":
            case "fadeout":
            case "fade":
            case "hsv":
            case "hsva":
            case "hsvhue":
            case "contrast":
                // color values has no unit
                return "";
            case "saturation":
            case "lightness":
            case "hsvsaturation":
            case "hsvvalue":
            case "luma":
            case "luminance":
                return "%";
            case "if":
                return get( 1 ).unit( formatter );
        }
        // Unknown function: inherit the first non-empty parameter unit.
        for( int i = 0; i < parameters.size(); i++ ) {
            String unit = parameters.get( i ).unit( formatter );
            if( !unit.isEmpty() ) {
                return unit; // depends on control dependency: [if], data = [none]
            }
        }
        return "";
    } }
|
public class class_name {
    /**
     * Sets the toast's progress value, updating both the stored style and the
     * progress bar. If this toast has no progress bar (wrong type), an error
     * is logged and the call is a no-op.
     *
     * @param progress the progress value to display
     * @return this toast, for chaining
     */
    public SuperActivityToast setProgress(int progress) {
        if (this.mProgressBar != null) {
            this.mStyle.progress = progress;
            this.mProgressBar.setProgress(progress);
        } else {
            // No progress bar: the toast was not created with a progress type.
            Log.e(getClass().getName(), "Could not set SuperActivityToast " +
                    "progress, are you sure you set the type to TYPE_PROGRESS_CIRCLE " +
                    "or TYPE_PROGRESS_BAR?");
        }
        return this;
    } }
|
public class class_name {
    /**
     * Sets the toast's progress value, updating both the stored style and the
     * progress bar; logs an error and does nothing when no progress bar exists.
     *
     * @param progress the progress value to display
     * @return this toast, for chaining
     */
    public SuperActivityToast setProgress(int progress) {
        if (this.mProgressBar == null) {
            Log.e(getClass().getName(), "Could not set SuperActivityToast " +
                    "progress, are you sure you set the type to TYPE_PROGRESS_CIRCLE " +
                    "or TYPE_PROGRESS_BAR?");
            // depends on control dependency: [if], data = [none]
            return this;
            // depends on control dependency: [if], data = [none]
        }
        this.mStyle.progress = progress;
        this.mProgressBar.setProgress(progress);
        return this;
    } }
|
public class class_name {
    /**
     * Returns the lowercase hex SHA-1 digest of the serialized form of
     * {@code obj}, or {@code ""} for {@code null} input or when the digest
     * algorithm is unavailable.
     *
     * @param obj the object to hash; may be {@code null}
     * @return 40-character hex digest, or the empty string
     */
    public static String hash(Serializable obj) {
        if (obj == null) {
            return "";
        }
        StringBuilder hexString = new StringBuilder();
        try {
            MessageDigest m = MessageDigest.getInstance("SHA1");
            m.update(SerializationUtils.serialize(obj));
            byte[] mdbytes = m.digest();
            for (byte mdbyte : mdbytes) {
                // Zero-pad every byte to two hex digits. The previous
                // Integer.toHexString(0xFF & b) dropped the leading zero for
                // bytes < 0x10, making distinct digests collide as strings
                // (e.g. 0x01,0x23 and 0x12,0x03 both produced "123").
                hexString.append(String.format("%02x", mdbyte));
            }
        } catch (NoSuchAlgorithmException e) {
            // SHA-1 is required by the JCA spec, so this path is effectively
            // unreachable; keep the original empty-string contract regardless.
            return "";
        }
        return hexString.toString();
    } }
|
public class class_name {
    /**
     * Returns a hex SHA-1 digest of the serialized form of {@code obj}, or
     * {@code ""} for {@code null} input or an unavailable algorithm.
     * NOTE(review): Integer.toHexString drops leading zeros for bytes < 0x10,
     * so the output is not fixed-width and distinct digests can collide as
     * strings — consider zero-padding each byte to two digits.
     *
     * @param obj the object to hash; may be {@code null}
     * @return hex digest string, or the empty string
     */
    public static String hash(Serializable obj) {
        if (obj == null) {
            return ""; // depends on control dependency: [if], data = [none]
        }
        StringBuilder hexString = new StringBuilder();
        try {
            MessageDigest m = MessageDigest.getInstance("SHA1");
            m.update(SerializationUtils.serialize(obj)); // depends on control dependency: [try], data = [none]
            byte[] mdbytes = m.digest();
            for (byte mdbyte : mdbytes) {
                hexString.append(Integer.toHexString(0xFF & mdbyte)); // depends on control dependency: [for], data = [mdbyte]
            }
        } catch (NoSuchAlgorithmException e) {
            return "";
        } // depends on control dependency: [catch], data = [none]
        return hexString.toString();
    } }
|
public class class_name {
    /**
     * Collects the declared fields of {@code cls} and of every class in its
     * hierarchy (as produced by {@code buildClassHierarchy}), in hierarchy
     * order.
     *
     * @param cls the class whose field hierarchy is wanted
     * @return all declared fields across the hierarchy
     */
    private static ArrayList<Field> getFieldHierarchy(Class<?> cls) {
        ArrayList<Field> allFields = new ArrayList<Field>();
        for (Class<?> current : buildClassHierarchy(cls)) {
            for (Field field : current.getDeclaredFields()) {
                allFields.add(field);
            }
        }
        return allFields;
    } }
|
public class class_name {
    /**
     * Collects the declared fields of every class in the hierarchy built for
     * {@code cls}, in hierarchy order.
     *
     * @param cls the class whose field hierarchy is wanted
     * @return all declared fields across the hierarchy
     */
    private static ArrayList<Field> getFieldHierarchy(Class<?> cls) {
        ArrayList<Field> list = new ArrayList<Field>();
        for (Class<?> cl : buildClassHierarchy(cls)) {
            list.addAll(Arrays.asList(cl.getDeclaredFields())); // depends on control dependency: [for], data = [cl]
        }
        return list;
    } }
|
public class class_name {
    /**
     * Fetches this retention policy's assignments from the Box API as a lazy
     * iterable, optionally filtered by assignment type and restricted to the
     * given response fields.
     *
     * @param type   assignment type filter, or {@code null} for all types
     * @param limit  page size for the paged iterable
     * @param fields response fields to request; empty means server defaults
     * @return an iterable that pages through the assignments on demand
     */
    private Iterable<BoxRetentionPolicyAssignment.Info> getAssignments(String type, int limit, String ... fields) {
        QueryStringBuilder queryString = new QueryStringBuilder();
        if (type != null) {
            queryString.appendParam("type", type);
        }
        if (fields.length > 0) {
            queryString.appendParam("fields", fields);
        }
        URL url = ASSIGNMENTS_URL_TEMPLATE.buildWithQuery(getAPI().getBaseURL(), queryString.toString(), getID());
        return new BoxResourceIterable<BoxRetentionPolicyAssignment.Info>(getAPI(), url, limit) {
            // Materializes each JSON page entry into an assignment Info object.
            @Override
            protected BoxRetentionPolicyAssignment.Info factory(JsonObject jsonObject) {
                BoxRetentionPolicyAssignment assignment
                    = new BoxRetentionPolicyAssignment(getAPI(), jsonObject.get("id").asString());
                return assignment.new Info(jsonObject);
            }
        };
    } }
|
public class class_name {
    /**
     * Fetches this retention policy's assignments from the Box API as a lazy
     * paged iterable, optionally filtered by type and restricted to fields.
     *
     * @param type   assignment type filter, or {@code null} for all types
     * @param limit  page size for the paged iterable
     * @param fields response fields to request; empty means server defaults
     * @return an iterable that pages through the assignments on demand
     */
    private Iterable<BoxRetentionPolicyAssignment.Info> getAssignments(String type, int limit, String ... fields) {
        QueryStringBuilder queryString = new QueryStringBuilder();
        if (type != null) {
            queryString.appendParam("type", type);
            // depends on control dependency: [if], data = [none]
        }
        if (fields.length > 0) {
            queryString.appendParam("fields", fields);
            // depends on control dependency: [if], data = [none]
        }
        URL url = ASSIGNMENTS_URL_TEMPLATE.buildWithQuery(getAPI().getBaseURL(), queryString.toString(), getID());
        return new BoxResourceIterable<BoxRetentionPolicyAssignment.Info>(getAPI(), url, limit) {
            // Materializes each JSON page entry into an assignment Info object.
            @Override
            protected BoxRetentionPolicyAssignment.Info factory(JsonObject jsonObject) {
                BoxRetentionPolicyAssignment assignment
                    = new BoxRetentionPolicyAssignment(getAPI(), jsonObject.get("id").asString());
                return assignment.new Info(jsonObject);
            }
        };
    } }
|
public class class_name {
    /**
     * Removes all registrations for the given keystore config: drops it from
     * the id/pid maps and the KeyStoreManager caches, and unregisters every
     * SSL repertoire that references it as key store or trust store.
     *
     * @param config the keystore configuration being removed
     */
    protected synchronized void unsetKeyStore(KeystoreConfig config) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
            Tr.event(tc, "Removing keystore: " + config.getId());
        }
        keystoreIdMap.remove(config.getId());
        keystorePidMap.remove(config.getPid());
        KeyStoreManager.getInstance().clearKeyStoreFromMap(config.getId());
        KeyStoreManager.getInstance().clearKeyStoreFromMap(config.getPid());
        // Iterator-based removal so entries can be dropped while traversing.
        for (Iterator<Map.Entry<String, RepertoireConfigService>> it = repertoireMap.entrySet().iterator(); it.hasNext();) {
            RepertoireConfigService rep = it.next().getValue();
            // Identity comparison: a repertoire is removed only if it holds
            // this exact config instance.
            if (rep.getKeyStore() == config || rep.getTrustStore() == config) {
                it.remove();
                repertoirePropertiesMap.remove(rep.getAlias());
                repertoirePIDMap.remove(rep.getPID());
            }
        }
    } }
|
public class class_name {
    /**
     * Removes all registrations for the given keystore config: id/pid maps,
     * KeyStoreManager caches, and every SSL repertoire referencing it.
     *
     * @param config the keystore configuration being removed
     */
    protected synchronized void unsetKeyStore(KeystoreConfig config) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
            Tr.event(tc, "Removing keystore: " + config.getId()); // depends on control dependency: [if], data = [none]
        }
        keystoreIdMap.remove(config.getId());
        keystorePidMap.remove(config.getPid());
        KeyStoreManager.getInstance().clearKeyStoreFromMap(config.getId());
        KeyStoreManager.getInstance().clearKeyStoreFromMap(config.getPid());
        // Iterator-based removal so entries can be dropped while traversing.
        for (Iterator<Map.Entry<String, RepertoireConfigService>> it = repertoireMap.entrySet().iterator(); it.hasNext();) {
            RepertoireConfigService rep = it.next().getValue();
            if (rep.getKeyStore() == config || rep.getTrustStore() == config) {
                it.remove(); // depends on control dependency: [if], data = [none]
                repertoirePropertiesMap.remove(rep.getAlias()); // depends on control dependency: [if], data = [none]
                repertoirePIDMap.remove(rep.getPID()); // depends on control dependency: [if], data = [none]
            }
        }
    } }
|
public class class_name {
    /**
     * Pushes a batch of queued local revisions to the remote database.
     * <p>
     * Builds the doc-ID -> rev-ID map that the remote {@code _revs_diff}
     * endpoint expects, POSTs it asynchronously, and in the completion
     * callback uploads only the revisions the remote reported missing:
     * attachment-bearing revisions may go through a multipart upload,
     * everything else is buffered and flushed in {@code _bulk_docs} batches.
     * Revisions the remote already has are removed from the pending set.
     *
     * @param changes the local revisions queued for push
     */
    @Override
    @InterfaceAudience.Private
    protected void processInbox(final RevisionList changes) {
        Log.v(TAG, "processInbox() changes=" + changes.size());
        // Generate a set of doc/rev IDs in the JSON format that _revs_diff wants:
        // <http://wiki.apache.org/couchdb/HttpPostRevsDiff>
        Map<String, List<String>> diffs = new HashMap<String, List<String>>();
        for (RevisionInternal rev : changes) {
            String docID = rev.getDocID();
            List<String> revs = diffs.get(docID);
            if (revs == null) {
                revs = new ArrayList<String>();
                diffs.put(docID, revs);
            }
            revs.add(rev.getRevID());
            // mark as in-flight; removePending() is called once the remote
            // either has the revision or it is filtered out below
            addPending(rev);
        }
        // Call _revs_diff on the target db:
        Log.v(TAG, "%s: posting to /_revs_diff", this);
        CustomFuture future = sendAsyncRequest("POST", "_revs_diff", diffs, new RemoteRequestCompletion() {
            @Override
            public void onCompletion(RemoteRequest remoteRequest, Response httpResponse, Object response, Throwable e) {
                Log.v(TAG, "%s: got /_revs_diff response", this);
                Map<String, Object> results = (Map<String, Object>) response;
                if (e != null) {
                    setError(e);
                } else {
                    if (results.size() != 0) {
                        // Go through the list of local changes again, selecting the ones the destination server
                        // said were missing and mapping them to a JSON dictionary in the form _bulk_docs wants:
                        List<Object> docsToSend = new ArrayList<Object>();
                        RevisionList revsToSend = new RevisionList();
                        long bufferedSize = 0;
                        for (RevisionInternal rev : changes) {
                            // Is this revision in the server's 'missing' list?
                            Map<String, Object> properties = null;
                            Map<String, Object> revResults = (Map<String, Object>) results.get(rev.getDocID());
                            if (revResults == null) {
                                removePending(rev);
                                continue;
                            }
                            List<String> revs = (List<String>) revResults.get("missing");
                            if (revs == null || !revs.contains(rev.getRevID())) {
                                removePending(rev);
                                continue;
                            }
                            // NOTE: force to load body by Database.loadRevisionBody()
                            // In SQLiteStore.loadRevisionBody() does not load data from database
                            // if sequence != 0 && body != null
                            rev.setSequence(0);
                            rev.setBody(null);
                            RevisionInternal loadedRev;
                            try {
                                loadedRev = db.loadRevisionBody(rev);
                            } catch (CouchbaseLiteException e1) {
                                Log.w(TAG, "%s Couldn't get local contents of %s", rev, PusherInternal.this);
                                continue;
                            }
                            if (loadedRev.getPropertyForKey("_removed") != null &&
                                    ((Boolean) loadedRev.getPropertyForKey("_removed")).booleanValue()) {
                                // Filter out _removed revision:
                                removePending(rev);
                                continue;
                            }
                            RevisionInternal populatedRev = transformRevision(loadedRev);
                            List<String> possibleAncestors = (List<String>) revResults.get("possible_ancestors");
                            properties = new HashMap<String, Object>(populatedRev.getProperties());
                            Map<String, Object> revisions = db.getRevisionHistoryDictStartingFromAnyAncestor(populatedRev, possibleAncestors);
                            properties.put("_revisions", revisions);
                            populatedRev.setProperties(properties);
                            // Strip any attachments already known to the target db:
                            if (properties.containsKey("_attachments")) {
                                // Look for the latest common ancestor and stub out older attachments:
                                int minRevPos = findCommonAncestor(populatedRev, possibleAncestors);
                                Status status = new Status(Status.OK);
                                if (!db.expandAttachments(populatedRev, minRevPos + 1, !dontSendMultipart, false, status)) {
                                    Log.w(TAG, "%s: Couldn't expand attachments of %s", this, populatedRev);
                                    continue;
                                }
                                properties = populatedRev.getProperties();
                                // multipart path handles its own pending bookkeeping;
                                // skip the _bulk_docs buffer for this revision
                                if (!dontSendMultipart && uploadMultipartRevision(populatedRev)) {
                                    continue;
                                }
                            }
                            if (properties == null || !properties.containsKey("_id")) {
                                throw new IllegalStateException("properties must contain a document _id");
                            }
                            revsToSend.add(rev);
                            docsToSend.add(properties);
                            bufferedSize += JSONUtils.estimate(properties);
                            // flush early so a single _bulk_docs request stays bounded
                            if (bufferedSize > kMaxBulkDocsObjectSize) {
                                uploadBulkDocs(docsToSend, revsToSend);
                                docsToSend = new ArrayList<Object>();
                                revsToSend = new RevisionList();
                                bufferedSize = 0;
                            }
                        }
                        // Post the revisions to the destination:
                        uploadBulkDocs(docsToSend, revsToSend);
                    } else {
                        // None of the revisions are new to the remote
                        for (RevisionInternal revisionInternal : changes) {
                            removePending(revisionInternal);
                        }
                    }
                }
            }
        });
        future.setQueue(pendingFutures);
        pendingFutures.add(future);
        pauseOrResume();
    } }
|
public class class_name {
    /**
     * Pushes a batch of queued local revisions to the remote database.
     * (Annotated copy: the trailing "depends on control dependency" comments
     * are generated analysis data and are preserved verbatim.)
     *
     * @param changes the local revisions queued for push
     */
    @Override
    @InterfaceAudience.Private
    protected void processInbox(final RevisionList changes) {
        Log.v(TAG, "processInbox() changes=" + changes.size());
        // Generate a set of doc/rev IDs in the JSON format that _revs_diff wants:
        // <http://wiki.apache.org/couchdb/HttpPostRevsDiff>
        Map<String, List<String>> diffs = new HashMap<String, List<String>>();
        for (RevisionInternal rev : changes) {
            String docID = rev.getDocID();
            List<String> revs = diffs.get(docID);
            if (revs == null) {
                revs = new ArrayList<String>(); // depends on control dependency: [if], data = [none]
                diffs.put(docID, revs); // depends on control dependency: [if], data = [none]
            }
            revs.add(rev.getRevID()); // depends on control dependency: [for], data = [rev]
            addPending(rev); // depends on control dependency: [for], data = [rev]
        }
        // Call _revs_diff on the target db:
        Log.v(TAG, "%s: posting to /_revs_diff", this);
        CustomFuture future = sendAsyncRequest("POST", "_revs_diff", diffs, new RemoteRequestCompletion() {
            @Override
            public void onCompletion(RemoteRequest remoteRequest, Response httpResponse, Object response, Throwable e) {
                Log.v(TAG, "%s: got /_revs_diff response", this);
                Map<String, Object> results = (Map<String, Object>) response;
                if (e != null) {
                    setError(e); // depends on control dependency: [if], data = [(e]
                } else {
                    if (results.size() != 0) {
                        // Go through the list of local changes again, selecting the ones the destination server
                        // said were missing and mapping them to a JSON dictionary in the form _bulk_docs wants:
                        List<Object> docsToSend = new ArrayList<Object>();
                        RevisionList revsToSend = new RevisionList();
                        long bufferedSize = 0;
                        for (RevisionInternal rev : changes) {
                            // Is this revision in the server's 'missing' list?
                            Map<String, Object> properties = null;
                            Map<String, Object> revResults = (Map<String, Object>) results.get(rev.getDocID());
                            if (revResults == null) {
                                removePending(rev); // depends on control dependency: [if], data = [none]
                                continue;
                            }
                            List<String> revs = (List<String>) revResults.get("missing");
                            if (revs == null || !revs.contains(rev.getRevID())) {
                                removePending(rev); // depends on control dependency: [if], data = [none]
                                continue;
                            }
                            // NOTE: force to load body by Database.loadRevisionBody()
                            // In SQLiteStore.loadRevisionBody() does not load data from database
                            // if sequence != 0 && body != null
                            rev.setSequence(0); // depends on control dependency: [for], data = [rev]
                            rev.setBody(null); // depends on control dependency: [for], data = [rev]
                            RevisionInternal loadedRev;
                            try {
                                loadedRev = db.loadRevisionBody(rev); // depends on control dependency: [try], data = [none]
                            } catch (CouchbaseLiteException e1) {
                                Log.w(TAG, "%s Couldn't get local contents of %s", rev, PusherInternal.this);
                                continue;
                            }
                            if (loadedRev.getPropertyForKey("_removed") != null &&
                                    ((Boolean) loadedRev.getPropertyForKey("_removed")).booleanValue()) {
                                // Filter out _removed revision:
                                removePending(rev);
                                continue;
                            }
                            RevisionInternal populatedRev = transformRevision(loadedRev);
                            List<String> possibleAncestors = (List<String>) revResults.get("possible_ancestors");
                            properties = new HashMap<String, Object>(populatedRev.getProperties());
                            Map<String, Object> revisions = db.getRevisionHistoryDictStartingFromAnyAncestor(populatedRev, possibleAncestors);
                            properties.put("_revisions", revisions);
                            populatedRev.setProperties(properties);
                            // Strip any attachments already known to the target db:
                            if (properties.containsKey("_attachments")) {
                                // Look for the latest common ancestor and stub out older attachments:
                                int minRevPos = findCommonAncestor(populatedRev, possibleAncestors);
                                Status status = new Status(Status.OK);
                                if (!db.expandAttachments(populatedRev, minRevPos + 1, !dontSendMultipart, false, status)) {
                                    Log.w(TAG, "%s: Couldn't expand attachments of %s", this, populatedRev);
                                    continue;
                                } // depends on control dependency: [catch], data = [none]
                                properties = populatedRev.getProperties(); // depends on control dependency: [for], data = [none]
                                if (!dontSendMultipart && uploadMultipartRevision(populatedRev)) {
                                    continue;
                                }
                            }
                            if (properties == null || !properties.containsKey("_id")) {
                                throw new IllegalStateException("properties must contain a document _id");
                            }
                            revsToSend.add(rev); // depends on control dependency: [if], data = [none]
                            docsToSend.add(properties); // depends on control dependency: [if], data = [none]
                            bufferedSize += JSONUtils.estimate(properties); // depends on control dependency: [if], data = [none]
                            if (bufferedSize > kMaxBulkDocsObjectSize) {
                                uploadBulkDocs(docsToSend, revsToSend); // depends on control dependency: [if], data = [none]
                                docsToSend = new ArrayList<Object>(); // depends on control dependency: [if], data = [none]
                                revsToSend = new RevisionList(); // depends on control dependency: [if], data = [none]
                                bufferedSize = 0; // depends on control dependency: [if], data = [none]
                            }
                        }
                        // Post the revisions to the destination:
                        uploadBulkDocs(docsToSend, revsToSend); // depends on control dependency: [if], data = [none]
                    } else {
                        // None of the revisions are new to the remote
                        for (RevisionInternal revisionInternal : changes) {
                            removePending(revisionInternal); // depends on control dependency: [for], data = [revisionInternal]
                        }
                    }
                }
            }
        });
        future.setQueue(pendingFutures);
        pendingFutures.add(future);
        pauseOrResume();
    } }
|
public class class_name {
    /**
     * Decodes a hexadecimal string into its raw byte representation.
     *
     * @param hexString hex characters, two per output byte (case-insensitive)
     * @return the decoded bytes; an empty array for an empty input
     * @throws IllegalArgumentException if the input has an odd length
     *         (previously the trailing nibble was silently dropped)
     * @throws NumberFormatException if the input contains a non-hex character
     */
    public static byte[] hexToBytes(String hexString) {
        if (hexString.length() % 2 != 0) {
            throw new IllegalArgumentException(
                    "Hex string must have an even length: " + hexString.length());
        }
        byte[] result = new byte[hexString.length() / 2];
        for (int i = 0; i < result.length; ++i) {
            int offset = i * 2;
            // parse via Integer: values 0x80-0xFF would overflow Byte.parseByte
            result[i] = (byte) Integer.parseInt(
                    hexString.substring(offset, offset + 2), 16);
        }
        return result;
    }
}
|
public class class_name {
    /**
     * Decodes a hex string into bytes, two characters per byte.
     * NOTE(review): an odd-length input silently drops its final character;
     * non-hex characters raise NumberFormatException from Integer.parseInt.
     *
     * @param hexString hexadecimal text to decode
     * @return the decoded byte array
     */
    public static byte[] hexToBytes(String hexString) {
        byte[] result = new byte[hexString.length() / 2];
        for (int i = 0; i < result.length; ++i) {
            int offset = i * 2;
            result[i] = (byte) Integer.parseInt(hexString.substring(offset,
                offset + 2), 16); // depends on control dependency: [for], data = [i]
        }
        return result;
    } }
|
public class class_name {
    /**
     * Collects the textual content of every element in the current scope:
     * the raw data for {@code script} elements, the rendered text for all
     * other elements.
     *
     * @param scope evaluation scope supplying the element context
     * @return an {@code XValue} wrapping the collected strings
     */
    @Override
    public XValue call(Scope scope) {
        List<String> collected = new LinkedList<>();
        for (Element element : scope.context()) {
            boolean isScript = "script".equals(element.nodeName());
            collected.add(isScript ? element.data() : element.text());
        }
        return XValue.create(collected);
    }
}
|
public class class_name {
    /**
     * Collects the textual content of every element in the scope's context:
     * raw data for "script" elements, rendered text otherwise.
     *
     * @param scope evaluation scope supplying the element context
     * @return an XValue wrapping the collected strings
     */
    @Override
    public XValue call(Scope scope) {
        List<String> res = new LinkedList<>();
        for (Element e:scope.context()){
            if ("script".equals(e.nodeName())){
                res.add(e.data()); // depends on control dependency: [if], data = [none]
            }else {
                res.add(e.text()); // depends on control dependency: [if], data = [none]
            }
        }
        return XValue.create(res);
    } }
|
public class class_name {
    /**
     * Lazily resolves and caches the IOB EClass from the registered AFPLib
     * EPackage (classifier index 277).
     *
     * @return the IOB EClass
     */
    public EClass getIOB() {
        if (iobEClass != null) {
            return iobEClass;
        }
        iobEClass = (EClass) EPackage.Registry.INSTANCE
                .getEPackage(AfplibPackage.eNS_URI)
                .getEClassifiers().get(277);
        return iobEClass;
    }
}
|
public class class_name {
    /**
     * Lazily resolves and caches the IOB EClass from the registered AFPLib
     * EPackage (classifier index 277).
     *
     * @return the IOB EClass
     */
    public EClass getIOB() {
        if (iobEClass == null) {
            iobEClass = (EClass)EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(277); // depends on control dependency: [if], data = [none]
        }
        return iobEClass;
    } }
|
public class class_name {
    /**
     * Creates a new name with the given element appended, or returns this
     * instance unchanged when the element is {@code null}.
     *
     * @param elementValue the element to append, may be null
     * @return a name with the element appended
     */
    public ConfigurationPropertyName append(String elementValue) {
        if (elementValue != null) {
            return new ConfigurationPropertyName(
                    this.elements.append(probablySingleElementOf(elementValue)));
        }
        return this;
    }
}
|
public class class_name {
    /**
     * Creates a new name with the given element appended, or returns this
     * instance unchanged when the element is null.
     *
     * @param elementValue the element to append, may be null
     * @return a name with the element appended
     */
    public ConfigurationPropertyName append(String elementValue) {
        if (elementValue == null) {
            return this; // depends on control dependency: [if], data = [none]
        }
        Elements additionalElements = probablySingleElementOf(elementValue);
        return new ConfigurationPropertyName(this.elements.append(additionalElements));
    } }
|
public class class_name {
public static boolean equalsSqref(final List<String> sqref1, final List<String> sqref2) {
if(sqref1.size() != sqref2.size()) {
return false;
}
Collections.sort(sqref1);
Collections.sort(sqref2);
final int size = sqref1.size();
for(int i=0; i < size; i++) {
if(!sqref1.get(i).equals(sqref2.get(i))) {
return false;
}
}
return true;
} }
|
public class class_name {
    /**
     * Compares two sqref lists for equality ignoring element order.
     * NOTE(review): sorts BOTH argument lists in place — a visible side
     * effect; fails on unmodifiable lists.
     *
     * @param sqref1 first list of range references
     * @param sqref2 second list of range references
     * @return true if both lists contain the same references
     */
    public static boolean equalsSqref(final List<String> sqref1, final List<String> sqref2) {
        if(sqref1.size() != sqref2.size()) {
            return false;
            // depends on control dependency: [if], data = [none]
        }
        Collections.sort(sqref1);
        Collections.sort(sqref2);
        final int size = sqref1.size();
        for(int i=0; i < size; i++) {
            if(!sqref1.get(i).equals(sqref2.get(i))) {
                return false;
                // depends on control dependency: [if], data = [none]
            }
        }
        return true;
    } }
|
public class class_name {
    /**
     * Registers the Messenger "Get Started" button for new threads.
     * Logs a validation error and does nothing when the payload is missing.
     *
     * @param payload postback payload delivered when the button is pressed
     */
    public static void setGetStartedButton(String payload) {
        boolean missingPayload = payload == null || "".equals(payload);
        if (missingPayload) {
            logger.error("FbBotMill validation error: Get Started Button payload can't be null or empty!");
            return;
        }
        List<Button> buttons = new ArrayList<Button>();
        buttons.add(new PostbackButton(null, ButtonType.POSTBACK, payload));
        FbBotMillNetworkController.postThreadSetting(
                new CallToActionsRequest(ThreadState.NEW_THREAD, buttons));
    }
}
|
public class class_name {
    /**
     * Registers the Messenger "Get Started" button for new threads.
     * Logs a validation error and returns when the payload is missing.
     *
     * @param payload postback payload delivered when the button is pressed
     */
    public static void setGetStartedButton(String payload) {
        if (payload == null || "".equals(payload)) {
            logger.error("FbBotMill validation error: Get Started Button payload can't be null or empty!"); // depends on control dependency: [if], data = [none]
            return; // depends on control dependency: [if], data = [none]
        }
        Button button = new PostbackButton(null, ButtonType.POSTBACK, payload);
        List<Button> buttonList = new ArrayList<Button>();
        buttonList.add(button);
        CallToActionsRequest request = new CallToActionsRequest(
                ThreadState.NEW_THREAD, buttonList);
        FbBotMillNetworkController.postThreadSetting(request);
    } }
|
public class class_name {
    /**
     * If the connection is closing and graceful shutdown has finished,
     * fires the pending close listener exactly once.
     * <p>
     * The field is cleared before notification because this method can be
     * invoked multiple times and the listener must only ever run once.
     *
     * @param future the future passed through to the close listener
     */
    private void checkCloseConnection(ChannelFuture future) {
        if (closeListener == null || !isGracefulShutdownComplete()) {
            return;
        }
        ChannelFutureListener pending = this.closeListener;
        this.closeListener = null;
        try {
            pending.operationComplete(future);
        } catch (Exception e) {
            throw new IllegalStateException("Close listener threw an unexpected exception", e);
        }
    }
}
|
public class class_name {
    /**
     * If the connection is closing and graceful shutdown has finished,
     * fires the pending close listener exactly once (the field is nulled
     * before notification so repeated calls do not re-notify).
     *
     * @param future the future passed through to the close listener
     */
    private void checkCloseConnection(ChannelFuture future) {
        // If this connection is closing and the graceful shutdown has completed, close the connection
        // once this operation completes.
        if (closeListener != null && isGracefulShutdownComplete()) {
            ChannelFutureListener closeListener = this.closeListener;
            // This method could be called multiple times
            // and we don't want to notify the closeListener multiple times.
            this.closeListener = null; // depends on control dependency: [if], data = [none]
            try {
                closeListener.operationComplete(future); // depends on control dependency: [try], data = [none]
            } catch (Exception e) {
                throw new IllegalStateException("Close listener threw an unexpected exception", e);
            } // depends on control dependency: [catch], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Parses command-line arguments, aborting on {@code --help} or when the
     * required configuration (including its agents) is missing afterwards.
     *
     * @param args raw command-line arguments; a null array parses nothing
     */
    public static void parse(String[] args) {
        if (args != null) {
            for (int i = 0; i < args.length; i++) {
                if ("--help".equals(args[i])) {
                    abort(null);
                }
                boolean hasOption = i + 1 < args.length;
                if (hasOption) {
                    parseArgumentWithOption(args[i], args[++i]); // consume the option too
                }
            }
        }
        if (config == null) {
            abort("Configuration file or string was not given");
        } else if (config.getAgents() == null || config.getAgents().isEmpty()) {
            abort("Configuration is missing agents specification");
        }
    }
}
|
public class class_name {
    /**
     * Parses command-line arguments, aborting on --help or when the
     * required configuration (including its agents) is missing afterwards.
     *
     * @param args raw command-line arguments; a null array parses nothing
     */
    public static void parse(String[] args) {
        for (int i = 0; args != null && i < args.length; i++) {
            // First parse args that don't require an option
            if ("--help".equals(args[i])) {
                abort(null); // depends on control dependency: [if], data = [none]
            }
            // Now parse args that must be accompanied by an option
            if (i + 1 < args.length) {
                parseArgumentWithOption(args[i], args[++i]); // force increment the counter // depends on control dependency: [if], data = [none]
            }
        }
        // Abort if required args were not given
        if (config == null) {
            abort("Configuration file or string was not given"); // depends on control dependency: [if], data = [none]
        } else if (config.getAgents() == null || config.getAgents().isEmpty()) {
            abort("Configuration is missing agents specification"); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Enumerates every single-deletion move applicable to the given subset
     * solution. Returns an empty list when the minimum subset size has been
     * reached or when no removable (non-fixed) IDs exist.
     *
     * @param solution the current subset solution
     * @return all applicable deletion moves (possibly empty)
     */
    @Override
    public List<SubsetMove> getAllMoves(SubsetSolution solution) {
        if (minSizeReached(solution)) {
            return Collections.emptyList();
        }
        // candidate IDs for deletion; fixed IDs are already excluded
        Set<Integer> removeCandidates = getRemoveCandidates(solution);
        if (removeCandidates.isEmpty()) {
            return Collections.emptyList();
        }
        List<SubsetMove> moves = new java.util.ArrayList<>(removeCandidates.size());
        for (Integer candidate : removeCandidates) {
            moves.add(new DeletionMove(candidate));
        }
        return moves;
    }
}
|
public class class_name {
    /**
     * Enumerates every single-deletion move applicable to the solution;
     * empty when the minimum size is reached or nothing is removable.
     *
     * @param solution the current subset solution
     * @return all applicable deletion moves (possibly empty)
     */
    @Override
    public List<SubsetMove> getAllMoves(SubsetSolution solution) {
        // check minimum size
        if(minSizeReached(solution)){
            return Collections.emptyList(); // depends on control dependency: [if], data = [none]
        }
        // get set of candidate IDs for deletion (possibly fixed IDs are discarded)
        Set<Integer> removeCandidates = getRemoveCandidates(solution);
        // check if there are any candidates to be removed
        if(removeCandidates.isEmpty()){
            return Collections.emptyList(); // depends on control dependency: [if], data = [none]
        }
        // create deletion move for all candidates
        return removeCandidates.stream()
                               .map(del -> new DeletionMove(del))
                               .collect(Collectors.toList());
    } }
|
public class class_name {
public Long getMaxRequestSize()
{
if (childNode.getTextValueForPatternName("max-request-size") != null && !childNode.getTextValueForPatternName("max-request-size").equals("null")) {
return Long.valueOf(childNode.getTextValueForPatternName("max-request-size"));
}
return null;
} }
|
public class class_name {
    /**
     * Returns the max-request-size value from the descriptor, or null when
     * absent or set to the literal string "null".
     * NOTE(review): the same descriptor lookup runs three times here.
     *
     * @return the parsed size, or null when not configured
     */
    public Long getMaxRequestSize()
    {
        if (childNode.getTextValueForPatternName("max-request-size") != null && !childNode.getTextValueForPatternName("max-request-size").equals("null")) {
            return Long.valueOf(childNode.getTextValueForPatternName("max-request-size")); // depends on control dependency: [if], data = [(childNode.getTextValueForPatternName("max-request-size")]
        }
        return null;
    } }
|
public class class_name {
    /**
     * Fluent setter appending the given schedule actions, lazily creating
     * the backing list (sized to the varargs) on first use.
     *
     * @param scheduleActions actions to append, in order
     * @return this result object, for chaining
     */
    public BatchScheduleActionCreateResult withScheduleActions(ScheduleAction... scheduleActions) {
        if (this.scheduleActions == null) {
            setScheduleActions(new java.util.ArrayList<ScheduleAction>(scheduleActions.length));
        }
        java.util.Collections.addAll(this.scheduleActions, scheduleActions);
        return this;
    }
}
|
public class class_name {
    /**
     * Fluent setter appending the given schedule actions, lazily creating
     * the backing list (sized to the varargs) on first use.
     *
     * @param scheduleActions actions to append, in order
     * @return this result object, for chaining
     */
    public BatchScheduleActionCreateResult withScheduleActions(ScheduleAction... scheduleActions) {
        if (this.scheduleActions == null) {
            setScheduleActions(new java.util.ArrayList<ScheduleAction>(scheduleActions.length)); // depends on control dependency: [if], data = [none]
        }
        for (ScheduleAction ele : scheduleActions) {
            this.scheduleActions.add(ele); // depends on control dependency: [for], data = [ele]
        }
        return this;
    } }
|
public class class_name {
    /**
     * Returns the allowed principals, lazily initializing an empty
     * SdkInternalList on first access. Never returns {@code null}.
     *
     * @return the (possibly empty) list of allowed principals
     */
    public java.util.List<AllowedPrincipal> getAllowedPrincipals() {
        if (allowedPrincipals != null) {
            return allowedPrincipals;
        }
        allowedPrincipals = new com.amazonaws.internal.SdkInternalList<AllowedPrincipal>();
        return allowedPrincipals;
    }
}
|
public class class_name {
    /**
     * Returns the allowed principals, lazily initializing an empty
     * SdkInternalList on first access. Never returns null.
     *
     * @return the (possibly empty) list of allowed principals
     */
    public java.util.List<AllowedPrincipal> getAllowedPrincipals() {
        if (allowedPrincipals == null) {
            allowedPrincipals = new com.amazonaws.internal.SdkInternalList<AllowedPrincipal>(); // depends on control dependency: [if], data = [none]
        }
        return allowedPrincipals;
    } }
|
public class class_name {
    /**
     * Builds an acknowledged {@link BulkWriteResult} snapshot over the given
     * counts and upserts.
     * NOTE(review): {@code getModifiedCount()} unboxes {@code modifiedCount}
     * and will NPE when it is null, even though {@code isModifiedCountAvailable()}
     * unconditionally reports true — confirm callers never pass null here.
     *
     * @param insertedCount number of inserted documents
     * @param matchedCount  number of matched documents
     * @param removedCount  number of removed documents
     * @param modifiedCount number of modified documents (nullable)
     * @param upserts       the upserts performed by the bulk write
     * @return an immutable acknowledged result view
     */
    public static BulkWriteResult acknowledged(final int insertedCount, final int matchedCount, final int removedCount,
                                               final Integer modifiedCount, final List<BulkWriteUpsert> upserts) {
        return new BulkWriteResult() {
            @Override
            public boolean wasAcknowledged() {
                return true;
            }
            @Override
            public int getInsertedCount() {
                return insertedCount;
            }
            @Override
            public int getMatchedCount() {
                return matchedCount;
            }
            @Override
            public int getDeletedCount() {
                return removedCount;
            }
            @Override
            @Deprecated
            public boolean isModifiedCountAvailable() {
                return true;
            }
            @Override
            public int getModifiedCount() {
                // auto-unboxing: NPE if modifiedCount is null
                return modifiedCount;
            }
            @Override
            public List<BulkWriteUpsert> getUpserts() {
                return upserts;
            }
            @Override
            public boolean equals(final Object o) {
                if (this == o) {
                    return true;
                }
                if (o == null || getClass() != o.getClass()) {
                    return false;
                }
                BulkWriteResult that = (BulkWriteResult) o;
                if (!that.wasAcknowledged()) {
                    return false;
                }
                if (insertedCount != that.getInsertedCount()) {
                    return false;
                }
                // null modifiedCount is treated as "matches anything" here
                if (modifiedCount != null && !modifiedCount.equals(that.getModifiedCount())) {
                    return false;
                }
                if (removedCount != that.getDeletedCount()) {
                    return false;
                }
                if (matchedCount != that.getMatchedCount()) {
                    return false;
                }
                if (!upserts.equals(that.getUpserts())) {
                    return false;
                }
                return true;
            }
            @Override
            public int hashCode() {
                int result = upserts.hashCode();
                result = 31 * result + insertedCount;
                result = 31 * result + matchedCount;
                result = 31 * result + removedCount;
                result = 31 * result + (modifiedCount != null ? modifiedCount.hashCode() : 0);
                return result;
            }
            @Override
            public String toString() {
                return "AcknowledgedBulkWriteResult{"
                       + "insertedCount=" + insertedCount
                       + ", matchedCount=" + matchedCount
                       + ", removedCount=" + removedCount
                       + ", modifiedCount=" + modifiedCount
                       + ", upserts=" + upserts
                       + '}';
            }
        };
    } }
|
public class class_name {
    /**
     * Builds an acknowledged BulkWriteResult snapshot over the given counts
     * and upserts. (Annotated copy: trailing "depends on" comments are
     * generated analysis data and preserved verbatim.)
     * NOTE(review): getModifiedCount() unboxes modifiedCount and will NPE
     * when it is null — confirm callers never pass null.
     *
     * @param insertedCount number of inserted documents
     * @param matchedCount  number of matched documents
     * @param removedCount  number of removed documents
     * @param modifiedCount number of modified documents (nullable)
     * @param upserts       the upserts performed by the bulk write
     * @return an immutable acknowledged result view
     */
    public static BulkWriteResult acknowledged(final int insertedCount, final int matchedCount, final int removedCount,
                                               final Integer modifiedCount, final List<BulkWriteUpsert> upserts) {
        return new BulkWriteResult() {
            @Override
            public boolean wasAcknowledged() {
                return true;
            }
            @Override
            public int getInsertedCount() {
                return insertedCount;
            }
            @Override
            public int getMatchedCount() {
                return matchedCount;
            }
            @Override
            public int getDeletedCount() {
                return removedCount;
            }
            @Override
            @Deprecated
            public boolean isModifiedCountAvailable() {
                return true;
            }
            @Override
            public int getModifiedCount() {
                return modifiedCount;
            }
            @Override
            public List<BulkWriteUpsert> getUpserts() {
                return upserts;
            }
            @Override
            public boolean equals(final Object o) {
                if (this == o) {
                    return true; // depends on control dependency: [if], data = [none]
                }
                if (o == null || getClass() != o.getClass()) {
                    return false; // depends on control dependency: [if], data = [none]
                }
                BulkWriteResult that = (BulkWriteResult) o;
                if (!that.wasAcknowledged()) {
                    return false; // depends on control dependency: [if], data = [none]
                }
                if (insertedCount != that.getInsertedCount()) {
                    return false; // depends on control dependency: [if], data = [none]
                }
                if (modifiedCount != null && !modifiedCount.equals(that.getModifiedCount())) {
                    return false; // depends on control dependency: [if], data = [none]
                }
                if (removedCount != that.getDeletedCount()) {
                    return false; // depends on control dependency: [if], data = [none]
                }
                if (matchedCount != that.getMatchedCount()) {
                    return false; // depends on control dependency: [if], data = [none]
                }
                if (!upserts.equals(that.getUpserts())) {
                    return false; // depends on control dependency: [if], data = [none]
                }
                return true;
            }
            @Override
            public int hashCode() {
                int result = upserts.hashCode();
                result = 31 * result + insertedCount;
                result = 31 * result + matchedCount;
                result = 31 * result + removedCount;
                result = 31 * result + (modifiedCount != null ? modifiedCount.hashCode() : 0);
                return result;
            }
            @Override
            public String toString() {
                return "AcknowledgedBulkWriteResult{"
                       + "insertedCount=" + insertedCount
                       + ", matchedCount=" + matchedCount
                       + ", removedCount=" + removedCount
                       + ", modifiedCount=" + modifiedCount
                       + ", upserts=" + upserts
                       + '}';
            }
        };
    } }
|
public class class_name {
    /**
     * Registers an event to be fired for the given hook.
     *
     * @param k the hook to attach the event to
     * @param n the event to add
     * @return true if the event was not already registered for this hook
     */
    public boolean addEvent(Hook k, Event n) {
        // computeIfAbsent replaces the get/null-check/put dance and avoids
        // a second hash lookup
        return events.computeIfAbsent(k, key -> new HashSet<>()).add(n);
    }
}
|
public class class_name {
    /**
     * Registers an event to be fired for the given hook, lazily creating
     * the hook's event set on first use.
     *
     * @param k the hook to attach the event to
     * @param n the event to add
     * @return true if the event was not already registered for this hook
     */
    public boolean addEvent(Hook k, Event n) {
        Set<Event> l = events.get(k);
        if (l == null) {
            l = new HashSet<>(); // depends on control dependency: [if], data = [none]
            events.put(k, l); // depends on control dependency: [if], data = [none]
        }
        return l.add(n);
    } }
|
public class class_name {
    /**
     * Removes the named attributes from this model together with their
     * dirty-tracking flags.
     *
     * @param attrs attribute names to drop; a null array is a no-op
     * @return this model, for chaining
     */
    public M remove(String... attrs) {
        if (attrs == null) {
            return (M) this;
        }
        for (String name : attrs) {
            this.attrs.remove(name);
            this._getModifyFlag().remove(name);
        }
        return (M) this;
    }
}
|
public class class_name {
    /**
     * Removes the named attributes from this model together with their
     * dirty-tracking flags.
     *
     * @param attrs attribute names to drop; a null array is a no-op
     * @return this model, for chaining
     */
    public M remove(String... attrs) {
        if (attrs != null)
            for (String a : attrs) {
                this.attrs.remove(a);
                // depends on control dependency: [for], data = [a]
                this._getModifyFlag().remove(a);
                // depends on control dependency: [for], data = [a]
            }
        return (M)this;
    } }
|
public class class_name {
    /**
     * Applies the configured value transformer to a MARC field.
     * <p>
     * Picks the transformer registered for the field's tag/indicator key,
     * falling back to the {@code DEFAULT} entry; returns the field unchanged
     * when no transformers are configured or none matches. Only the
     * subfields listed for the key (all of them when the key is not in the
     * subfield map) have their values transformed; the rest are copied
     * as-is.
     *
     * @param field the field to transform
     * @return a transformed copy, or the original field when untouched
     */
    public MarcField transformValue(MarcField field) {
        String key = field.toTagIndicatorKey();
        if (marcValueTransformerMap.isEmpty()) {
            return field;
        }
        final MarcValueTransformer transformer = marcValueTransformerMap.containsKey(key)
                ? marcValueTransformerMap.get(key)
                : marcValueTransformerMap.get(DEFAULT);
        if (transformer == null) {
            return field;
        }
        MarcField.Builder builder = MarcField.builder();
        builder.tag(field.getTag()).indicator(field.getIndicator());
        if (field.getValue() != null) {
            builder.value(transformer.transform(field.getValue()));
        }
        // ids of the subfields whose values should be transformed
        final String transformableIds = subfieldMap.containsKey(key)
                ? subfieldMap.get(key)
                : field.getSubfieldIds();
        field.getSubfields().forEach(subfield -> {
            String value = subfield.getValue();
            builder.subfield(subfield.getId(),
                    transformableIds.contains(subfield.getId())
                            ? transformer.transform(value)
                            : value);
        });
        return builder.build();
    }
}
|
public class class_name {
    /**
     * Applies the configured value transformer (per tag/indicator key, with
     * DEFAULT fallback) to a MARC field, transforming only the subfields
     * configured for that key. Returns the field unchanged when no
     * transformer applies.
     *
     * @param field the field to transform
     * @return a transformed copy, or the original field when untouched
     */
    public MarcField transformValue(MarcField field) {
        String key = field.toTagIndicatorKey();
        if (marcValueTransformerMap.isEmpty()) {
            return field; // depends on control dependency: [if], data = [none]
        }
        final MarcValueTransformer transformer = marcValueTransformerMap.containsKey(key) ?
                marcValueTransformerMap.get(key) : marcValueTransformerMap.get(DEFAULT);
        if (transformer != null) {
            MarcField.Builder builder = MarcField.builder();
            builder.tag(field.getTag()).indicator(field.getIndicator()); // depends on control dependency: [if], data = [none]
            if (field.getValue() != null) {
                builder.value(transformer.transform(field.getValue())); // depends on control dependency: [if], data = [(field.getValue()]
            }
            // select only subfields configured for this tag
            String subs = subfieldMap.containsKey(key) ? subfieldMap.get(key) : field.getSubfieldIds();
            field.getSubfields().forEach(subfield ->
                    builder.subfield(subfield.getId(), subs.contains(subfield.getId()) ?
                            transformer.transform(subfield.getValue()) : subfield.getValue())); // depends on control dependency: [if], data = [none]
            return builder.build(); // depends on control dependency: [if], data = [none]
        }
        return field;
    } }
|
public class class_name {
    /**
     * Builds a JavaMail {@code Session} from the configured account,
     * attaching a user/password authenticator when the account requires
     * authentication.
     *
     * @param isSingleton true to reuse the JVM-wide default session,
     *                    false to create an independent one
     * @return the mail session
     */
    private Session getSession(boolean isSingleton) {
        final MailAccount account = this.mailAccount;
        final Authenticator authenticator = account.isAuth()
                ? new UserPassAuthenticator(account.getUser(), account.getPass())
                : null;
        if (isSingleton) {
            return Session.getDefaultInstance(account.getSmtpProps(), authenticator);
        }
        return Session.getInstance(account.getSmtpProps(), authenticator);
    }
}
|
public class class_name {
    /**
     * Builds a JavaMail Session from the configured account, attaching a
     * user/password authenticator when the account requires authentication.
     *
     * @param isSingleton true to reuse the JVM-wide default session,
     *                    false to create an independent one
     * @return the mail session
     */
    private Session getSession(boolean isSingleton) {
        final MailAccount mailAccount = this.mailAccount;
        Authenticator authenticator = null;
        if (mailAccount.isAuth()) {
            authenticator = new UserPassAuthenticator(mailAccount.getUser(), mailAccount.getPass());
            // depends on control dependency: [if], data = [none]
        }
        return isSingleton ? Session.getDefaultInstance(mailAccount.getSmtpProps(), authenticator) //
                : Session.getInstance(mailAccount.getSmtpProps(), authenticator);
    } }
|
public class class_name {
    /**
     * Maps a Maven profile {@code <activation>} section onto descriptor
     * nodes: the JDK/active-by-default flags plus the optional file, OS and
     * property triggers. A null activation is a no-op.
     *
     * @param mavenProfileDescriptor descriptor receiving the activation
     * @param activation             the parsed activation, may be null
     * @param store                  store used to create descriptor nodes
     */
    private void addActivation(MavenProfileDescriptor mavenProfileDescriptor, Activation activation, Store store) {
        if (activation == null) {
            return;
        }
        MavenProfileActivationDescriptor activationDescriptor = store.create(MavenProfileActivationDescriptor.class);
        mavenProfileDescriptor.setActivation(activationDescriptor);
        activationDescriptor.setJdk(activation.getJdk());
        activationDescriptor.setActiveByDefault(activation.isActiveByDefault());
        ActivationFile file = activation.getFile();
        if (file != null) {
            MavenActivationFileDescriptor fileDescriptor = store.create(MavenActivationFileDescriptor.class);
            activationDescriptor.setActivationFile(fileDescriptor);
            fileDescriptor.setExists(file.getExists());
            fileDescriptor.setMissing(file.getMissing());
        }
        ActivationOS os = activation.getOs();
        if (os != null) {
            MavenActivationOSDescriptor osDescriptor = store.create(MavenActivationOSDescriptor.class);
            activationDescriptor.setActivationOS(osDescriptor);
            osDescriptor.setArch(os.getArch());
            osDescriptor.setFamily(os.getFamily());
            osDescriptor.setName(os.getName());
            osDescriptor.setVersion(os.getVersion());
        }
        ActivationProperty property = activation.getProperty();
        if (property != null) {
            PropertyDescriptor propertyDescriptor = store.create(PropertyDescriptor.class);
            activationDescriptor.setProperty(propertyDescriptor);
            propertyDescriptor.setName(property.getName());
            propertyDescriptor.setValue(property.getValue());
        }
    }
}
|
public class class_name {
    /**
     * Maps a Maven profile activation section onto descriptor nodes: the
     * JDK/active-by-default flags plus the optional file, OS and property
     * triggers. A null activation is a no-op.
     *
     * @param mavenProfileDescriptor descriptor receiving the activation
     * @param activation             the parsed activation, may be null
     * @param store                  store used to create descriptor nodes
     */
    private void addActivation(MavenProfileDescriptor mavenProfileDescriptor, Activation activation, Store store) {
        if (null == activation) {
            return; // depends on control dependency: [if], data = [none]
        }
        MavenProfileActivationDescriptor profileActivationDescriptor = store.create(MavenProfileActivationDescriptor.class);
        mavenProfileDescriptor.setActivation(profileActivationDescriptor);
        profileActivationDescriptor.setJdk(activation.getJdk());
        profileActivationDescriptor.setActiveByDefault(activation.isActiveByDefault());
        ActivationFile activationFile = activation.getFile();
        if (null != activationFile) {
            MavenActivationFileDescriptor activationFileDescriptor = store.create(MavenActivationFileDescriptor.class);
            profileActivationDescriptor.setActivationFile(activationFileDescriptor); // depends on control dependency: [if], data = [none]
            activationFileDescriptor.setExists(activationFile.getExists()); // depends on control dependency: [if], data = [none]
            activationFileDescriptor.setMissing(activationFile.getMissing()); // depends on control dependency: [if], data = [none]
        }
        ActivationOS os = activation.getOs();
        if (null != os) {
            MavenActivationOSDescriptor osDescriptor = store.create(MavenActivationOSDescriptor.class);
            profileActivationDescriptor.setActivationOS(osDescriptor); // depends on control dependency: [if], data = [none]
            osDescriptor.setArch(os.getArch()); // depends on control dependency: [if], data = [none]
            osDescriptor.setFamily(os.getFamily()); // depends on control dependency: [if], data = [none]
            osDescriptor.setName(os.getName()); // depends on control dependency: [if], data = [none]
            osDescriptor.setVersion(os.getVersion()); // depends on control dependency: [if], data = [none]
        }
        ActivationProperty property = activation.getProperty();
        if (null != property) {
            PropertyDescriptor propertyDescriptor = store.create(PropertyDescriptor.class);
            profileActivationDescriptor.setProperty(propertyDescriptor); // depends on control dependency: [if], data = [none]
            propertyDescriptor.setName(property.getName()); // depends on control dependency: [if], data = [none]
            propertyDescriptor.setValue(property.getValue()); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Emits the HTML toolbar controls for this screen's canned-command
     * buttons and, when needed, a trailing Submit/Reset pair.
     * <p>
     * Back and Help are skipped for HTML; Reset maps to an HTML reset
     * input; the navigation/CRUD commands become submit inputs posting the
     * command name; Form and Lookup become buttons that open a link built
     * from the main record's class name.
     *
     * @param bFieldsFound    whether input fields precede this toolbar (drives
     *                        the trailing Submit/Reset emission)
     * @param out             writer receiving the generated HTML
     * @param iHtmlAttributes rendering attribute flags (unused here —
     *                        presumably consumed by overrides; confirm)
     * @return whether a trailing Submit/Reset pair was emitted
     */
    public boolean printToolbarData(boolean bFieldsFound, PrintWriter out, int iHtmlAttributes)
    {
        int iNumCols = ((ToolScreen)this.getScreenField()).getSFieldCount();
        for (int iIndex = 0; iIndex < iNumCols; iIndex++)
        {
            ScreenField sField = ((ToolScreen)this.getScreenField()).getSField(iIndex);
            if (sField.getConverter() == null)
                if (sField instanceof SCannedBox)
                {
                    SCannedBox button = (SCannedBox)sField;   // Found the toolscreen
                    String strCommand = button.getButtonCommand();
                    if (strCommand.equalsIgnoreCase(MenuConstants.BACK))
                    {   // Ignore back for HTML
                    }
                    else if (strCommand.equalsIgnoreCase(MenuConstants.HELP))
                    {   // Ignore help for HTML
                    }
                    else if (strCommand.equalsIgnoreCase(MenuConstants.RESET))
                    {   // Special case - for reset do an HTML reset
                        out.println("<input type=\"Reset\"/>");
                        bFieldsFound = false;   // Don't need Submit/Reset button
                    }
                    else if ((strCommand.equalsIgnoreCase(MenuConstants.FIRST))
                        || (strCommand.equalsIgnoreCase(MenuConstants.PREVIOUS))
                        || (strCommand.equalsIgnoreCase(MenuConstants.NEXT))
                        || (strCommand.equalsIgnoreCase(MenuConstants.LAST))
                        || (strCommand.equalsIgnoreCase(MenuConstants.SUBMIT))
                        || (strCommand.equalsIgnoreCase(MenuConstants.DELETE)))
                    {   // Valid command - send it as a post command
                        out.println("<input type=\"submit\" name=\"" + DBParams.COMMAND + "\" value=\"" + strCommand + "\"/>");
                        bFieldsFound = false;   // Don't need Submit button
                    }
                    else if (strCommand.equalsIgnoreCase(MenuConstants.FORM))
                    {   // Valid command - send it as a post command
                        if (this.getMainRecord() != null)
                        {
                            String strRecord = this.getMainRecord().getClass().getName().toString();
                            String strLink = "?" + DBParams.RECORD + "=" + strRecord + "&" + DBParams.COMMAND + "=" + MenuConstants.REFRESH;
                            out.println("<input type=\"button\" value=" + strCommand + " onclick=\"window.open('" + strLink + "','_top');\"/>");
                        }
                    }
                    else if (strCommand.equalsIgnoreCase(MenuConstants.LOOKUP))
                    {   // Valid command - send it as a post command
                        if (this.getMainRecord() != null)
                        {
                            String strRecord = this.getMainRecord().getClass().getName().toString();
                            String strLink = "?" + DBParams.RECORD + "=" + strRecord;
                            out.println("<input type=\"button\" value=\"Lookup\" onclick=\"window.open('" + strLink + "','_top');\"/>");
                        }
                    }
                    else
                    {   // Valid command - send it as a post command
                        //+ Add code here to process a doCommand(xxx)
                    }
                }
        }
        String strCommand = MenuConstants.SUBMIT;
        if (this.getScreenField().getParentScreen() instanceof GridScreen)
        {
            strCommand = MenuConstants.LOOKUP;
            if (this.getScreenField().getParentScreen().getEditing())
                bFieldsFound = true;    // Need these buttons for grid input
        }
        if (bFieldsFound)
        {
            out.println("<input type=\"submit\" name=\"" + DBParams.COMMAND + "\" value=\"" + strCommand + "\"/>");
            out.println("<input type=\"Reset\"/>");
        }
        return bFieldsFound;
    } }
|
public class class_name {
    // NOTE(review): the trailing "// depends on control dependency: ..." markers
    // look like machine-generated control/data-dependency annotations anchored to
    // specific statements; keep line structure intact — TODO confirm provenance.
    /**
     * Emits this screen's toolbar buttons as HTML form controls.
     * BACK and HELP are deliberately skipped for HTML output; RESET maps to an
     * HTML reset input; navigation/submit commands become submit buttons; FORM
     * and LOOKUP become window-opening buttons.
     *
     * NOTE(review): the FORM branch writes value= without surrounding escaped
     * quotes, unlike the LOOKUP branch — looks like a missing \" pair; confirm
     * before changing, since this file mirrors a paired plain version.
     *
     * @param bFieldsFound true if editable fields preceded the toolbar, so a
     *                     default Submit/Reset pair may still be needed at the end
     * @param out writer receiving the generated HTML
     * @param iHtmlAttributes HTML attribute flags (not read in this method)
     * @return possibly-updated bFieldsFound (false once a submit/reset was emitted)
     */
    public boolean printToolbarData(boolean bFieldsFound, PrintWriter out, int iHtmlAttributes)
    {
        int iNumCols = ((ToolScreen)this.getScreenField()).getSFieldCount();
        for (int iIndex = 0; iIndex < iNumCols; iIndex++)
        {
            ScreenField sField = ((ToolScreen)this.getScreenField()).getSField(iIndex);
            if (sField.getConverter() == null)
                if (sField instanceof SCannedBox)
            {
                SCannedBox button = (SCannedBox)sField; // Found the toolscreen
                String strCommand = button.getButtonCommand();
                if (strCommand.equalsIgnoreCase(MenuConstants.BACK))
                { // Ignore back for HTML
                }
                else if (strCommand.equalsIgnoreCase(MenuConstants.HELP))
                { // Ignore help for HTML
                }
                else if (strCommand.equalsIgnoreCase(MenuConstants.RESET))
                { // Special case - for reset do an HTML reset
                    out.println("<input type=\"Reset\"/>"); // depends on control dependency: [if], data = [none]
                    bFieldsFound = false; // Don't need Submit/Reset button // depends on control dependency: [if], data = [none]
                }
                else if ((strCommand.equalsIgnoreCase(MenuConstants.FIRST))
                    || (strCommand.equalsIgnoreCase(MenuConstants.PREVIOUS))
                    || (strCommand.equalsIgnoreCase(MenuConstants.NEXT))
                    || (strCommand.equalsIgnoreCase(MenuConstants.LAST))
                    || (strCommand.equalsIgnoreCase(MenuConstants.SUBMIT))
                    || (strCommand.equalsIgnoreCase(MenuConstants.DELETE)))
                { // Valid command - send it as a post command
                    out.println("<input type=\"submit\" name=\"" + DBParams.COMMAND + "\" value=\"" + strCommand + "\"/>"); // depends on control dependency: [if], data = [none]
                    bFieldsFound = false; // Don't need Submit button // depends on control dependency: [if], data = [none]
                }
                else if (strCommand.equalsIgnoreCase(MenuConstants.FORM))
                { // Valid command - send it as a post command
                    if (this.getMainRecord() != null)
                    {
                        String strRecord = this.getMainRecord().getClass().getName().toString();
                        String strLink = "?" + DBParams.RECORD + "=" + strRecord + "&" + DBParams.COMMAND + "=" + MenuConstants.REFRESH;
                        out.println("<input type=\"button\" value=" + strCommand + " onclick=\"window.open('" + strLink + "','_top');\"/>"); // depends on control dependency: [if], data = [none]
                    }
                }
                else if (strCommand.equalsIgnoreCase(MenuConstants.LOOKUP))
                { // Valid command - send it as a post command
                    if (this.getMainRecord() != null)
                    {
                        String strRecord = this.getMainRecord().getClass().getName().toString();
                        String strLink = "?" + DBParams.RECORD + "=" + strRecord;
                        out.println("<input type=\"button\" value=\"Lookup\" onclick=\"window.open('" + strLink + "','_top');\"/>"); // depends on control dependency: [if], data = [none]
                    }
                }
                else
                { // Valid command - send it as a post command
                    //+ Add code here to process a doCommand(xxx)
                }
            }
        }
        String strCommand = MenuConstants.SUBMIT;
        if (this.getScreenField().getParentScreen() instanceof GridScreen)
        {
            strCommand = MenuConstants.LOOKUP; // depends on control dependency: [if], data = [none]
            if (this.getScreenField().getParentScreen().getEditing())
                bFieldsFound = true; // Need these buttons for grid input
        }
        if (bFieldsFound)
        {
            out.println("<input type=\"submit\" name=\"" + DBParams.COMMAND + "\" value=\"" + strCommand + "\"/>"); // depends on control dependency: [if], data = [none]
            out.println("<input type=\"Reset\"/>"); // depends on control dependency: [if], data = [none]
        }
        return bFieldsFound;
    } }
|
public class class_name {
    /**
     * Records the given key/value pair in the argument map.
     * Silently does nothing when no argument map has been created.
     */
    public void appendArg(String key, String value) {
        if (args == null) {
            return; // no argument map to append to
        }
        args.put(key, value);
    } }
|
public class class_name {
    // NOTE(review): the trailing "// depends on control dependency: ..." markers
    // look like machine-generated dependency annotations anchored to specific
    // statements; keep line structure intact — TODO confirm provenance.
    // Records key/value in the args map; no-op when args is null.
    public void appendArg(String key, String value) {
        if (args != null) {
            args.put(key, value); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Splits an identifier at lowercase-to-uppercase transitions (camelCase
     * boundaries) and returns the resulting tokens in order.
     *
     * Boundaries are collected as flat (start, end) index pairs; an odd pair
     * count is logged as a warning and the trailing unpaired index is ignored
     * by the extraction loop.
     *
     * @param name the identifier to tokenise; must be non-null
     * @return the list of camel-case tokens (empty for an empty name)
     */
    private static List<String> tokeniseOnLowercaseToUppercase( String name ) {
        List<String> splits = new ArrayList<>();
        // the following stores data in pairs (start, finish, start, ...)
        // (declared against the List interface; was a concrete ArrayList)
        List<Integer> candidateBoundaries = new ArrayList<>();
        // now process the string looking for boundaries; a primitive int
        // counter replaces the original boxed Integer, which autoboxed on
        // every increment and comparison
        for ( int index = 0; index < name.length(); index++ ) {
            if ( index == 0 ) {
                // the first character is always a boundary
                candidateBoundaries.add( index );
            }
            else {
                if ( Character.isUpperCase( name.codePointAt( index ) )
                        && Character.isLowerCase( name.codePointAt( index - 1 ) ) ) {
                    candidateBoundaries.add( index - 1 );
                    candidateBoundaries.add( index );
                }
            }
            // the terminal character always closes the final boundary pair
            if ( index == name.length() - 1 ) {
                candidateBoundaries.add( index );
            }
        }
        if ( candidateBoundaries.size() % 2 == 1 ) {
            LOGGER.warn(
                    "Odd number of boundaries found for: \"{}\"",
                    name );
        }
        // substring end index is inclusive-boundary + 1
        for ( int i = 0; i < candidateBoundaries.size(); i += 2 ) {
            splits.add( name.substring( candidateBoundaries.get( i ), candidateBoundaries.get( i + 1 ) + 1 ) );
        }
        return splits;
    } }
|
public class class_name {
    // NOTE(review): the trailing "// depends on control dependency: ..." markers
    // look like machine-generated dependency annotations anchored to specific
    // statements; keep line structure intact — TODO confirm provenance.
    // Splits a name at lowercase->uppercase transitions into camelCase tokens.
    private static List<String> tokeniseOnLowercaseToUppercase( String name ) {
        List<String> splits = new ArrayList<>();
        // the following stores data in pairs (start, finish, start, ...)
        ArrayList<Integer> candidateBoundaries = new ArrayList<>();
        // now process the array looking for boundaries
        for ( Integer index = 0; index < name.length(); index++ ) {
            if ( index == 0 ) {
                // the first character is always a boundary
                candidateBoundaries.add( index ); // depends on control dependency: [if], data = [( index]
            }
            else {
                if ( Character.isUpperCase( name.codePointAt( index ) )
                        && Character.isLowerCase( name.codePointAt( index - 1 ) ) ) {
                    candidateBoundaries.add( index - 1 ); // depends on control dependency: [if], data = [none]
                    candidateBoundaries.add( index ); // depends on control dependency: [if], data = [none]
                }
            }
            // now check whether this is the terminal character.
            // and record it to give us the final boundary
            if ( index == name.length() - 1 ) {
                candidateBoundaries.add( index ); // depends on control dependency: [if], data = [( index]
            }
        }
        if ( candidateBoundaries.size() % 2 == 1 ) {
            LOGGER.warn(
                    "Odd number of boundaries found for: \"{}\"",
                    name ); // depends on control dependency: [if], data = [none]
        }
        for ( int i = 0; i < candidateBoundaries.size(); i += 2 ) {
            splits.add( name.substring( candidateBoundaries.get( i ), candidateBoundaries.get( i + 1 ) + 1 ) ); // depends on control dependency: [for], data = [i]
        }
        return splits;
    } }
|
public class class_name {
    /**
     * Scans the linked list from the head and removes/returns the first item
     * accepted by the filter, or null when nothing matches.
     *
     * If the current link was physically unlinked by another thread while the
     * filter ran, traversal restarts from the head (defect 493652/PK59872);
     * that check and the advance to the next link are done under the list
     * lock so the next/previous pointers are consistent.
     *
     * @param filter predicate applied to each item in turn
     * @param transaction transaction under which the removal is performed
     * @return the removed item, or null if no item matched
     * @throws MessageStoreException propagated from removeIfMatches
     */
    public final AbstractItem removeFirstMatching(final Filter filter, PersistentTransaction transaction) throws MessageStoreException
    {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, "removeFirstMatching", new Object[] { filter, transaction});
        AbstractItem found = null;
        Link link = getHead();
        while (link != null && found == null)
        {
            found = ((AbstractItemLink)link).removeIfMatches(filter, transaction);
            if (found == null)
            {
                // Defect 493652/PK59872
                // We need to lock on the list at this point as our current link
                // may have been unlinked by another thread during the matches()
                // call. In that case we need to start at the head of the list again
                // as the next/previous pointers for the unlinked link will not be
                // set.
                synchronized(this)
                {
                    if (link.isPhysicallyUnlinked())
                    {
                        // We have been unlinked while we were doing the match.
                        // Start again at the beginning of the list.
                        link = getHead();
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, "Current link is PhysicallyUnlinked so returning to beginning of list.");
                    }
                    else
                    {
                        link = link.getNextLogicalLink();
                    }
                }
            }
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "removeFirstMatching", found);
        return found;
    } }
|
public class class_name {
    // NOTE(review): the trailing "// depends on control dependency: ..." markers
    // look like machine-generated dependency annotations anchored to specific
    // statements; keep line structure intact — TODO confirm provenance.
    // Removes and returns the first item matching the filter, restarting from
    // the head if the current link was concurrently unlinked.
    public final AbstractItem removeFirstMatching(final Filter filter, PersistentTransaction transaction) throws MessageStoreException
    {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, "removeFirstMatching", new Object[] { filter, transaction});
        AbstractItem found = null;
        Link link = getHead();
        while (link != null && found == null)
        {
            found = ((AbstractItemLink)link).removeIfMatches(filter, transaction);
            if (found == null)
            {
                // Defect 493652/PK59872
                // We need to lock on the list at this point as our current link
                // may have been unlinked by another thread during the matches()
                // call. In that case we need to start at the head of the list again
                // as the next/previous pointers for the unlinked link will not be
                // set.
                synchronized(this) // depends on control dependency: [if], data = [none]
                {
                    if (link.isPhysicallyUnlinked())
                    {
                        // We have been unlinked while we were doing the match.
                        // Start again at the beginning of the list.
                        link = getHead(); // depends on control dependency: [if], data = [none]
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, "Current link is PhysicallyUnlinked so returning to beginning of list.");
                    }
                    else
                    {
                        link = link.getNextLogicalLink(); // depends on control dependency: [if], data = [none]
                    }
                }
            }
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "removeFirstMatching", found);
        return found;
    } }
|
public class class_name {
    /**
     * Stops the send/receive worker threads, resets the transaction semaphore
     * to zero permits and closes the serial port.
     *
     * Interrupts each worker thread and joins it; if this thread is itself
     * interrupted while joining, the interrupt status is restored so callers
     * can observe it (the original swallowed it silently).
     */
    public void disconnect() {
        if (sendThread != null) {
            sendThread.interrupt();
            try {
                sendThread.join();
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // preserve interrupt status
            }
            sendThread = null;
        }
        if (receiveThread != null) {
            receiveThread.interrupt();
            try {
                receiveThread.join();
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // preserve interrupt status
            }
            receiveThread = null;
        }
        // Bring a negative permit count back up to zero before draining.
        // Semaphore.release(int) throws IllegalArgumentException for a negative
        // argument, so the deficit must be negated — the original passed the
        // negative count directly, which always threw when the branch was taken.
        if (transactionCompleted.availablePermits() < 0)
            transactionCompleted.release(-transactionCompleted.availablePermits());
        transactionCompleted.drainPermits();
        logger.trace("Transaction completed permit count -> {}", transactionCompleted.availablePermits());
        if (this.serialPort != null) {
            this.serialPort.close();
            this.serialPort = null;
        }
        logger.info("Disconnected from serial port");
    } }
|
public class class_name {
    // NOTE(review): the trailing "// depends on control dependency: ..." markers
    // look like machine-generated dependency annotations anchored to specific
    // statements; keep line structure intact — TODO confirm provenance.
    // Stops worker threads, resets the transaction semaphore and closes the port.
    // NOTE(review): release(availablePermits()) is only called when the count is
    // negative, and Semaphore.release(int) rejects negative arguments — this
    // branch always throws IllegalArgumentException; likely should negate.
    public void disconnect() {
        if (sendThread != null) {
            sendThread.interrupt(); // depends on control dependency: [if], data = [none]
            try {
                sendThread.join(); // depends on control dependency: [try], data = [none]
            } catch (InterruptedException e) {
            } // depends on control dependency: [catch], data = [none]
            sendThread = null; // depends on control dependency: [if], data = [none]
        }
        if (receiveThread != null) {
            receiveThread.interrupt(); // depends on control dependency: [if], data = [none]
            try {
                receiveThread.join(); // depends on control dependency: [try], data = [none]
            } catch (InterruptedException e) {
            } // depends on control dependency: [catch], data = [none]
            receiveThread = null; // depends on control dependency: [if], data = [none]
        }
        if(transactionCompleted.availablePermits() < 0)
            transactionCompleted.release(transactionCompleted.availablePermits());
        transactionCompleted.drainPermits();
        logger.trace("Transaction completed permit count -> {}", transactionCompleted.availablePermits());
        if (this.serialPort != null) {
            this.serialPort.close(); // depends on control dependency: [if], data = [none]
            this.serialPort = null; // depends on control dependency: [if], data = [none]
        }
        logger.info("Disconnected from serial port");
    } }
|
public class class_name {
    /** Returns the display label (second element) of every value option, in order. */
    @Override
    public String[] getOptionLabels() {
        String[] labels = new String[valueOptions.size()];
        int slot = labels.length;
        while (slot-- > 0) {
            labels[slot] = valueOptions.get(slot)[1];
        }
        return labels;
    } }
|
public class class_name {
    // NOTE(review): the trailing "// depends on control dependency: ..." markers
    // look like machine-generated dependency annotations anchored to specific
    // statements; keep line structure intact — TODO confirm provenance.
    // Collects the label (index 1) of each value option into an array.
    @Override
    public String[] getOptionLabels() {
        String[] labels = new String[valueOptions.size()];
        for (int i = 0; i < labels.length; i++) {
            labels[i] = valueOptions.get(i)[1]; // depends on control dependency: [for], data = [i]
        }
        return labels;
    } }
|
public class class_name {
    /**
     * Flushes index state (empty-page list, largest page id, header) to the
     * backing file and closes it.
     *
     * Any IOException raised while writing is rethrown as an unchecked
     * RuntimeException.
     */
    @Override
    public void close() {
        try {
            super.close();
            // Only a TreeIndexHeader knows about empty pages and the largest
            // page id. The original guarded writeEmptyPages with instanceof but
            // then cast unconditionally for setLargestPageID, which would throw
            // ClassCastException for any other header type — both calls are now
            // inside the guard.
            if (header instanceof TreeIndexHeader) {
                TreeIndexHeader treeHeader = (TreeIndexHeader) header;
                if (!emptyPages.isEmpty()) {
                    // write the list of empty pages to the end of the file
                    treeHeader.writeEmptyPages(emptyPages, file);
                }
                treeHeader.setLargestPageID(nextPageID);
            }
            header.writeHeader(file);
            file.close();
        }
        catch(IOException e) {
            throw new RuntimeException(e);
        }
    } }
|
public class class_name {
    // NOTE(review): the trailing "// depends on control dependency: ..." markers
    // look like machine-generated dependency annotations anchored to specific
    // statements; keep line structure intact — TODO confirm provenance.
    // Flushes header state to the file and closes it, wrapping IOException.
    // NOTE(review): setLargestPageID casts header unconditionally although the
    // preceding call is instanceof-guarded — CCE risk if header is ever not a
    // TreeIndexHeader; confirm against the paired plain version.
    @Override
    public void close() {
        try {
            super.close(); // depends on control dependency: [try], data = [none]
            if(!emptyPages.isEmpty() && header instanceof TreeIndexHeader) {
                // write the list of empty pages to the end of the file
                ((TreeIndexHeader) header).writeEmptyPages(emptyPages, file); // depends on control dependency: [if], data = [none]
            }
            ((TreeIndexHeader) header).setLargestPageID(nextPageID); // depends on control dependency: [try], data = [none]
            header.writeHeader(file); // depends on control dependency: [try], data = [none]
            file.close(); // depends on control dependency: [try], data = [none]
        }
        catch(IOException e) {
            throw new RuntimeException(e);
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /** Closes every registered MongoDS instance and empties the registry map. */
    public static void closeAll() {
        if (!CollectionUtil.isNotEmpty(dsMap)) {
            return; // nothing registered
        }
        for (MongoDS dataSource : dsMap.values()) {
            dataSource.close();
        }
        dsMap.clear();
    } }
|
public class class_name {
    // NOTE(review): the trailing "// depends on control dependency: ..." markers
    // look like machine-generated dependency annotations anchored to specific
    // statements; keep line structure intact — TODO confirm provenance.
    // Closes every registered MongoDS and clears the registry map.
    public static void closeAll() {
        if(CollectionUtil.isNotEmpty(dsMap)){
            for(MongoDS ds : dsMap.values()) {
                ds.close();
                // depends on control dependency: [for], data = [ds]
            }
            dsMap.clear();
            // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Returns the base-2 logarithm of a positive, finite double, rounded to an
     * int according to the supplied rounding mode.
     *
     * Subnormal inputs are first scaled into the normal range (multiplying by
     * IMPLICIT_BIT and compensating the result by SIGNIFICAND_BITS). For the
     * HALF_* modes the comparison xScaled^2 > 2.0 decides against the
     * irrational midpoint sqrt(2), as noted inline. UNNECESSARY deliberately
     * falls through to FLOOR after checking x is a power of two.
     *
     * @param x value to take the logarithm of; must be positive and finite
     * @param mode how to round the (generally non-integer) exact result
     * @return log2(x) rounded per mode
     */
    @SuppressWarnings("fallthrough")
    public static int log2(double x, RoundingMode mode) {
        N.checkArgument(x > 0.0 && isFinite(x), "x must be positive and finite");
        int exponent = getExponent(x);
        if (!isNormal(x)) {
            return log2(x * IMPLICIT_BIT, mode) - SIGNIFICAND_BITS;
            // Do the calculation on a normal value.
        }
        // x is positive, finite, and normal
        boolean increment;
        switch (mode) {
            case UNNECESSARY:
                checkRoundingUnnecessary(isPowerOfTwo(x));
                // fall through
            case FLOOR:
                increment = false;
                break;
            case CEILING:
                increment = !isPowerOfTwo(x);
                break;
            case DOWN:
                increment = exponent < 0 & !isPowerOfTwo(x);
                break;
            case UP:
                increment = exponent >= 0 & !isPowerOfTwo(x);
                break;
            case HALF_DOWN:
            case HALF_EVEN:
            case HALF_UP:
                double xScaled = scaleNormalize(x);
                // sqrt(2) is irrational, and the spec is relative to the "exact numerical result,"
                // so log2(x) is never exactly exponent + 0.5.
                increment = (xScaled * xScaled) > 2.0;
                break;
            default:
                throw new AssertionError();
        }
        return increment ? exponent + 1 : exponent;
    } }
|
public class class_name {
    // NOTE(review): the trailing "// depends on control dependency: ..." markers
    // look like machine-generated dependency annotations anchored to specific
    // statements; keep line structure intact — TODO confirm provenance.
    // Base-2 logarithm of a positive finite double, rounded per RoundingMode;
    // UNNECESSARY intentionally falls through to FLOOR.
    @SuppressWarnings("fallthrough")
    public static int log2(double x, RoundingMode mode) {
        N.checkArgument(x > 0.0 && isFinite(x), "x must be positive and finite");
        int exponent = getExponent(x);
        if (!isNormal(x)) {
            return log2(x * IMPLICIT_BIT, mode) - SIGNIFICAND_BITS;
            // depends on control dependency: [if], data = [none]
            // Do the calculation on a normal value.
        }
        // x is positive, finite, and normal
        boolean increment;
        switch (mode) {
            case UNNECESSARY:
                checkRoundingUnnecessary(isPowerOfTwo(x));
                // fall through
            case FLOOR:
                increment = false;
                break;
            case CEILING:
                increment = !isPowerOfTwo(x);
                break;
            case DOWN:
                increment = exponent < 0 & !isPowerOfTwo(x);
                break;
            case UP:
                increment = exponent >= 0 & !isPowerOfTwo(x);
                break;
            case HALF_DOWN:
            case HALF_EVEN:
            case HALF_UP:
                double xScaled = scaleNormalize(x);
                // sqrt(2) is irrational, and the spec is relative to the "exact numerical result,"
                // so log2(x) is never exactly exponent + 0.5.
                increment = (xScaled * xScaled) > 2.0;
                break;
            default:
                throw new AssertionError();
        }
        return increment ? exponent + 1 : exponent;
    } }
|
public class class_name {
    /**
     * Returns true only if every expression in the list is valid for use in
     * indexes/materialized views; stops at the first invalid expression
     * (which may append details to msg).
     */
    public static boolean validateExprsForIndexesAndMVs(List<AbstractExpression> checkList, StringBuffer msg, boolean isMV) {
        boolean allValid = true;
        for (AbstractExpression candidate : checkList) {
            allValid = candidate.isValidExprForIndexesAndMVs(msg, isMV);
            if (!allValid) {
                break; // first failure decides the result
            }
        }
        return allValid;
    } }
|
public class class_name {
    // NOTE(review): the trailing "// depends on control dependency: ..." markers
    // look like machine-generated dependency annotations anchored to specific
    // statements; keep line structure intact — TODO confirm provenance.
    // True only when every expression passes the index/MV validity check.
    public static boolean validateExprsForIndexesAndMVs(List<AbstractExpression> checkList, StringBuffer msg, boolean isMV) {
        for (AbstractExpression expr : checkList) {
            if (!expr.isValidExprForIndexesAndMVs(msg, isMV)) {
                return false; // depends on control dependency: [if], data = [none]
            }
        }
        return true;
    } }
|
public class class_name {
    /**
     * Rewrites mustache-style placeholders inside a text segment.
     *
     * Scans the segment for matches of VUE_MUSTACHE_PATTERN (presumably
     * "{{ ... }}" — TODO confirm against the pattern definition), runs each
     * inner expression through processExpression with a String return type,
     * and replaces the whole segment in outputDocument — but only when at
     * least one match was found (lastEnd > 0).
     *
     * NOTE(review): the "start > 0" check skips the between-match copy only
     * for a match at position 0, where there is nothing to copy anyway.
     */
    private void processTextNode(Segment textSegment) {
        context.setCurrentSegment(textSegment);
        String elementText = textSegment.toString();
        Matcher matcher = VUE_MUSTACHE_PATTERN.matcher(elementText);
        int lastEnd = 0;
        StringBuilder newText = new StringBuilder();
        while (matcher.find()) {
            int start = matcher.start();
            int end = matcher.end();
            if (start > 0) {
                newText.append(elementText, lastEnd, start);
            }
            currentExpressionReturnType = TypeName.get(String.class);
            // strip the two-character delimiters on each side of the match
            String expressionString = elementText.substring(start + 2, end - 2).trim();
            String processedExpression = processExpression(expressionString);
            newText.append("{{ ").append(processedExpression).append(" }}");
            lastEnd = end;
        }
        if (lastEnd > 0) {
            newText.append(elementText.substring(lastEnd));
            outputDocument.replace(textSegment, newText.toString());
        }
    } }
|
public class class_name {
    // NOTE(review): the trailing "// depends on control dependency: ..." markers
    // look like machine-generated dependency annotations anchored to specific
    // statements; keep line structure intact — TODO confirm provenance.
    // Rewrites each mustache placeholder in the segment via processExpression
    // and replaces the segment only when a match was found.
    private void processTextNode(Segment textSegment) {
        context.setCurrentSegment(textSegment);
        String elementText = textSegment.toString();
        Matcher matcher = VUE_MUSTACHE_PATTERN.matcher(elementText);
        int lastEnd = 0;
        StringBuilder newText = new StringBuilder();
        while (matcher.find()) {
            int start = matcher.start();
            int end = matcher.end();
            if (start > 0) {
                newText.append(elementText, lastEnd, start); // depends on control dependency: [if], data = [none]
            }
            currentExpressionReturnType = TypeName.get(String.class); // depends on control dependency: [while], data = [none]
            String expressionString = elementText.substring(start + 2, end - 2).trim();
            String processedExpression = processExpression(expressionString);
            newText.append("{{ ").append(processedExpression).append(" }}"); // depends on control dependency: [while], data = [none]
            lastEnd = end; // depends on control dependency: [while], data = [none]
        }
        if (lastEnd > 0) {
            newText.append(elementText.substring(lastEnd)); // depends on control dependency: [if], data = [(lastEnd]
            outputDocument.replace(textSegment, newText.toString()); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Removes and returns the element at the consumer index, or null when the
     * queue is empty.
     *
     * Lock-free dequeue: the element is read with load-load ordering, the slot
     * is nulled with store-store ordering, and the consumer index is published
     * with an ordered store so size() observes a consistent state (per the
     * inline LoadLoad/StoreStore notes). NOTE(review): correctness depends on
     * the memory-ordering contract of lvElement/soElement/soConsumerIndex
     * declared elsewhere — do not reorder these statements.
     */
    @Override
    public E poll()
    {
        final long consumerIndex = this.lpConsumerIndex();
        final long offset = calcElementOffset(consumerIndex);
        // local load of field to avoid repeated loads after volatile reads
        final E[] buffer = this.buffer;
        final E e = lvElement(buffer, offset);// LoadLoad
        if (null == e)
        {
            return null;
        }
        soElement(buffer, offset, null);// StoreStore
        soConsumerIndex(consumerIndex + 1); // ordered store -> atomic and ordered for size()
        return e;
    } }
|
public class class_name {
    // NOTE(review): the trailing "// depends on control dependency: ..." markers
    // look like machine-generated dependency annotations anchored to specific
    // statements; keep line structure intact — TODO confirm provenance.
    // Lock-free dequeue at the consumer index; returns null when empty.
    @Override
    public E poll()
    {
        final long consumerIndex = this.lpConsumerIndex();
        final long offset = calcElementOffset(consumerIndex);
        // local load of field to avoid repeated loads after volatile reads
        final E[] buffer = this.buffer;
        final E e = lvElement(buffer, offset);// LoadLoad
        if (null == e)
        {
            return null; // depends on control dependency: [if], data = [none]
        }
        soElement(buffer, offset, null);// StoreStore
        soConsumerIndex(consumerIndex + 1); // ordered store -> atomic and ordered for size()
        return e;
    } }
|
public class class_name {
    /**
     * Resolves the TCP-level IP address of one end of an HTTP accept session.
     *
     * @param acceptSession the session whose addresses are inspected
     * @param parameterName FORWARDED_FOR selects the remote (client) address,
     *                      FORWARDED_BY the local address
     * @return the host of the underlying TCP transport, or null when the
     *         parameter name is unrecognized or no TCP transport is present
     */
    private static String getResourceIpAddress(HttpAcceptSession acceptSession, String parameterName) {
        String resourceIpAddress = null;
        ResourceAddress resourceAddress = null;
        switch (parameterName) {
            case FORWARDED_FOR:
                resourceAddress = acceptSession.getRemoteAddress();
                break;
            case FORWARDED_BY:
                resourceAddress = acceptSession.getLocalAddress();
                break;
        }
        // Guard against an unrecognized parameter name: the original fell
        // through with resourceAddress == null and threw NullPointerException
        // on findTransport().
        if (resourceAddress == null) {
            return null;
        }
        ResourceAddress tcpResourceAddress = resourceAddress.findTransport("tcp");
        if (tcpResourceAddress != null) {
            URI resource = tcpResourceAddress.getResource();
            resourceIpAddress = resource.getHost();
        }
        return resourceIpAddress;
    } }
|
public class class_name {
    // NOTE(review): the trailing "// depends on control dependency: ..." markers
    // look like machine-generated dependency annotations anchored to specific
    // statements; keep line structure intact — TODO confirm provenance.
    // Resolves the TCP-level host of the remote (FORWARDED_FOR) or local
    // (FORWARDED_BY) end of the session.
    // NOTE(review): if parameterName matches neither case, resourceAddress
    // stays null and findTransport() throws NullPointerException.
    private static String getResourceIpAddress(HttpAcceptSession acceptSession, String parameterName) {
        String resourceIpAddress = null;
        ResourceAddress resourceAddress = null;
        switch (parameterName) {
            case FORWARDED_FOR:
                resourceAddress = acceptSession.getRemoteAddress();
                break;
            case FORWARDED_BY:
                resourceAddress = acceptSession.getLocalAddress();
                break;
        }
        ResourceAddress tcpResourceAddress = resourceAddress.findTransport("tcp");
        if (tcpResourceAddress != null) {
            URI resource = tcpResourceAddress.getResource();
            resourceIpAddress = resource.getHost(); // depends on control dependency: [if], data = [none]
        }
        return resourceIpAddress;
    } }
|
public class class_name {
    /**
     * Redirects the managed stdout stream to the given output stream.
     *
     * Lazily initializes the stdout wrapper, and refuses to route stdout back
     * onto the raw system streams (which would create a loop).
     *
     * @param os the new destination stream; ignored if it is the raw system
     *           stdout or stderr
     */
    public synchronized static void setStdout(OutputStream os)
    {
        if (_stdoutStream == null) {
            initStdout();
        }
        // Never point the managed stdout at the raw system streams.
        if (os == _systemErr || os == _systemOut) {
            return;
        }
        // A previous revision also rejected WriteStreams backed by
        // StdoutStream/StderrStream; that check was commented out, leaving a
        // dead instanceof block with an unused local, which has been removed:
        /*
        if (os instanceof WriteStream) {
            WriteStream out = (WriteStream) os;
            if (out.getSource() == StdoutStream.create()
                || out.getSource() == StderrStream.create()) {
                return;
            }
        }
        */
        _stdoutStream.setStream(os);
    } }
|
public class class_name {
    // NOTE(review): the trailing "// depends on control dependency: ..." markers
    // look like machine-generated dependency annotations anchored to specific
    // statements; keep line structure intact — TODO confirm provenance.
    // Redirects the managed stdout to os, initializing lazily and refusing the
    // raw system streams. The instanceof block below is dead (check commented out).
    public synchronized static void setStdout(OutputStream os)
    {
        if (_stdoutStream == null) {
            initStdout(); // depends on control dependency: [if], data = [none]
        }
        if (os == _systemErr || os == _systemOut) {
            return; // depends on control dependency: [if], data = [none]
        }
        if (os instanceof WriteStream) {
            WriteStream out = (WriteStream) os;
            /*
            if (out.getSource() == StdoutStream.create()
                || out.getSource() == StderrStream.create()) {
                return;
            }
            */
        }
        _stdoutStream.setStream(os);
    } }
|
public class class_name {
    /**
     * Collects completed tasklet results, runs the user aggregation function
     * over them, and queues a serialized report for the master.
     *
     * The result-collection phase runs under stateLock so two triggers cannot
     * aggregate the same tasklets twice; for COUNT triggers the method returns
     * early when the count threshold is not met. The aggregation function
     * itself runs outside the lock, and a failure is reported to the master as
     * a TaskletAggregationFailureReport rather than thrown. A heartbeat is
     * triggered only when there is something to report.
     *
     * @param type whether this invocation was triggered by an alarm or by a
     *             completion count
     */
    private void aggregateTasklets(final AggregateTriggerType type) {
        final List<WorkerToMasterReport> workerToMasterReports = new ArrayList<>();
        final List<Object> results = new ArrayList<>();
        final List<Integer> aggregatedTasklets = new ArrayList<>();
        // Synchronization to prevent duplication of work on the same aggregation function on the same worker.
        synchronized (stateLock) {
            switch(type) {
                case ALARM:
                    aggregateTasklets(workerToMasterReports, results, aggregatedTasklets);
                    break;
                case COUNT:
                    if (!aggregateOnCount()) {
                        return;
                    }
                    aggregateTasklets(workerToMasterReports, results, aggregatedTasklets);
                    break;
                default:
                    throw new RuntimeException("Unexpected aggregate type.");
            }
        }
        if (!results.isEmpty()) {
            // Run the aggregation function.
            try {
                final Object aggregationResult = taskletAggregationRequest.executeAggregation(results);
                workerToMasterReports.add(new TaskletAggregationResultReport(aggregatedTasklets, aggregationResult));
            } catch (final Exception e) {
                workerToMasterReports.add(new TaskletAggregationFailureReport(aggregatedTasklets, e));
            }
        }
        // Add to worker report only if there is something to report back.
        if (!workerToMasterReports.isEmpty()) {
            workerReportsQueue.addLast(kryoUtils.serialize(new WorkerToMasterReports(workerToMasterReports)));
            heartBeatTriggerManager.triggerHeartBeat();
        }
    } }
|
public class class_name {
    // NOTE(review): the trailing "// depends on control dependency: ..." markers
    // look like machine-generated dependency annotations anchored to specific
    // statements; keep line structure intact — TODO confirm provenance.
    // Collects tasklet results under stateLock, aggregates them outside the
    // lock, and queues a serialized report plus heartbeat when non-empty.
    private void aggregateTasklets(final AggregateTriggerType type) {
        final List<WorkerToMasterReport> workerToMasterReports = new ArrayList<>();
        final List<Object> results = new ArrayList<>();
        final List<Integer> aggregatedTasklets = new ArrayList<>();
        // Synchronization to prevent duplication of work on the same aggregation function on the same worker.
        synchronized (stateLock) {
            switch(type) {
                case ALARM:
                    aggregateTasklets(workerToMasterReports, results, aggregatedTasklets);
                    break;
                case COUNT:
                    if (!aggregateOnCount()) {
                        return; // depends on control dependency: [if], data = [none]
                    }
                    aggregateTasklets(workerToMasterReports, results, aggregatedTasklets);
                    break;
                default:
                    throw new RuntimeException("Unexpected aggregate type.");
            }
        }
        if (!results.isEmpty()) {
            // Run the aggregation function.
            try {
                final Object aggregationResult = taskletAggregationRequest.executeAggregation(results);
                workerToMasterReports.add(new TaskletAggregationResultReport(aggregatedTasklets, aggregationResult)); // depends on control dependency: [try], data = [none]
            } catch (final Exception e) {
                workerToMasterReports.add(new TaskletAggregationFailureReport(aggregatedTasklets, e));
            } // depends on control dependency: [catch], data = [none]
        }
        // Add to worker report only if there is something to report back.
        if (!workerToMasterReports.isEmpty()) {
            workerReportsQueue.addLast(kryoUtils.serialize(new WorkerToMasterReports(workerToMasterReports))); // depends on control dependency: [if], data = [none]
            heartBeatTriggerManager.triggerHeartBeat(); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Walks a nested structure of lists along the given index path and
     * returns the element found at the final index, if any.
     *
     * All indices except the last must address nested lists; a null
     * intermediate node yields an empty Optional, while a non-list
     * intermediate node is an error.
     *
     * NOTE(review): the clazz parameter is never read and the cast to T is
     * unchecked — a wrong type surfaces as ClassCastException at the call
     * site. The raw List types predate generics cleanup; confirm before
     * tightening the signature.
     *
     * @param list  the root list to descend into
     * @param clazz target type token (currently unused)
     * @param path  one or more list indices; must be non-null and non-empty
     * @return the element at the path, or empty when an intermediate node is null
     * @throws IllegalArgumentException when path is null or empty
     * @throws IllegalAccessError when an intermediate node is not a list
     */
    public static <T> Optional<T> get(final List list, final Class<T> clazz, final Integer... path) {
        if (path == null || path.length == 0) {
            throw new IllegalArgumentException(PATH_MUST_BE_SPECIFIED);
        }
        if (path.length == 1) {
            return Optional.ofNullable((T) list.get(path[0]));
        }
        final Integer[] pathToLastNode = Arrays.copyOfRange(path, 0, path.length - 1);
        final Integer lastKey = path[path.length - 1];
        List intermediateList = list;
        for (Integer index : pathToLastNode) {
            final Object node = intermediateList.get(index);
            if (node != null) {
                final Optional<List> nodeListOption = ClassApi.cast(node, List.class);
                if (nodeListOption.isPresent()) {
                    intermediateList = nodeListOption.get();
                } else {
                    throw new IllegalAccessError("Node with key '" + index + "' is not a list!");
                }
            } else {
                return Optional.empty();
            }
        }
        return Optional.ofNullable((T) intermediateList.get(lastKey));
    } }
|
public class class_name {
    // NOTE(review): the trailing "// depends on control dependency: ..." markers
    // look like machine-generated dependency annotations anchored to specific
    // statements; keep line structure intact — TODO confirm provenance.
    // Descends nested lists along path and returns the final element, if any.
    public static <T> Optional<T> get(final List list, final Class<T> clazz, final Integer... path) {
        if (path == null || path.length == 0) {
            throw new IllegalArgumentException(PATH_MUST_BE_SPECIFIED);
        }
        if (path.length == 1) {
            return Optional.ofNullable((T) list.get(path[0])); // depends on control dependency: [if], data = [none]
        }
        final Integer[] pathToLastNode = Arrays.copyOfRange(path, 0, path.length - 1);
        final Integer lastKey = path[path.length - 1];
        List intermediateList = list;
        for (Integer index : pathToLastNode) {
            final Object node = intermediateList.get(index);
            if (node != null) {
                final Optional<List> nodeListOption = ClassApi.cast(node, List.class);
                if (nodeListOption.isPresent()) {
                    intermediateList = nodeListOption.get(); // depends on control dependency: [if], data = [none]
                } else {
                    throw new IllegalAccessError("Node with key '" + index + "' is not a list!");
                }
            } else {
                return Optional.empty(); // depends on control dependency: [if], data = [none]
            }
        }
        return Optional.ofNullable((T) intermediateList.get(lastKey));
    } }
|
public class class_name {
    /**
     * URL-decodes the value when it contains a percent sign; otherwise the
     * value is returned untouched. A (practically impossible) UTF-8 charset
     * lookup failure is rethrown unchecked.
     */
    private String decodeIfEncoded(String value) {
        if (!StringUtils.contains(value, "%")) {
            return value; // no percent-escape present; nothing to decode
        }
        try {
            return URLDecoder.decode(value, CharEncoding.UTF_8);
        }
        catch (UnsupportedEncodingException ex) {
            throw new RuntimeException(ex);
        }
    } }
|
public class class_name {
    // NOTE(review): the trailing "// depends on control dependency: ..." markers
    // look like machine-generated dependency annotations anchored to specific
    // statements; keep line structure intact — TODO confirm provenance.
    // URL-decodes the value only when it contains a percent sign.
    private String decodeIfEncoded(String value) {
        if (StringUtils.contains(value, "%")) {
            try {
                return URLDecoder.decode(value, CharEncoding.UTF_8); // depends on control dependency: [try], data = [none]
            }
            catch (UnsupportedEncodingException ex) {
                throw new RuntimeException(ex);
            } // depends on control dependency: [catch], data = [none]
        }
        return value;
    } }
|
public class class_name {
    /** Returns this data type's record fields, or an empty array for non-record types. */
    public JsonArray getDataTypeValues() {
        boolean isRecord = this.type.equals(RECORD);
        return isRecord
                ? getDataType().get(RECORD_FIELDS_KEY).getAsJsonArray()
                : new JsonArray();
    } }
|
public class class_name {
    // NOTE(review): the trailing "// depends on control dependency: ..." markers
    // look like machine-generated dependency annotations anchored to specific
    // statements; keep line structure intact — TODO confirm provenance.
    // Record fields for RECORD types; otherwise an empty JsonArray.
    public JsonArray getDataTypeValues() {
        if (this.type.equals(RECORD)) {
            return getDataType().get(RECORD_FIELDS_KEY).getAsJsonArray(); // depends on control dependency: [if], data = [none]
        }
        return new JsonArray();
    } }
|
public class class_name {
    /** Forwards an onTextChanged event to every external watcher except this instance. */
    private void sendOnTextChanged(CharSequence text, int start, int before, int after) {
        final List<TextWatcher> list = mExternalTextWatchers;
        final int count = list.size();
        for (int i = 0; i < count; i++) {
            final TextWatcher candidate = list.get(i);
            if (candidate == this) {
                continue; // never re-dispatch to ourselves
            }
            candidate.onTextChanged(text, start, before, after);
        }
    } }
|
public class class_name {
    // NOTE(review): the trailing "// depends on control dependency: ..." markers
    // look like machine-generated dependency annotations anchored to specific
    // statements; keep line structure intact — TODO confirm provenance.
    // Dispatches onTextChanged to all external watchers except this one.
    private void sendOnTextChanged(CharSequence text, int start, int before, int after) {
        final List<TextWatcher> list = mExternalTextWatchers;
        final int count = list.size();
        for (int i = 0; i < count; i++) {
            TextWatcher watcher = list.get(i);
            if (watcher != this) {
                watcher.onTextChanged(text, start, before, after); // depends on control dependency: [if], data = [none]
            }
        }
    } }
|
public class class_name {
    /**
     * Validates that every field named in the composite join key appears in
     * each of the joined data sources.
     *
     * Validation only applies when at least two statements are being joined.
     * The implicit "timestamp" field is always accepted. The per-query
     * presence check is factored into {@link #fieldPresentInQuery}.
     *
     * @throws Exception when no join field is configured, or when a join
     *         field is missing from any queried data source
     */
    @Override
    public void isValid() throws Exception {
        if (numStmnts() < 2) {
            return; // nothing to join; no validation required
        }
        if (primaryJoinableHooks.size() <= 0) {
            throw new Exception("Need atleast one field to join on");
        }
        for (String joinField : primaryJoinableHooks) {
            if (joinField.equals("timestamp")) {
                // timestamp is implicitly present in the result.
                continue;
            }
            for (QueryMeta query : getAllStmnts()) {
                if (!fieldPresentInQuery(joinField, query)) {
                    throw new Exception(joinField
                            + " mentioned in the composite key should appear in all the data sources being queried and joined.");
                }
            }
        }
    }

    /**
     * Returns true when the join field matches an aggregation or fetched
     * dimension of the query, by field name or alias.
     */
    private boolean fieldPresentInQuery(String joinField, QueryMeta query) {
        boolean isPresent = false;
        if (query instanceof BaseAggQueryMeta) {
            // Since BaseAggQueryMeta is also a PlainDimQueryMeta we go with the former first.
            for (AggItem item : ((BaseAggQueryMeta) query).aggregations) {
                if (joinField.equals(item.fieldName) || (item.asName != null && joinField.equals(item.asName))) {
                    isPresent = true;
                }
            }
        }
        if (query instanceof PlainDimQueryMeta) {
            Map<String, String> dims = ((PlainDimQueryMeta) query).fetchDimensions;
            for (Map.Entry<String, String> entry : dims.entrySet()) {
                if (joinField.equals(entry.getKey()) || (entry.getValue() != null && joinField.equals(entry.getValue()))) {
                    isPresent = true;
                }
            }
        }
        return isPresent;
    } }
|
public class class_name {
    // NOTE(review): the trailing "// depends on control dependency: ..." markers
    // look like machine-generated dependency annotations anchored to specific
    // statements; keep line structure intact — TODO confirm provenance.
    // Ensures every composite-key join field appears in every joined query
    // (as an aggregation or fetched dimension, by name or alias).
    @Override
    public void isValid() throws Exception {
        if (numStmnts() >= 2) {
            if (primaryJoinableHooks.size() > 0) {
                for (String joinField:primaryJoinableHooks) {
                    if (joinField.equals("timestamp")) {// timestamp is implicitly present in the result.
                        continue;
                    }
                    for (QueryMeta query: getAllStmnts()) {
                        boolean isPresent = false;
                        if (query instanceof BaseAggQueryMeta) {// Since BaseAggQueryMeta is also a PlainDimQueryMeta we go with former first.
                            for (AggItem item:((BaseAggQueryMeta)query).aggregations) {
                                if (joinField.equals(item.fieldName) || (item.asName != null && joinField.equals(item.asName))) {
                                    isPresent = true; // depends on control dependency: [if], data = [none]
                                }
                            }
                        }
                        if (query instanceof PlainDimQueryMeta) {
                            Map<String, String> dims = ((PlainDimQueryMeta)query).fetchDimensions;
                            for (Map.Entry<String, String> entry :dims.entrySet()) {
                                if (joinField.equals(entry.getKey()) || (entry.getValue() != null && joinField.equals(entry.getValue()))) {
                                    isPresent = true; // depends on control dependency: [if], data = [none]
                                }
                            }
                        }
                        if (!isPresent) {
                            String error = joinField + " mentioned in the composite key should appear in all the data sources being queried and joined."; // depends on control dependency: [if], data = [none]
                            throw new Exception(error);
                        }
                    }
                }
            } else {
                String error = "Need atleast one field to join on";
                throw new Exception(error);
            }
        }
    } }
|
public class class_name {
    /**
     * Converts a boxed Object[] into the equivalent primitive array
     * (e.g. an array of Integer values into an int[]).
     *
     * @param array the boxed source array; all elements are expected to share
     *              the same wrapper type
     * @return a new primitive array containing the unboxed elements
     */
    public static Object toPrimitiveArray(Object[] array) {
        Class<?> primitiveType; // parameterized; was a raw Class
        if (array.length > 0) {
            // Translation of the log message below: the array was likely created
            // as new Object[length], so getClass().getComponentType() would report
            // Object rather than the real element type; use the first element's
            // class instead. Risk: if element types are inconsistent, Array.set
            // below may throw a conversion exception.
            LOG.debug("很可能array是用new Object[length]()构造的,这个时候array.getClass().getComponentType()返回的是Object类型,这不是我们期望的" +
                    "我们希望使用元素的实际类型,这里有一个风险点,即数组类型不一致,后面可能就会抛出类型转换异常");
            primitiveType = Reflection.getPrimitiveType(array[0].getClass());
        } else {
            primitiveType = Reflection.getPrimitiveType(array.getClass().getComponentType());
        }
        Object primitiveArray = Array.newInstance(primitiveType, array.length);
        for (int i = 0; i < array.length; i++) {
            Array.set(primitiveArray, i, array[i]); // unboxes into the primitive slot
        }
        return primitiveArray;
    } }
|
public class class_name {
    // NOTE(review): the trailing "// depends on control dependency: ..." markers
    // look like machine-generated dependency annotations anchored to specific
    // statements; keep line structure intact — TODO confirm provenance.
    // Converts a boxed Object[] into the matching primitive array. The Chinese
    // log message says: the array was likely built as new Object[length], so
    // getComponentType() would report Object; the first element's class is used
    // instead, risking a conversion exception if element types are mixed.
    public static Object toPrimitiveArray(Object[] array) {
        Class primitiveType;
        if (array.length > 0) {
            LOG.debug("很可能array是用new Object[length]()构造的,这个时候array.getClass().getComponentType()返回的是Object类型,这不是我们期望的" +
                    "我们希望使用元素的实际类型,这里有一个风险点,即数组类型不一致,后面可能就会抛出类型转换异常"); // depends on control dependency: [if], data = [none]
            primitiveType = Reflection.getPrimitiveType(array[0].getClass()); // depends on control dependency: [if], data = [none]
        } else {
            primitiveType = Reflection.getPrimitiveType(array.getClass().getComponentType()); // depends on control dependency: [if], data = [none]
        }
        Object primitiveArray = Array.newInstance(primitiveType, array.length);
        for (int i = 0; i < array.length; i++) {
            Array.set(primitiveArray, i, array[i]); // depends on control dependency: [for], data = [i]
        }
        return primitiveArray;
    } }
|
public class class_name {
    /**
     * Indexes the input expression mappings by their output variable name,
     * preserving encounter order. Mappings without an output are skipped.
     *
     * @return an insertion-ordered map from output variable to its mapping
     * @throws IllegalArgumentException if two mappings share an output name
     */
    public Map<String, ExpressionMapping> getInputOutputExpressionMappings() {
        final Map<String, ExpressionMapping> byOutput = new LinkedHashMap<String, ExpressionMapping>();
        for (ExpressionMapping mapping : _inputExpressionMappings) {
            final String outputName = mapping.getOutput();
            if (outputName == null) {
                continue; // mapping has no output variable — nothing to index
            }
            if (byOutput.containsKey(outputName)) {
                throw new IllegalArgumentException("duplicate input/output variable [" + outputName + "] not allowed");
            }
            byOutput.put(outputName, mapping);
        }
        return byOutput;
    } }
|
public class class_name {
    // Annotated duplicate of the preceding block; the machine-generated
    // dependency comments are preserved verbatim.
    public Map<String, ExpressionMapping> getInputOutputExpressionMappings() {
        Map<String, ExpressionMapping> map = new LinkedHashMap<String, ExpressionMapping>();
        for (ExpressionMapping em : _inputExpressionMappings) {
            String output = em.getOutput();
            if (output != null) {
                if (map.containsKey(output)) {
                    throw new IllegalArgumentException("duplicate input/output variable [" + output + "] not allowed");
                } else {
                    map.put(output, em); // depends on control dependency: [if], data = [none]
                }
            }
        }
        return map;
    } }
|
public class class_name {
    /**
     * Builds a catalog for a directory under {@code baseDir}: the directory
     * named by {@code location} (suffixed with {@code matchRemaining} when
     * non-empty) is scanned first for *.xml files, each added as a plain
     * catalog ref, and then for sub-directories, each added as a CatalogScan
     * catalog ref pointing at {@code <dir>/CATSCAN}.
     *
     * @param baseDir        root directory the relative location is resolved against
     * @param matchRemaining remaining path segment of the request, may be empty
     * @param baseURI        base URI for the resulting catalog
     * @throws FileNotFoundException if the resolved path is missing or not a directory
     */
    public CatalogBuilder makeCatalogFromDirectory(File baseDir, String matchRemaining, URI baseURI) throws IOException {
        String relLocation = (matchRemaining.length() >= 1) ? location + "/" + matchRemaining : location;
        String name = (matchRemaining.length() >= 1) ? getName() + "/" + matchRemaining : getName();
        File absLocation = new File( baseDir, relLocation);
        // it must be a directory
        Path wantDir = absLocation.toPath();
        if (!Files.exists(wantDir)) throw new FileNotFoundException("Requested catalog does not exist =" + absLocation);
        if (!Files.isDirectory(wantDir)) throw new FileNotFoundException("Not a directory =" + absLocation);
        // Setup and create catalog builder.
        CatalogBuilder catBuilder = new CatalogBuilder();
        catBuilder.setBaseURI(baseURI);
        assert this.getParentCatalog() != null;
        DatasetBuilder top = new DatasetBuilder(null);
        top.transferMetadata(this, true);
        top.setName(name);
        top.put(Dataset.Id, null); // no id for top
        catBuilder.addDataset(top);
        // first look for catalogs
        try (DirectoryStream<Path> ds = Files.newDirectoryStream(wantDir, "*.xml")) {
            for (Path p : ds) {
                if (!Files.isDirectory(p)) {
                    String pfilename = p.getFileName().toString();
                    String urlPath = pfilename;
                    //String path = dataDirComplete.length() == 0 ? filename : dataDirComplete + "/" + filename; // reletive starting from current directory
                    CatalogRefBuilder catref = new CatalogRefBuilder(top);
                    catref.setTitle(urlPath);
                    catref.setHref(urlPath);
                    top.addDataset(catref);
                }
            }
        }
        // now look for directories
        try (DirectoryStream<Path> ds = Files.newDirectoryStream(wantDir)) {
            for (Path dir : ds) {
                if (Files.isDirectory(dir)) {
                    String dfilename = dir.getFileName().toString();
                    // NOTE(review): appending matchRemaining AFTER the child
                    // directory name looks suspicious (the remaining path was
                    // already consumed by relLocation above) — confirm intended.
                    String urlPath = (matchRemaining.length() >= 1) ? dfilename + "/" + matchRemaining : dfilename;
                    CatalogRefBuilder catref = new CatalogRefBuilder(top);
                    catref.setTitle(urlPath);
                    catref.setHref(urlPath + "/"+ CATSCAN);
                    catref.addToList(Dataset.Properties, new Property("CatalogScan", "true"));
                    top.addDataset(catref);
                }
            }
        }
        return catBuilder;
    } }
|
public class class_name {
    // Annotated duplicate of the preceding block; the machine-generated
    // dependency comments are preserved verbatim.
    public CatalogBuilder makeCatalogFromDirectory(File baseDir, String matchRemaining, URI baseURI) throws IOException {
        String relLocation = (matchRemaining.length() >= 1) ? location + "/" + matchRemaining : location;
        String name = (matchRemaining.length() >= 1) ? getName() + "/" + matchRemaining : getName();
        File absLocation = new File( baseDir, relLocation);
        // it must be a directory
        Path wantDir = absLocation.toPath();
        if (!Files.exists(wantDir)) throw new FileNotFoundException("Requested catalog does not exist =" + absLocation);
        if (!Files.isDirectory(wantDir)) throw new FileNotFoundException("Not a directory =" + absLocation);
        // Setup and create catalog builder.
        CatalogBuilder catBuilder = new CatalogBuilder();
        catBuilder.setBaseURI(baseURI);
        assert this.getParentCatalog() != null;
        DatasetBuilder top = new DatasetBuilder(null);
        top.transferMetadata(this, true);
        top.setName(name);
        top.put(Dataset.Id, null); // no id for top
        catBuilder.addDataset(top);
        // first look for catalogs
        try (DirectoryStream<Path> ds = Files.newDirectoryStream(wantDir, "*.xml")) {
            for (Path p : ds) {
                if (!Files.isDirectory(p)) {
                    String pfilename = p.getFileName().toString();
                    String urlPath = pfilename;
                    //String path = dataDirComplete.length() == 0 ? filename : dataDirComplete + "/" + filename; // reletive starting from current directory
                    CatalogRefBuilder catref = new CatalogRefBuilder(top);
                    catref.setTitle(urlPath); // depends on control dependency: [if], data = [none]
                    catref.setHref(urlPath); // depends on control dependency: [if], data = [none]
                    top.addDataset(catref); // depends on control dependency: [if], data = [none]
                }
            }
        }
        // now look for directories
        try (DirectoryStream<Path> ds = Files.newDirectoryStream(wantDir)) {
            for (Path dir : ds) {
                if (Files.isDirectory(dir)) {
                    String dfilename = dir.getFileName().toString();
                    String urlPath = (matchRemaining.length() >= 1) ? dfilename + "/" + matchRemaining : dfilename;
                    CatalogRefBuilder catref = new CatalogRefBuilder(top);
                    catref.setTitle(urlPath);
                    catref.setHref(urlPath + "/"+ CATSCAN);
                    catref.addToList(Dataset.Properties, new Property("CatalogScan", "true"));
                    top.addDataset(catref);
                }
            }
        }
        return catBuilder;
    } }
|
public class class_name {
    /**
     * Generates an RFC 5626 flow token for the request: an HMAC over the
     * 5-tuple "transport_localAddr_localPort_remoteAddr_remotePort",
     * concatenated with "/" and the tuple itself, then Base64-encoded.
     *
     * NOTE(review): the hmac bytes are round-tripped through
     * new String(hmac).trim().getBytes(), which uses the platform default
     * charset and is lossy for arbitrary binary data — tokens are therefore
     * platform-dependent. Changing this would invalidate existing tokens, so
     * it is only flagged here; confirm before fixing.
     */
    private static String generateFlowToken(SipServletRequestImpl request) {
        StringBuffer arrayS = new StringBuffer();
        arrayS.append(request.getTransport());
        arrayS.append("_");
        arrayS.append(request.getLocalAddr());
        arrayS.append("_");
        arrayS.append(request.getLocalPort());
        arrayS.append("_");
        arrayS.append(request.getInitialRemoteAddr());
        arrayS.append("_");
        arrayS.append(request.getInitialRemotePort());
        if(logger.isDebugEnabled()) {
            logger.debug("Generating RFC 5626 Flow token from " + arrayS);
        }
        byte[] byteArrayS = arrayS.toString().trim().getBytes();
        byte[] hmac = mac.doFinal(byteArrayS);
        if(logger.isDebugEnabled()) {
            logger.debug("Generating RFC 5626 Flow token hmac is " + new String(hmac).trim());
        }
        hmac = new String(hmac).trim().getBytes();
        // Layout: hmac + "/" + tuple, packed into one byte array.
        byte[] concatDelimiter = "/".getBytes();
        byte[] byteConcat = new byte[hmac.length + concatDelimiter.length + byteArrayS.length];
        System.arraycopy(hmac, 0, byteConcat, 0, hmac.length);
        System.arraycopy(concatDelimiter, 0, byteConcat, hmac.length, concatDelimiter.length);
        System.arraycopy(byteArrayS, 0, byteConcat, hmac.length + concatDelimiter.length, byteArrayS.length);
        if(logger.isDebugEnabled()) {
            logger.debug("Generating RFC 5626 Flow token byteContact before base64 encoding is " + new String(byteConcat).trim());
        }
        // does not result in a 32 octet id though...
        byte[] base64Encoded = Base64.encodeBase64(byteConcat);
        if(logger.isDebugEnabled()) {
            logger.debug("Generating RFC 5626 Flow token byteContact after base64 encoding is " + new String(base64Encoded).trim());
        }
        return new String(base64Encoded).trim();
    } }
|
public class class_name {
    // Annotated duplicate of the preceding block; the machine-generated
    // dependency comments are preserved verbatim.
    private static String generateFlowToken(SipServletRequestImpl request) {
        StringBuffer arrayS = new StringBuffer();
        arrayS.append(request.getTransport());
        arrayS.append("_");
        arrayS.append(request.getLocalAddr());
        arrayS.append("_");
        arrayS.append(request.getLocalPort());
        arrayS.append("_");
        arrayS.append(request.getInitialRemoteAddr());
        arrayS.append("_");
        arrayS.append(request.getInitialRemotePort());
        if(logger.isDebugEnabled()) {
            logger.debug("Generating RFC 5626 Flow token from " + arrayS); // depends on control dependency: [if], data = [none]
        }
        byte[] byteArrayS = arrayS.toString().trim().getBytes();
        byte[] hmac = mac.doFinal(byteArrayS);
        if(logger.isDebugEnabled()) {
            logger.debug("Generating RFC 5626 Flow token hmac is " + new String(hmac).trim()); // depends on control dependency: [if], data = [none]
        }
        hmac = new String(hmac).trim().getBytes();
        byte[] concatDelimiter = "/".getBytes();
        byte[] byteConcat = new byte[hmac.length + concatDelimiter.length + byteArrayS.length];
        System.arraycopy(hmac, 0, byteConcat, 0, hmac.length);
        System.arraycopy(concatDelimiter, 0, byteConcat, hmac.length, concatDelimiter.length);
        System.arraycopy(byteArrayS, 0, byteConcat, hmac.length + concatDelimiter.length, byteArrayS.length);
        if(logger.isDebugEnabled()) {
            logger.debug("Generating RFC 5626 Flow token byteContact before base64 encoding is " + new String(byteConcat).trim()); // depends on control dependency: [if], data = [none]
        }
        // does not result in a 32 octet id though...
        byte[] base64Encoded = Base64.encodeBase64(byteConcat);
        if(logger.isDebugEnabled()) {
            logger.debug("Generating RFC 5626 Flow token byteContact after base64 encoding is " + new String(base64Encoded).trim()); // depends on control dependency: [if], data = [none]
        }
        return new String(base64Encoded).trim();
    } }
|
public class class_name {
    /**
     * Returns the connection's error stream, transparently wrapping it in a
     * GZIPInputStream when the response declares gzip content encoding.
     * If the gzip wrapper cannot be created, the raw stream is returned as is.
     *
     * @return the (possibly gzip-decoding) error stream, or null if none
     */
    private InputStream getErrorStream() {
        final InputStream raw = this.connection.getErrorStream();
        if (raw == null) {
            return null;
        }
        // Null-safe encoding check: equalsIgnoreCase on the literal handles
        // a missing Content-Encoding header.
        if (!"gzip".equalsIgnoreCase(this.connection.getContentEncoding())) {
            return raw;
        }
        try {
            return new GZIPInputStream(raw);
        } catch (IOException ignored) {
            // best effort — fall back to the undecoded stream
            return raw;
        }
    } }
|
public class class_name {
    // Annotated duplicate of the preceding block; the machine-generated
    // dependency comments are preserved verbatim.
    private InputStream getErrorStream() {
        InputStream errorStream = this.connection.getErrorStream();
        if (errorStream != null) {
            final String contentEncoding = this.connection.getContentEncoding();
            if (contentEncoding != null && contentEncoding.equalsIgnoreCase("gzip")) {
                try {
                    errorStream = new GZIPInputStream(errorStream); // depends on control dependency: [try], data = [none]
                } catch (IOException e) {
                    // just return the error stream as is
                } // depends on control dependency: [catch], data = [none]
            }
        }
        return errorStream;
    } }
|
public class class_name {
    /**
     * Runs a two-pass analysis of all records in {@code rr} against
     * {@code schema}: pass one accumulates per-column counters and min/max
     * values; pass two (only when {@code rr.resetSupported()}) rebuilds
     * histograms with at most {@code maxHistogramBuckets} buckets and merges
     * them into the per-column analyses.
     *
     * NOTE(review): if the reader yields no records, {@code counters} stays
     * null and {@code counters.size()} throws NPE — confirm callers guarantee
     * non-empty input.
     */
    public static DataAnalysis analyze(Schema schema, RecordReader rr, int maxHistogramBuckets){
        AnalysisAddFunction addFn = new AnalysisAddFunction(schema);
        List<AnalysisCounter> counters = null;
        while(rr.hasNext()){
            counters = addFn.apply(counters, rr.next());
        }
        double[][] minsMaxes = new double[counters.size()][2];
        List<ColumnType> columnTypes = schema.getColumnTypes();
        List<ColumnAnalysis> list = DataVecAnalysisUtils.convertCounters(counters, minsMaxes, columnTypes);
        //Do another pass collecting histogram values:
        List<HistogramCounter> histogramCounters = null;
        HistogramAddFunction add = new HistogramAddFunction(maxHistogramBuckets, schema, minsMaxes);
        if(rr.resetSupported()){
            rr.reset();
            while(rr.hasNext()){
                histogramCounters = add.apply(histogramCounters, rr.next());
            }
            DataVecAnalysisUtils.mergeCounters(list, histogramCounters);
        }
        return new DataAnalysis(schema, list);
    } }
|
public class class_name {
    // Annotated duplicate of the preceding block; the machine-generated
    // dependency comments are preserved verbatim.
    public static DataAnalysis analyze(Schema schema, RecordReader rr, int maxHistogramBuckets){
        AnalysisAddFunction addFn = new AnalysisAddFunction(schema);
        List<AnalysisCounter> counters = null;
        while(rr.hasNext()){
            counters = addFn.apply(counters, rr.next()); // depends on control dependency: [while], data = [none]
        }
        double[][] minsMaxes = new double[counters.size()][2];
        List<ColumnType> columnTypes = schema.getColumnTypes();
        List<ColumnAnalysis> list = DataVecAnalysisUtils.convertCounters(counters, minsMaxes, columnTypes);
        //Do another pass collecting histogram values:
        List<HistogramCounter> histogramCounters = null;
        HistogramAddFunction add = new HistogramAddFunction(maxHistogramBuckets, schema, minsMaxes);
        if(rr.resetSupported()){
            rr.reset(); // depends on control dependency: [if], data = [none]
            while(rr.hasNext()){
                histogramCounters = add.apply(histogramCounters, rr.next()); // depends on control dependency: [while], data = [none]
            }
            DataVecAnalysisUtils.mergeCounters(list, histogramCounters); // depends on control dependency: [if], data = [none]
        }
        return new DataAnalysis(schema, list);
    } }
|
public class class_name {
    /**
     * Scans a (possibly armored) PGP key-ring stream and returns the first
     * public key flagged as an encryption key, or null if none is found.
     *
     * @param content stream containing one or more public key rings
     * @return the first encryption-capable key, or null
     * @throws Exception on decode or key-ring parsing failure
     */
    public static PGPPublicKey getPublicKey(InputStream content) throws Exception {
        InputStream decoded = PGPUtil.getDecoderStream(content);
        PGPPublicKeyRingCollection rings =
                new PGPPublicKeyRingCollection(decoded, new BcKeyFingerprintCalculator());
        for (Iterator<PGPPublicKeyRing> ringIter = rings.getKeyRings(); ringIter.hasNext(); ) {
            for (Iterator<PGPPublicKey> keyIter = ringIter.next().getPublicKeys(); keyIter.hasNext(); ) {
                PGPPublicKey candidate = keyIter.next();
                if (candidate.isEncryptionKey()) {
                    return candidate; // first match wins, same as the flag-driven search
                }
            }
        }
        return null;
    } }
|
public class class_name {
    // Annotated duplicate of the preceding block; the machine-generated
    // dependency comments are preserved verbatim.
    public static PGPPublicKey getPublicKey(InputStream content) throws Exception {
        InputStream in = PGPUtil.getDecoderStream(content);
        PGPPublicKeyRingCollection keyRingCollection = new PGPPublicKeyRingCollection(in, new BcKeyFingerprintCalculator());
        PGPPublicKey key = null;
        Iterator<PGPPublicKeyRing> keyRings = keyRingCollection.getKeyRings();
        while(key == null && keyRings.hasNext()) {
            PGPPublicKeyRing keyRing = keyRings.next();
            Iterator<PGPPublicKey> keys = keyRing.getPublicKeys();
            while(key == null && keys.hasNext()) {
                PGPPublicKey current = keys.next();
                if(current.isEncryptionKey()) {
                    key = current; // depends on control dependency: [if], data = [none]
                }
            }
        }
        return key;
    } }
|
public class class_name {
    /**
     * Ensures a ZMQ event channel exists for the device: validates/normalizes
     * the ZMQ address, connects and registers the channel on first use
     * (raising API_NotificationServiceFailed if registration still failed),
     * or re-attaches the event to the already-connected channel otherwise.
     *
     * @throws DevFailed if the address is invalid or the channel cannot be set up
     */
    @Override
    protected void checkDeviceConnection(DeviceProxy deviceProxy,
            String attribute, DeviceData deviceData, String event_name) throws DevFailed {
        // Check if address is coherent (??)
        deviceData = checkZmqAddress(deviceData, deviceProxy);
        String deviceName = deviceProxy.fullName();
        ApiUtil.printTrace("checkDeviceConnection for " + deviceName);
        if (!device_channel_map.containsKey(deviceName)) {
            ApiUtil.printTrace("    Does NOT Exist");
            connect(deviceProxy, attribute, event_name, deviceData);
            // connect() should have populated the map; otherwise report failure.
            if (!device_channel_map.containsKey(deviceName)) {
                Except.throw_event_system_failed("API_NotificationServiceFailed",
                        "Failed to connect to event channel for device",
                        "EventConsumer.subscribe_event()");
            }
        }
        else {
            ApiUtil.printTrace(deviceName + " already connected.");
            ZMQutils.connectEvent(deviceProxy.get_tango_host(), deviceName,
                    attribute, deviceData.extractLongStringArray(), event_name,false);
        }
    } }
|
public class class_name {
    // Annotated duplicate of the preceding block; the machine-generated
    // dependency comments are preserved verbatim.
    @Override
    protected void checkDeviceConnection(DeviceProxy deviceProxy,
            String attribute, DeviceData deviceData, String event_name) throws DevFailed {
        // Check if address is coherent (??)
        deviceData = checkZmqAddress(deviceData, deviceProxy);
        String deviceName = deviceProxy.fullName();
        ApiUtil.printTrace("checkDeviceConnection for " + deviceName);
        if (!device_channel_map.containsKey(deviceName)) {
            ApiUtil.printTrace("    Does NOT Exist");
            connect(deviceProxy, attribute, event_name, deviceData);
            if (!device_channel_map.containsKey(deviceName)) {
                Except.throw_event_system_failed("API_NotificationServiceFailed",
                        "Failed to connect to event channel for device",
                        "EventConsumer.subscribe_event()"); // depends on control dependency: [if], data = [none]
            }
        }
        else {
            ApiUtil.printTrace(deviceName + " already connected.");
            ZMQutils.connectEvent(deviceProxy.get_tango_host(), deviceName,
                    attribute, deviceData.extractLongStringArray(), event_name,false);
        }
    } }
|
public class class_name {
    /**
     * Approximates the Hessian-vector product H.v by finite differences:
     * HdotV ~= (grad(x + h*v) - grad(x)) / h, where grad(x) is the supplied
     * curDerivative and grad(x + h*v) comes from re-running
     * calculateStochastic on the SAME batch (recalculatePrevBatch = true).
     * Scratch arrays are lazily allocated once and reused across calls.
     * On exit the previous derivative/value are restored so the caller
     * observes no state change beyond HdotV.
     * NOTE(review): one-time allocation messages go to System.out; consider
     * routing through the project logger.
     */
    private void getHdotVFiniteDifference(double[] x, double[] v, double[] curDerivative){
        double h = finiteDifferenceStepSize;
        double hInv = 1/h; // this avoids dividing too much since it's a bit more expensive than multiplying
        if (gradPerturbed == null) {
            gradPerturbed = new double[x.length];
            System.out.println("Setting approximate gradient.");
        }
        if (xPerturbed == null){
            xPerturbed = new double[x.length];
            System.out.println("Setting perturbed.");
        }
        if (HdotV == null) {
            HdotV = new double[x.length];
            System.out.println("Setting H dot V.");
        }
        // This adds h*v to x ---> x = x + h*v
        for( int i = 0;i<x.length;i++){
            xPerturbed[i] = x[i] + h*v[i];
        }
        double prevValue = value;
        recalculatePrevBatch = true;
        calculateStochastic(xPerturbed,null,thisBatch); // Call the calculate function without updating the batch
        // System.arraycopy(derivative, 0, gradPerturbed, 0, gradPerturbed.length);
        // This comes up with the approximate difference, and renormalizes it on h.
        for( int i = 0;i<x.length;i++){
            double tmp = (derivative[i]-curDerivative[i]);
            HdotV[i] = hInv*(tmp);
        }
        //Make sure the original derivative is in place
        System.arraycopy(curDerivative,0,derivative,0,derivative.length);
        value = prevValue;
        hasNewVals = false;
        recalculatePrevBatch = false;
        returnPreviousValues = false;
    } }
|
public class class_name {
    // Annotated duplicate of the preceding block; the machine-generated
    // dependency comments (including the stand-alone ones) are preserved verbatim.
    private void getHdotVFiniteDifference(double[] x, double[] v, double[] curDerivative){
        double h = finiteDifferenceStepSize;
        double hInv = 1/h; // this avoids dividing too much since it's a bit more expensive than multiplying
        if (gradPerturbed == null) {
            gradPerturbed = new double[x.length];
            // depends on control dependency: [if], data = [none]
            System.out.println("Setting approximate gradient.");
            // depends on control dependency: [if], data = [none]
        }
        if (xPerturbed == null){
            xPerturbed = new double[x.length];
            // depends on control dependency: [if], data = [none]
            System.out.println("Setting perturbed.");
            // depends on control dependency: [if], data = [none]
        }
        if (HdotV == null) {
            HdotV = new double[x.length];
            // depends on control dependency: [if], data = [none]
            System.out.println("Setting H dot V.");
            // depends on control dependency: [if], data = [none]
        }
        // This adds h*v to x ---> x = x + h*v
        for( int i = 0;i<x.length;i++){
            xPerturbed[i] = x[i] + h*v[i];
            // depends on control dependency: [for], data = [i]
        }
        double prevValue = value;
        recalculatePrevBatch = true;
        calculateStochastic(xPerturbed,null,thisBatch); // Call the calculate function without updating the batch
        // System.arraycopy(derivative, 0, gradPerturbed, 0, gradPerturbed.length);
        // This comes up with the approximate difference, and renormalizes it on h.
        for( int i = 0;i<x.length;i++){
            double tmp = (derivative[i]-curDerivative[i]);
            HdotV[i] = hInv*(tmp);
            // depends on control dependency: [for], data = [i]
        }
        //Make sure the original derivative is in place
        System.arraycopy(curDerivative,0,derivative,0,derivative.length);
        value = prevValue;
        hasNewVals = false;
        recalculatePrevBatch = false;
        returnPreviousValues = false;
    } }
|
public class class_name {
    /**
     * Creates a DocumentBuilder from a freshly configured factory with XXE
     * protections applied (via disableXXE).
     *
     * @return a new, XXE-hardened DocumentBuilder
     * @throws UtilException if the builder cannot be created
     */
    public static DocumentBuilder createDocumentBuilder() {
        final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        disableXXE(factory);
        try {
            return factory.newDocumentBuilder();
        } catch (Exception e) {
            // wrap with the original cause preserved
            throw new UtilException(e, "Create xml document error!");
        }
    } }
|
public class class_name {
    // Annotated duplicate of the preceding block; the machine-generated
    // dependency comments are preserved verbatim.
    public static DocumentBuilder createDocumentBuilder() {
        final DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        disableXXE(dbf);
        DocumentBuilder builder = null;
        try {
            builder = dbf.newDocumentBuilder();
            // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            throw new UtilException(e, "Create xml document error!");
        }
        // depends on control dependency: [catch], data = [none]
        return builder;
    } }
|
public class class_name {
    /**
     * Joins on {@code thread} until it dies or the clock reaches
     * {@code endTime} (epoch millis). A non-positive endTime means wait
     * forever.
     *
     * @throws InterruptedException if the current thread is interrupted while waiting
     * @throws TimeoutException     if endTime passes while the thread is still alive
     */
    public static void joinUntil(Clock clock, Thread thread, long endTime)
        throws InterruptedException, TimeoutException
    {
        if (endTime <= 0) {
            thread.join(); // unbounded wait
            return;
        }
        while (thread.isAlive()) {
            final long now = clock.currentTimeMillis();
            if (now >= endTime) {
                throw new TimeoutException("timeout reached while joining on: " + thread);
            }
            // bounded join; loop re-checks liveness and remaining time
            thread.join(endTime - now);
        }
    } }
|
public class class_name {
    // Annotated duplicate of the preceding block; the machine-generated
    // dependency comments are preserved verbatim.
    public static void joinUntil(Clock clock, Thread thread, long endTime)
        throws InterruptedException, TimeoutException
    {
        if(endTime <= 0)
            thread.join();
        else
        {
            while(thread.isAlive())
            {
                long now = clock.currentTimeMillis();
                if(now >= endTime)
                    throw new TimeoutException("timeout reached while joining on: " + thread);
                thread.join(endTime - now); // depends on control dependency: [while], data = [none]
            }
        }
    } }
|
public class class_name {
    /**
     * Acquires recovery-log ownership by locking BOTH lock files
     * (RECOVERY_FILE_1_NAME, then RECOVERY_FILE_2_NAME). The files are
     * created lazily on first call; creation and lock failures are reported
     * via Tr.error. Locking is all-or-nothing: if the second lock cannot be
     * obtained, the first is released before returning.
     *
     * @return LOCK_SUCCESS when both files are locked; otherwise the failing
     *         obtainLock result (LOCK_FAILURE by default, e.g. when the lock
     *         files could not be created)
     */
    public int lock()
    {
        if (tc.isEntryEnabled()) Tr.entry(tc, "lock");
        int result = LOCK_FAILURE;
        // Ensure that the target lock files exist.
        if (!_lockFilesExist)
        {
            if (tc.isDebugEnabled()) Tr.debug(tc,"Confirming/Creating the lock files");
            _handle1 = ensureFileExists(RECOVERY_FILE_1_NAME);
            if (_handle1 != null)
            {
                _handle2 = ensureFileExists(RECOVERY_FILE_2_NAME);
                if (_handle2 != null)
                {
                    _lockFilesExist = true;
                }
                else
                {
                    // Second file failed: report and discard the first handle
                    // so a later call retries creation from scratch.
                    Tr.error(tc,"CWRLS0004_RECOVERY_LOG_CREATE_FAILED", _lockDirectory + File.separator + RECOVERY_FILE_2_NAME);
                    _handle1 = null;
                }
            }
            else
            {
                Tr.error(tc,"CWRLS0004_RECOVERY_LOG_CREATE_FAILED", _lockDirectory + File.separator + RECOVERY_FILE_1_NAME);
            }
        }
        else
        {
            if (tc.isDebugEnabled()) Tr.debug(tc,"Already Confirmed/Created the lock files");
        }
        if (_lockFilesExist)
        {
            result = obtainLock(_handle1);
            if (result == LOCK_SUCCESS)
            {
                result = obtainLock(_handle2);
                if (result != LOCK_SUCCESS)
                {
                    if (result == LOCK_FAILURE)
                    {
                        Tr.error(tc,"CWRLS0005_RECOVERY_LOG_LOCK_FAILED", _lockDirectory + File.separator + RECOVERY_FILE_2_NAME);
                    }
                    // All-or-nothing: give back the first lock on any
                    // second-lock failure.
                    releaseLock(_handle1);
                }
            }
            else if (result == LOCK_FAILURE)
            {
                Tr.error(tc,"CWRLS0005_RECOVERY_LOG_LOCK_FAILED", _lockDirectory + File.separator + RECOVERY_FILE_1_NAME);
            }
        }
        if (tc.isEntryEnabled()) Tr.exit(tc, "lock",new Integer(result));
        return result;
    } }
|
public class class_name {
    // Annotated duplicate of the preceding block; the machine-generated
    // dependency comments are preserved verbatim.
    public int lock()
    {
        if (tc.isEntryEnabled()) Tr.entry(tc, "lock");
        int result = LOCK_FAILURE;
        // Ensure that the target lock files exist.
        if (!_lockFilesExist)
        {
            if (tc.isDebugEnabled()) Tr.debug(tc,"Confirming/Creating the lock files");
            _handle1 = ensureFileExists(RECOVERY_FILE_1_NAME); // depends on control dependency: [if], data = [none]
            if (_handle1 != null)
            {
                _handle2 = ensureFileExists(RECOVERY_FILE_2_NAME); // depends on control dependency: [if], data = [none]
                if (_handle2 != null)
                {
                    _lockFilesExist = true; // depends on control dependency: [if], data = [none]
                }
                else
                {
                    Tr.error(tc,"CWRLS0004_RECOVERY_LOG_CREATE_FAILED", _lockDirectory + File.separator + RECOVERY_FILE_2_NAME); // depends on control dependency: [if], data = [none]
                    _handle1 = null; // depends on control dependency: [if], data = [none]
                }
            }
            else
            {
                Tr.error(tc,"CWRLS0004_RECOVERY_LOG_CREATE_FAILED", _lockDirectory + File.separator + RECOVERY_FILE_1_NAME); // depends on control dependency: [if], data = [none]
            }
        }
        else
        {
            if (tc.isDebugEnabled()) Tr.debug(tc,"Already Confirmed/Created the lock files");
        }
        if (_lockFilesExist)
        {
            result = obtainLock(_handle1); // depends on control dependency: [if], data = [none]
            if (result == LOCK_SUCCESS)
            {
                result = obtainLock(_handle2); // depends on control dependency: [if], data = [none]
                if (result != LOCK_SUCCESS)
                {
                    if (result == LOCK_FAILURE)
                    {
                        Tr.error(tc,"CWRLS0005_RECOVERY_LOG_LOCK_FAILED", _lockDirectory + File.separator + RECOVERY_FILE_2_NAME); // depends on control dependency: [if], data = [none]
                    }
                    releaseLock(_handle1); // depends on control dependency: [if], data = [none]
                }
            }
            else if (result == LOCK_FAILURE)
            {
                Tr.error(tc,"CWRLS0005_RECOVERY_LOG_LOCK_FAILED", _lockDirectory + File.separator + RECOVERY_FILE_1_NAME); // depends on control dependency: [if], data = [none]
            }
        }
        if (tc.isEntryEnabled()) Tr.exit(tc, "lock",new Integer(result));
        return result;
    } }
|
public class class_name {
    /**
     * Builds a GeometryMetadata from a cursor positioned on a row whose
     * columns are, in order: geopackage id (0), table name (1), row id (2),
     * minX (3), maxX (4), minY (5), maxY (6), then optional minZ (7),
     * maxZ (8), minM (9), maxM (10), each set only when non-null.
     */
    public static GeometryMetadata createGeometryMetadata(Cursor cursor) {
        GeometryMetadata metadata = new GeometryMetadata();
        metadata.setGeoPackageId(cursor.getLong(0));
        metadata.setTableName(cursor.getString(1));
        metadata.setId(cursor.getLong(2));
        metadata.setMinX(cursor.getDouble(3));
        metadata.setMaxX(cursor.getDouble(4));
        metadata.setMinY(cursor.getDouble(5));
        metadata.setMaxY(cursor.getDouble(6));
        if (!cursor.isNull(7)) {
            metadata.setMinZ(cursor.getDouble(7));
        }
        if (!cursor.isNull(8)) {
            metadata.setMaxZ(cursor.getDouble(8));
        }
        if (!cursor.isNull(9)) {
            metadata.setMinM(cursor.getDouble(9));
        }
        if (!cursor.isNull(10)) {
            metadata.setMaxM(cursor.getDouble(10));
        }
        return metadata;
    } }
|
public class class_name {
    // Annotated duplicate of the preceding block; the machine-generated
    // dependency comments are preserved verbatim.
    public static GeometryMetadata createGeometryMetadata(Cursor cursor) {
        GeometryMetadata metadata = new GeometryMetadata();
        metadata.setGeoPackageId(cursor.getLong(0));
        metadata.setTableName(cursor.getString(1));
        metadata.setId(cursor.getLong(2));
        metadata.setMinX(cursor.getDouble(3));
        metadata.setMaxX(cursor.getDouble(4));
        metadata.setMinY(cursor.getDouble(5));
        metadata.setMaxY(cursor.getDouble(6));
        if (!cursor.isNull(7)) {
            metadata.setMinZ(cursor.getDouble(7)); // depends on control dependency: [if], data = [none]
        }
        if (!cursor.isNull(8)) {
            metadata.setMaxZ(cursor.getDouble(8)); // depends on control dependency: [if], data = [none]
        }
        if (!cursor.isNull(9)) {
            metadata.setMinM(cursor.getDouble(9)); // depends on control dependency: [if], data = [none]
        }
        if (!cursor.isNull(10)) {
            metadata.setMaxM(cursor.getDouble(10)); // depends on control dependency: [if], data = [none]
        }
        return metadata;
    } }
|
public class class_name {
    /**
     * Locates the structural start node for startURI in the current session.
     * When recurse is set, a method-agnostic lookup is tried first; if that
     * yields nothing (or recurse is false), an exact GET lookup is used.
     * Any lookup failure is wrapped as an INTERNAL_ERROR ApiException.
     *
     * @return the node found, or null when neither lookup matches
     */
    private StructuralNode getStartNode(URI startURI, boolean recurse) throws ApiException {
        StructuralNode startNode = null;
        try {
            if (recurse) {
                startNode = SessionStructure.find(Model.getSingleton().getSession().getSessionId(), startURI, "", "");
            }
            if (startNode == null) {
                startNode = SessionStructure.find(Model.getSingleton().getSession().getSessionId(), startURI, "GET", "");
            }
        } catch (Exception e) {
            throw new ApiException(ApiException.Type.INTERNAL_ERROR, e);
        }
        return startNode;
    } }
|
public class class_name {
    // Annotated duplicate of the preceding block; the machine-generated
    // dependency comments are preserved verbatim.
    private StructuralNode getStartNode(URI startURI, boolean recurse) throws ApiException {
        StructuralNode startNode = null;
        try {
            if (recurse) {
                startNode = SessionStructure.find(Model.getSingleton().getSession().getSessionId(), startURI, "", ""); // depends on control dependency: [if], data = [none]
            }
            if (startNode == null) {
                startNode = SessionStructure.find(Model.getSingleton().getSession().getSessionId(), startURI, "GET", ""); // depends on control dependency: [if], data = [none]
            }
        } catch (Exception e) {
            throw new ApiException(ApiException.Type.INTERNAL_ERROR, e);
        }
        return startNode;
    } }
|
public class class_name {
    /**
     * Merges the given engineering-object model with the model referenced by
     * {@code field} (loaded via loadReferencedModel) and, on success,
     * converts the merge result back to an engineering object.
     *
     * NOTE(review): assigning the result back to the 'model' PARAMETER has no
     * effect outside this method — Java passes references by value, so the
     * caller never sees result.toEngineeringObject(). Unless performMerge has
     * the intended side effects on its arguments, this method silently drops
     * its result; confirm whether it should return the merged model instead.
     */
    private void mergeEngineeringObjectWithReferencedModel(Field field, EngineeringObjectModelWrapper model) {
        AdvancedModelWrapper result = performMerge(loadReferencedModel(model, field), model);
        if (result != null) {
            model = result.toEngineeringObject();
        }
    } }
|
public class class_name {
    // Annotated duplicate of the preceding block; the machine-generated
    // dependency comments are preserved verbatim.
    private void mergeEngineeringObjectWithReferencedModel(Field field, EngineeringObjectModelWrapper model) {
        AdvancedModelWrapper result = performMerge(loadReferencedModel(model, field), model);
        if (result != null) {
            model = result.toEngineeringObject(); // depends on control dependency: [if], data = [none]
        }
    } }
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.