code stringlengths 130 281k | code_dependency stringlengths 182 306k |
|---|---|
public class class_name {
@Override
public ChronoPeriod plus(TemporalAmount amountToAdd) {
    // Only another ChronoPeriodImpl of the very same chronology can be added;
    // anything else is rejected with a DateTimeException.
    if (!(amountToAdd instanceof ChronoPeriodImpl)) {
        throw new DateTimeException("Unable to add amount: " + amountToAdd);
    }
    ChronoPeriodImpl other = (ChronoPeriodImpl) amountToAdd;
    if (!other.getChronology().equals(getChronology())) {
        throw new DateTimeException("Unable to add amount: " + amountToAdd);
    }
    // Overflow-checked field-wise addition of the two periods.
    return new ChronoPeriodImpl(
            chronology,
            Jdk8Methods.safeAdd(years, other.years),
            Jdk8Methods.safeAdd(months, other.months),
            Jdk8Methods.safeAdd(days, other.days));
} } | public class class_name {
@Override
public ChronoPeriod plus(TemporalAmount amountToAdd) {
if (amountToAdd instanceof ChronoPeriodImpl) {
ChronoPeriodImpl amount = (ChronoPeriodImpl) amountToAdd;
if (amount.getChronology().equals(getChronology())) {
return new ChronoPeriodImpl(
chronology,
Jdk8Methods.safeAdd(years, amount.years),
Jdk8Methods.safeAdd(months, amount.months),
Jdk8Methods.safeAdd(days, amount.days)); // depends on control dependency: [if], data = [none]
}
}
throw new DateTimeException("Unable to add amount: " + amountToAdd);
} } |
public class class_name {
/**
 * Copies every {@code log4j:data} child of {@code node} into {@code props}
 * as a name/value pair. Shared by the MDC and properties tag handlers,
 * which previously duplicated this loop verbatim.
 */
private static void addDataProperties(Node node, Hashtable props) {
    NodeList propertyList = node.getChildNodes();
    int propertyLength = propertyList.getLength();
    for (int i = 0; i < propertyLength; i++) {
        Node property = propertyList.item(i);
        if (property.getNodeName().equalsIgnoreCase("log4j:data")) {
            String name =
                property.getAttributes().getNamedItem("name").getNodeValue();
            String value =
                property.getAttributes().getNamedItem("value").getNodeValue();
            props.put(name, value);
        }
    }
}

/**
 * Decodes all logging events contained in a log4j XML event-set document.
 *
 * <p>For each {@code log4j:event} child of the {@code log4j:eventSet}
 * element, the message, NDC, MDC, throwable, locationinfo and properties
 * child tags are parsed, the receiver's {@code additionalProperties} are
 * merged in (overriding parsed values), and the resulting
 * {@code LoggingEvent} is appended to the returned Vector.
 *
 * <p>BUG FIX: the additionalProperties merge used to sit inside the
 * child-node loop and therefore ran once per child element of every event;
 * it only needs to run once per event, after all children are parsed. The
 * final properties content is unchanged.
 *
 * @param document parsed log4j XML document; assumed to contain a
 *        {@code log4j:eventSet} element (NPE otherwise, as before)
 * @return a Vector of LoggingEvent, one per event element
 */
private Vector decodeEvents(final Document document) {
    Vector events = new Vector();
    Logger logger;
    long timeStamp;
    Level level;
    String threadName;
    Object message = null;
    String ndc = null;
    String[] exception = null;
    String className = null;
    String methodName = null;
    String fileName = null;
    String lineNumber = null;
    Hashtable properties = null;
    NodeList nl = document.getElementsByTagName("log4j:eventSet");
    Node eventSet = nl.item(0);
    NodeList eventList = eventSet.getChildNodes();
    for (int eventIndex = 0; eventIndex < eventList.getLength();
        eventIndex++) {
        Node eventNode = eventList.item(eventIndex);
        // ignore carriage returns in xml
        if (eventNode.getNodeType() != Node.ELEMENT_NODE) {
            continue;
        }
        logger = Logger.getLogger(
            eventNode.getAttributes().getNamedItem("logger").getNodeValue());
        timeStamp = Long.parseLong(
            eventNode.getAttributes().getNamedItem("timestamp").getNodeValue());
        level = Level.toLevel(
            eventNode.getAttributes().getNamedItem("level").getNodeValue());
        threadName =
            eventNode.getAttributes().getNamedItem("thread").getNodeValue();
        NodeList list = eventNode.getChildNodes();
        int listLength = list.getLength();
        if (listLength == 0) {
            continue;
        }
        // Tag names are mutually exclusive, so an else-if chain is
        // behaviorally identical to the original independent ifs.
        for (int y = 0; y < listLength; y++) {
            Node child = list.item(y);
            String tagName = child.getNodeName();
            if (tagName.equalsIgnoreCase("log4j:message")) {
                message = getCData(child);
            } else if (tagName.equalsIgnoreCase("log4j:NDC")) {
                ndc = getCData(child);
            } else if (tagName.equalsIgnoreCase("log4j:MDC")) {
                // still support receiving of MDC and convert to properties
                properties = new Hashtable();
                addDataProperties(child, properties);
            } else if (tagName.equalsIgnoreCase("log4j:throwable")) {
                String exceptionString = getCData(child);
                if (exceptionString != null && !exceptionString.trim().equals("")) {
                    exception = new String[] {exceptionString.trim()};
                }
            } else if (tagName.equalsIgnoreCase("log4j:locationinfo")) {
                className =
                    child.getAttributes().getNamedItem("class").getNodeValue();
                methodName =
                    child.getAttributes().getNamedItem("method").getNodeValue();
                fileName =
                    child.getAttributes().getNamedItem("file").getNodeValue();
                lineNumber =
                    child.getAttributes().getNamedItem("line").getNodeValue();
            } else if (tagName.equalsIgnoreCase("log4j:properties")) {
                if (properties == null) {
                    properties = new Hashtable();
                }
                addDataProperties(child, properties);
            }
        }
        /*
         * We add all the additional properties to the properties
         * hashtable. Override properties that already exist.
         * (Moved out of the child loop — see method javadoc.)
         */
        if (additionalProperties.size() > 0) {
            if (properties == null) {
                // copy-construction already contains every additional
                // property; the original also re-put them all redundantly
                properties = new Hashtable(additionalProperties);
            } else {
                Iterator i = additionalProperties.entrySet().iterator();
                while (i.hasNext()) {
                    Map.Entry e = (Map.Entry) i.next();
                    properties.put(e.getKey(), e.getValue());
                }
            }
        }
        LocationInfo info;
        if ((fileName != null)
            || (className != null)
            || (methodName != null)
            || (lineNumber != null)) {
            info = new LocationInfo(fileName, className, methodName, lineNumber);
        } else {
            info = LocationInfo.NA_LOCATION_INFO;
        }
        ThrowableInformation throwableInfo = null;
        if (exception != null) {
            throwableInfo = new ThrowableInformation(exception);
        }
        LoggingEvent loggingEvent = new LoggingEvent(null,
            logger, timeStamp, level, message,
            threadName,
            throwableInfo,
            ndc,
            info,
            properties);
        events.add(loggingEvent);
        // reset per-event state before decoding the next event
        message = null;
        ndc = null;
        exception = null;
        className = null;
        methodName = null;
        fileName = null;
        lineNumber = null;
        properties = null;
    }
    return events;
} } | public class class_name {
private Vector decodeEvents(final Document document) {
Vector events = new Vector();
Logger logger;
long timeStamp;
Level level;
String threadName;
Object message = null;
String ndc = null;
String[] exception = null;
String className = null;
String methodName = null;
String fileName = null;
String lineNumber = null;
Hashtable properties = null;
NodeList nl = document.getElementsByTagName("log4j:eventSet");
Node eventSet = nl.item(0);
NodeList eventList = eventSet.getChildNodes();
for (int eventIndex = 0; eventIndex < eventList.getLength();
eventIndex++) {
Node eventNode = eventList.item(eventIndex);
//ignore carriage returns in xml
if (eventNode.getNodeType() != Node.ELEMENT_NODE) {
continue;
}
logger = Logger.getLogger(eventNode.getAttributes().getNamedItem("logger").getNodeValue()); // depends on control dependency: [for], data = [none]
timeStamp = Long.parseLong(eventNode.getAttributes().getNamedItem("timestamp").getNodeValue()); // depends on control dependency: [for], data = [none]
level = Level.toLevel(eventNode.getAttributes().getNamedItem("level").getNodeValue()); // depends on control dependency: [for], data = [none]
threadName = eventNode.getAttributes().getNamedItem("thread").getNodeValue(); // depends on control dependency: [for], data = [none]
NodeList list = eventNode.getChildNodes();
int listLength = list.getLength();
if (listLength == 0) {
continue;
}
for (int y = 0; y < listLength; y++) {
String tagName = list.item(y).getNodeName();
if (tagName.equalsIgnoreCase("log4j:message")) {
message = getCData(list.item(y)); // depends on control dependency: [if], data = [none]
}
if (tagName.equalsIgnoreCase("log4j:NDC")) {
ndc = getCData(list.item(y)); // depends on control dependency: [if], data = [none]
}
//still support receiving of MDC and convert to properties
if (tagName.equalsIgnoreCase("log4j:MDC")) {
properties = new Hashtable(); // depends on control dependency: [if], data = [none]
NodeList propertyList = list.item(y).getChildNodes();
int propertyLength = propertyList.getLength();
for (int i = 0; i < propertyLength; i++) {
String propertyTag = propertyList.item(i).getNodeName();
if (propertyTag.equalsIgnoreCase("log4j:data")) {
Node property = propertyList.item(i);
String name =
property.getAttributes().getNamedItem("name").getNodeValue();
String value =
property.getAttributes().getNamedItem("value").getNodeValue();
properties.put(name, value); // depends on control dependency: [if], data = [none]
}
}
}
if (tagName.equalsIgnoreCase("log4j:throwable")) {
String exceptionString = getCData(list.item(y));
if (exceptionString != null && !exceptionString.trim().equals("")) {
exception = new String[] {exceptionString.trim()
}; // depends on control dependency: [if], data = [none]
}
}
if (tagName.equalsIgnoreCase("log4j:locationinfo")) {
className =
list.item(y).getAttributes().getNamedItem("class").getNodeValue(); // depends on control dependency: [if], data = [none]
methodName =
list.item(y).getAttributes().getNamedItem("method").getNodeValue(); // depends on control dependency: [if], data = [none]
fileName =
list.item(y).getAttributes().getNamedItem("file").getNodeValue(); // depends on control dependency: [if], data = [none]
lineNumber =
list.item(y).getAttributes().getNamedItem("line").getNodeValue(); // depends on control dependency: [if], data = [none]
}
if (tagName.equalsIgnoreCase("log4j:properties")) {
if (properties == null) {
properties = new Hashtable(); // depends on control dependency: [if], data = [none]
}
NodeList propertyList = list.item(y).getChildNodes();
int propertyLength = propertyList.getLength();
for (int i = 0; i < propertyLength; i++) {
String propertyTag = propertyList.item(i).getNodeName();
if (propertyTag.equalsIgnoreCase("log4j:data")) {
Node property = propertyList.item(i);
String name =
property.getAttributes().getNamedItem("name").getNodeValue();
String value =
property.getAttributes().getNamedItem("value").getNodeValue();
properties.put(name, value); // depends on control dependency: [if], data = [none]
}
}
}
/**
* We add all the additional properties to the properties
* hashtable. Override properties that already exist
*/
if (additionalProperties.size() > 0) {
if (properties == null) {
properties = new Hashtable(additionalProperties); // depends on control dependency: [if], data = [none]
}
Iterator i = additionalProperties.entrySet().iterator();
while (i.hasNext()) {
Map.Entry e = (Map.Entry) i.next();
properties.put(e.getKey(), e.getValue()); // depends on control dependency: [while], data = [none]
}
}
}
LocationInfo info;
if ((fileName != null)
|| (className != null)
|| (methodName != null)
|| (lineNumber != null)) {
info = new LocationInfo(fileName, className, methodName, lineNumber); // depends on control dependency: [if], data = [none]
} else {
info = LocationInfo.NA_LOCATION_INFO; // depends on control dependency: [if], data = [none]
}
ThrowableInformation throwableInfo = null;
if (exception != null) {
throwableInfo = new ThrowableInformation(exception); // depends on control dependency: [if], data = [(exception]
}
LoggingEvent loggingEvent = new LoggingEvent(null,
logger, timeStamp, level, message,
threadName,
throwableInfo,
ndc,
info,
properties);
events.add(loggingEvent); // depends on control dependency: [for], data = [none]
message = null; // depends on control dependency: [for], data = [none]
ndc = null; // depends on control dependency: [for], data = [none]
exception = null; // depends on control dependency: [for], data = [none]
className = null; // depends on control dependency: [for], data = [none]
methodName = null; // depends on control dependency: [for], data = [none]
fileName = null; // depends on control dependency: [for], data = [none]
lineNumber = null; // depends on control dependency: [for], data = [none]
properties = null; // depends on control dependency: [for], data = [none]
}
return events;
} } |
public class class_name {
/**
 * Executes an update or delete CQL query and returns the affected-row
 * count reported by the server.
 *
 * <p>Deliberately best-effort: any failure is logged and reported as zero
 * affected rows rather than being propagated to the caller.
 *
 * @param cqlQuery the CQL statement to execute
 * @return the number of affected rows, or 0 if execution fails
 */
public int executeUpdateDeleteQuery(String cqlQuery) {
    if (log.isDebugEnabled()) {
        log.debug("Executing cql query {}.", cqlQuery);
    }
    try {
        CqlResult result = (CqlResult) executeCQLQuery(cqlQuery, true);
        return result.getNum();
    } catch (Exception e) {
        // Fixed message: was "updated query: {}, Caused by: . " with a
        // dangling "Caused by"; the throwable is logged via the last arg.
        log.error("Error while executing update/delete query: {}.", cqlQuery, e);
        return 0;
    }
} } | public class class_name {
public int executeUpdateDeleteQuery(String cqlQuery) {
if (log.isDebugEnabled()) {
log.debug("Executing cql query {}.", cqlQuery); // depends on control dependency: [if], data = [none]
}
try {
CqlResult result = (CqlResult) executeCQLQuery(cqlQuery, true);
return result.getNum(); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
log.error("Error while executing updated query: {}, Caused by: . ", cqlQuery, e);
return 0;
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
@SuppressWarnings("unchecked")
public synchronized static <T extends TunableConstants> T get(Properties p, Class<T> c) {
    // Start from the shared default instance for this constants class.
    T cfg = getDefault(c);
    // Tuning is applied only when both the global marker and a
    // class-specific ".tuning" key are present in the properties.
    boolean tuned = p != null
            && p.containsKey(TUNE_MARKER)
            && p.containsKey(cfg.getClass().getName() + ".tuning");
    if (tuned) {
        // Clone first so the shared default instance stays untouched.
        cfg = (T) cfg.clone();
        apply(p, cfg);
    }
    return cfg;
} } | public class class_name {
@SuppressWarnings("unchecked")
public synchronized static <T extends TunableConstants> T get(Properties p, Class<T> c) {
T cfg = getDefault(c);
if (p != null
&& p.containsKey(TUNE_MARKER)
&& p.containsKey(cfg.getClass().getName() + ".tuning")) {
cfg = (T) cfg.clone(); // depends on control dependency: [if], data = [none]
apply(p, cfg); // depends on control dependency: [if], data = [(p]
}
return cfg;
} } |
public class class_name {
/**
 * Consumes the containers nested inside the given container element and
 * refreshes the container level info. If this introduced containers not
 * previously known, a gallery data update is scheduled.
 *
 * @param containerElement the element whose nested containers to initialize
 */
public void initializeSubContainers(CmsContainerPageElementPanel containerElement) {
    int sizeBefore = m_targetContainers.size();
    m_targetContainers.putAll(
        m_containerpageUtil.consumeContainers(m_containers, containerElement.getElement()));
    updateContainerLevelInfo();
    boolean containersAdded = m_targetContainers.size() > sizeBefore;
    if (containersAdded) {
        // in case new containers have been added, the gallery data needs to be updated
        scheduleGalleryUpdate();
    }
} } | public class class_name {
public void initializeSubContainers(CmsContainerPageElementPanel containerElement) {
int containerCount = m_targetContainers.size();
m_targetContainers.putAll(m_containerpageUtil.consumeContainers(m_containers, containerElement.getElement()));
updateContainerLevelInfo();
if (m_targetContainers.size() > containerCount) {
// in case new containers have been added, the gallery data needs to be updated
scheduleGalleryUpdate(); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
/**
 * Returns {@code true} when the runtime class of {@code object} has exactly
 * the given fully qualified name.
 *
 * <p>NOTE: despite its name this is an exact class-name comparison, not an
 * {@code instanceof} test — instances of subclasses of {@code className}
 * yield {@code false}. Documented here because callers may assume otherwise.
 *
 * @param object the object to inspect; must not be null
 * @param className fully qualified class name to compare against
 *        (a null value simply yields {@code false})
 * @return whether the object's class name equals {@code className}
 */
public static boolean objectInstanceOf(Object object, String className) {
    AssertUtils.assertNotNull(object);
    // Removed redundant "== true" and the intermediate flag/raw Class local.
    return object.getClass().getName().equals(className);
} } | public class class_name {
public static boolean objectInstanceOf(Object object, String className) {
AssertUtils.assertNotNull(object);
boolean result = false;
Class clazz = object.getClass();
if (clazz.getName().equals(className) == true) {
result = true;
// depends on control dependency: [if], data = [none]
}
return result;
} } |
public class class_name {
/**
 * Builds a fresh JAXB {@code Marshaller} bound to the given class.
 *
 * @param clazz the class to create a JAXB context for
 * @return a new marshaller for {@code clazz}
 * @throws AllureException if the context or marshaller cannot be created
 */
public static Marshaller createMarshallerForClass(Class<?> clazz) {
    try {
        JAXBContext context = JAXBContext.newInstance(clazz);
        return context.createMarshaller();
    } catch (JAXBException e) {
        throw new AllureException("Can't create marshaller for class " + clazz, e);
    }
} } | public class class_name {
public static Marshaller createMarshallerForClass(Class<?> clazz) {
try {
return JAXBContext.newInstance(clazz).createMarshaller(); // depends on control dependency: [try], data = [none]
} catch (JAXBException e) {
throw new AllureException("Can't create marshaller for class " + clazz, e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
/**
 * Builds the final ArtifactName, letting an "artifact" coordinate string of
 * the form groupId:artifactId:version[:packaging][:classifier] override the
 * individually configured fields.
 *
 * <p>The switch below relies on INTENTIONAL fall-through: matching on the
 * number of segments, it assigns from the last segment down to the first,
 * so a shorter coordinate (e.g. "g:a") still populates the leading fields.
 * Empty (whitespace-only) segments leave the builder's field value in place.
 *
 * @return the assembled, immutable artifact name
 */
public ArtifactName build() {
String groupId = this.groupId;
String artifactId = this.artifactId;
String classifier = this.classifier;
String packaging = this.packaging;
String version = this.version;
if (artifact != null && !artifact.isEmpty()) {
final String[] artifactSegments = artifact.split(":");
// groupId:artifactId:version[:packaging][:classifier].
String value;
switch (artifactSegments.length) {
case 5:
value = artifactSegments[4].trim();
if (!value.isEmpty()) {
classifier = value;
}
// falls through
case 4:
value = artifactSegments[3].trim();
if (!value.isEmpty()) {
packaging = value;
}
// falls through
case 3:
value = artifactSegments[2].trim();
if (!value.isEmpty()) {
version = value;
}
// falls through
case 2:
value = artifactSegments[1].trim();
if (!value.isEmpty()) {
artifactId = value;
}
// falls through
case 1:
value = artifactSegments[0].trim();
if (!value.isEmpty()) {
groupId = value;
}
}
}
return new ArtifactNameImpl(groupId, artifactId, classifier, packaging, version);
} } | public class class_name {
public ArtifactName build() {
String groupId = this.groupId;
String artifactId = this.artifactId;
String classifier = this.classifier;
String packaging = this.packaging;
String version = this.version;
if (artifact != null && !artifact.isEmpty()) {
final String[] artifactSegments = artifact.split(":");
// groupId:artifactId:version[:packaging][:classifier].
String value;
switch (artifactSegments.length) {
case 5:
value = artifactSegments[4].trim();
if (!value.isEmpty()) {
classifier = value; // depends on control dependency: [if], data = [none]
}
case 4:
value = artifactSegments[3].trim();
if (!value.isEmpty()) {
packaging = value; // depends on control dependency: [if], data = [none]
}
case 3:
value = artifactSegments[2].trim();
if (!value.isEmpty()) {
version = value; // depends on control dependency: [if], data = [none]
}
case 2:
value = artifactSegments[1].trim();
if (!value.isEmpty()) {
artifactId = value; // depends on control dependency: [if], data = [none]
}
case 1:
value = artifactSegments[0].trim();
if (!value.isEmpty()) {
groupId = value; // depends on control dependency: [if], data = [none]
}
}
}
return new ArtifactNameImpl(groupId, artifactId, classifier, packaging, version);
} } |
public class class_name {
/**
 * Assembles the HTTP body parameters for this request: the mandatory
 * itemType/itemId/time values plus the optional cascadeCreate flag, which
 * is only transmitted when it has been explicitly set.
 *
 * @return a mutable map of body parameter names to values
 */
@Override
public Map<String, Object> getBodyParameters() {
    HashMap<String, Object> body = new HashMap<String, Object>();
    body.put("itemType", this.itemType);
    body.put("itemId", this.itemId);
    body.put("time", this.time);
    // optional flag: omitted entirely when not configured
    if (this.cascadeCreate != null) {
        body.put("cascadeCreate", this.cascadeCreate);
    }
    return body;
} } | public class class_name {
@Override
public Map<String, Object> getBodyParameters() {
HashMap<String, Object> params = new HashMap<String, Object>();
params.put("itemType", this.itemType);
params.put("itemId", this.itemId);
params.put("time", this.time);
if (this.cascadeCreate!=null) {
params.put("cascadeCreate", this.cascadeCreate); // depends on control dependency: [if], data = [none]
}
return params;
} } |
public class class_name {
/**
 * Returns the metrics list, lazily creating an empty internal list on
 * first access so callers never observe {@code null}.
 *
 * @return the live (possibly empty) list of metric collection types
 */
public java.util.List<MetricCollectionType> getMetrics() {
    if (this.metrics == null) {
        // lazy initialization with the SDK's internal list type
        this.metrics = new com.amazonaws.internal.SdkInternalList<MetricCollectionType>();
    }
    return this.metrics;
} } | public class class_name {
public java.util.List<MetricCollectionType> getMetrics() {
if (metrics == null) {
metrics = new com.amazonaws.internal.SdkInternalList<MetricCollectionType>(); // depends on control dependency: [if], data = [none]
}
return metrics;
} } |
public class class_name {
/**
 * Returns the name of the bus this object originated on, emitting paired
 * entry/exit trace records when component tracing is enabled.
 *
 * @return the originating bus name held in {@code _busName}
 */
public String getOriginatingBus()
{
    final boolean tracing = TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled();
    if (tracing)
    {
        SibTr.entry(this, tc, "getOriginatingBus");
        SibTr.exit(this, tc, "getOriginatingBus", _busName);
    }
    return _busName;
} } | public class class_name {
public String getOriginatingBus()
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
{
SibTr.entry(this, tc, "getOriginatingBus"); // depends on control dependency: [if], data = [none]
SibTr.exit(this, tc, "getOriginatingBus", _busName); // depends on control dependency: [if], data = [none]
}
return _busName;
} } |
public class class_name {
/**
 * Transfers up to {@code numBytesMax} bytes from {@code in}, passes them
 * through {@code consumeBytes}, and writes the result to {@code out}.
 *
 * @param out destination buffer
 * @param in source buffer; may be null, in which case nothing is copied
 * @param numBytesMax upper bound on the number of bytes consumed
 * @return the number of bytes written to {@code out} (0 when in is null)
 */
public final static int put(IoBuffer out, IoBuffer in, int numBytesMax) {
if (log.isTraceEnabled()) {
log.trace("Put\nin buffer: {}\nout buffer: {}\nmax bytes: {}", new Object[] { out, in, numBytesMax });
}
int numBytesRead = 0;
if (in != null) {
// NOTE(review): this uses in.limit(), not in.remaining() — if the buffer's
// position is ever non-zero here, the bulk in.get(inBuf) below can
// underflow; presumably callers always pass a freshly flipped buffer.
// Confirm before changing.
int limit = Math.min(in.limit(), numBytesMax);
byte[] inBuf = new byte[limit];
// NOTE(review): the trace calls below are not guarded by isTraceEnabled()
// like the first one — argument arrays are built on every call.
log.trace("Bulk get size: {}", limit);
in.get(inBuf);
byte[] outBuf = consumeBytes(inBuf, numBytesMax);
out.put(outBuf);
numBytesRead = outBuf.length;
log.trace("In pos: {}", in.position());
}
log.trace("Bytes put: {}", numBytesRead);
return numBytesRead;
} } | public class class_name {
public final static int put(IoBuffer out, IoBuffer in, int numBytesMax) {
if (log.isTraceEnabled()) {
log.trace("Put\nin buffer: {}\nout buffer: {}\nmax bytes: {}", new Object[] { out, in, numBytesMax }); // depends on control dependency: [if], data = [none]
}
int numBytesRead = 0;
if (in != null) {
int limit = Math.min(in.limit(), numBytesMax);
byte[] inBuf = new byte[limit];
log.trace("Bulk get size: {}", limit); // depends on control dependency: [if], data = [none]
in.get(inBuf); // depends on control dependency: [if], data = [(in]
byte[] outBuf = consumeBytes(inBuf, numBytesMax);
out.put(outBuf); // depends on control dependency: [if], data = [none]
numBytesRead = outBuf.length; // depends on control dependency: [if], data = [none]
log.trace("In pos: {}", in.position()); // depends on control dependency: [if], data = [none]
}
log.trace("Bytes put: {}", numBytesRead);
return numBytesRead;
} } |
public class class_name {
/**
 * Resolves one suffix margin: an explicitly configured (non-negative)
 * value wins; otherwise the shared fallback is used.
 */
private static int resolveSuffixMargin(int configured, int fallback) {
    return configured < 0 ? fallback : configured;
}

/**
 * Initializes the left/right margins of every suffix label (day, hour,
 * minute, second, millisecond).
 *
 * <p>Resolution rule, previously duplicated ~20 times and now factored
 * into {@link #resolveSuffixMargin(int, int)}: a margin that was
 * explicitly configured (>= 0) is kept; an unconfigured margin (< 0)
 * falls back to the global {@code mSuffixLRMargin} when that is set,
 * else to the dp default. Suffixes that are hidden or whose text has
 * zero width get zero margins. A suffix's right margin only applies
 * when a smaller time unit is displayed after it.
 */
private void initSuffixMargin() {
    int defSuffixLRMargin = Utils.dp2px(mContext, DEFAULT_SUFFIX_LR_MARGIN);
    // Global LR margin wins over the dp default when configured.
    int fallback = (mSuffixLRMargin >= 0) ? mSuffixLRMargin : defSuffixLRMargin;
    if (isShowDay && mSuffixDayTextWidth > 0) {
        mSuffixDayLeftMargin = resolveSuffixMargin(mSuffixDayLeftMargin, fallback);
        mSuffixDayRightMargin = resolveSuffixMargin(mSuffixDayRightMargin, fallback);
    } else {
        mSuffixDayLeftMargin = 0;
        mSuffixDayRightMargin = 0;
    }
    if (isShowHour && mSuffixHourTextWidth > 0) {
        mSuffixHourLeftMargin = resolveSuffixMargin(mSuffixHourLeftMargin, fallback);
        mSuffixHourRightMargin = resolveSuffixMargin(mSuffixHourRightMargin, fallback);
    } else {
        mSuffixHourLeftMargin = 0;
        mSuffixHourRightMargin = 0;
    }
    if (isShowMinute && mSuffixMinuteTextWidth > 0) {
        mSuffixMinuteLeftMargin = resolveSuffixMargin(mSuffixMinuteLeftMargin, fallback);
        // right margin only applies when seconds follow the minutes
        mSuffixMinuteRightMargin = isShowSecond
                ? resolveSuffixMargin(mSuffixMinuteRightMargin, fallback)
                : 0;
    } else {
        mSuffixMinuteLeftMargin = 0;
        mSuffixMinuteRightMargin = 0;
    }
    if (isShowSecond) {
        if (mSuffixSecondTextWidth > 0) {
            mSuffixSecondLeftMargin = resolveSuffixMargin(mSuffixSecondLeftMargin, fallback);
            // right margin only applies when milliseconds follow the seconds
            mSuffixSecondRightMargin = isShowMillisecond
                    ? resolveSuffixMargin(mSuffixSecondRightMargin, fallback)
                    : 0;
        } else {
            mSuffixSecondLeftMargin = 0;
            mSuffixSecondRightMargin = 0;
        }
        mSuffixMillisecondLeftMargin = (isShowMillisecond && mSuffixMillisecondTextWidth > 0)
                ? resolveSuffixMargin(mSuffixMillisecondLeftMargin, fallback)
                : 0;
    } else {
        // milliseconds are never shown without seconds
        mSuffixSecondLeftMargin = 0;
        mSuffixSecondRightMargin = 0;
        mSuffixMillisecondLeftMargin = 0;
    }
} } | public class class_name {
private void initSuffixMargin() {
int defSuffixLRMargin = Utils.dp2px(mContext, DEFAULT_SUFFIX_LR_MARGIN);
boolean isSuffixLRMarginNull = true;
if (mSuffixLRMargin >= 0) {
isSuffixLRMarginNull = false; // depends on control dependency: [if], data = [none]
}
if (isShowDay && mSuffixDayTextWidth > 0) {
if (mSuffixDayLeftMargin < 0) {
if (!isSuffixLRMarginNull) {
mSuffixDayLeftMargin = mSuffixLRMargin; // depends on control dependency: [if], data = [none]
} else {
mSuffixDayLeftMargin = defSuffixLRMargin; // depends on control dependency: [if], data = [none]
}
}
if (mSuffixDayRightMargin < 0) {
if (!isSuffixLRMarginNull) {
mSuffixDayRightMargin = mSuffixLRMargin; // depends on control dependency: [if], data = [none]
} else {
mSuffixDayRightMargin = defSuffixLRMargin; // depends on control dependency: [if], data = [none]
}
}
} else {
mSuffixDayLeftMargin = 0; // depends on control dependency: [if], data = [none]
mSuffixDayRightMargin = 0; // depends on control dependency: [if], data = [none]
}
if (isShowHour && mSuffixHourTextWidth > 0) {
if (mSuffixHourLeftMargin < 0) {
if (!isSuffixLRMarginNull) {
mSuffixHourLeftMargin = mSuffixLRMargin; // depends on control dependency: [if], data = [none]
} else {
mSuffixHourLeftMargin = defSuffixLRMargin; // depends on control dependency: [if], data = [none]
}
}
if (mSuffixHourRightMargin < 0) {
if (!isSuffixLRMarginNull) {
mSuffixHourRightMargin = mSuffixLRMargin; // depends on control dependency: [if], data = [none]
} else {
mSuffixHourRightMargin = defSuffixLRMargin; // depends on control dependency: [if], data = [none]
}
}
} else {
mSuffixHourLeftMargin = 0; // depends on control dependency: [if], data = [none]
mSuffixHourRightMargin = 0; // depends on control dependency: [if], data = [none]
}
if (isShowMinute && mSuffixMinuteTextWidth > 0) {
if (mSuffixMinuteLeftMargin < 0) {
if (!isSuffixLRMarginNull) {
mSuffixMinuteLeftMargin = mSuffixLRMargin; // depends on control dependency: [if], data = [none]
} else {
mSuffixMinuteLeftMargin = defSuffixLRMargin; // depends on control dependency: [if], data = [none]
}
}
if (isShowSecond) {
if (mSuffixMinuteRightMargin < 0) {
if (!isSuffixLRMarginNull) {
mSuffixMinuteRightMargin = mSuffixLRMargin; // depends on control dependency: [if], data = [none]
} else {
mSuffixMinuteRightMargin = defSuffixLRMargin; // depends on control dependency: [if], data = [none]
}
}
} else {
mSuffixMinuteRightMargin = 0; // depends on control dependency: [if], data = [none]
}
} else {
mSuffixMinuteLeftMargin = 0; // depends on control dependency: [if], data = [none]
mSuffixMinuteRightMargin = 0; // depends on control dependency: [if], data = [none]
}
if (isShowSecond) {
if (mSuffixSecondTextWidth > 0) {
if (mSuffixSecondLeftMargin < 0) {
if (!isSuffixLRMarginNull) {
mSuffixSecondLeftMargin = mSuffixLRMargin; // depends on control dependency: [if], data = [none]
} else {
mSuffixSecondLeftMargin = defSuffixLRMargin; // depends on control dependency: [if], data = [none]
}
}
if (isShowMillisecond) {
if (mSuffixSecondRightMargin < 0) {
if (!isSuffixLRMarginNull) {
mSuffixSecondRightMargin = mSuffixLRMargin; // depends on control dependency: [if], data = [none]
} else {
mSuffixSecondRightMargin = defSuffixLRMargin; // depends on control dependency: [if], data = [none]
}
}
} else {
mSuffixSecondRightMargin = 0; // depends on control dependency: [if], data = [none]
}
} else {
mSuffixSecondLeftMargin = 0; // depends on control dependency: [if], data = [none]
mSuffixSecondRightMargin = 0; // depends on control dependency: [if], data = [none]
}
if (isShowMillisecond && mSuffixMillisecondTextWidth > 0) {
if (mSuffixMillisecondLeftMargin < 0) {
if (!isSuffixLRMarginNull) {
mSuffixMillisecondLeftMargin = mSuffixLRMargin; // depends on control dependency: [if], data = [none]
} else {
mSuffixMillisecondLeftMargin = defSuffixLRMargin; // depends on control dependency: [if], data = [none]
}
}
} else {
mSuffixMillisecondLeftMargin = 0; // depends on control dependency: [if], data = [none]
}
} else {
mSuffixSecondLeftMargin = 0; // depends on control dependency: [if], data = [none]
mSuffixSecondRightMargin = 0; // depends on control dependency: [if], data = [none]
mSuffixMillisecondLeftMargin = 0; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
/**
 * Returns the last-modified time of this layer for the given request.
 *
 * <p>The value is computed from the layer's source modules and the config
 * file, cached at three levels: the instance field {@code _lastModified}
 * (set once), the request attribute (per-request memoization), and the
 * dev-mode / validation flags deciding whether to recompute at all.
 *
 * <p>NOTE(review): {@code _lastModified} is read here without
 * synchronization while writes happen inside the synchronized block below —
 * presumably an acceptable benign race; confirm the field is volatile or
 * that 64-bit tearing is impossible on the target JVMs.
 *
 * @param request the current servlet request
 * @return the last-modified time in milliseconds
 * @throws IOException if reading module timestamps fails
 */
@SuppressWarnings("unchecked")
@Override
public long getLastModified(HttpServletRequest request) throws IOException {
long lastModified = _lastModified;
IAggregator aggregator = (IAggregator)request.getAttribute(IAggregator.AGGREGATOR_REQATTRNAME);
IOptions options = aggregator.getOptions();
// Don't check last modified dates of source files on every request in production mode
// for performance reasons.  _validateLastModified is a transient that gets initialized
// to true whenever this object is de-serialized (i.e. on server startup).
if (lastModified == -1 || _validateLastModified.getAndSet(false) || options.isDevelopmentMode()) {
// see if we already determined the last modified time for this request
Object obj = request.getAttribute(LAST_MODIFIED_PROPNAME);
if (obj == null) {
// Determine latest last-modified time from source files in moduleList
ModuleList moduleFiles = getModules(request);
lastModified = getLastModified(aggregator, moduleFiles);
// Get last-modified date of config file
lastModified = Math.max(
lastModified,
aggregator.getConfig().lastModified());
List<String> cacheInfoReport = null;
if (_isReportCacheInfo) {
cacheInfoReport = (List<String>)request.getAttribute(LAYERCACHEINFO_PROPNAME);
}
// First writer wins: only the initial computation is published to the
// instance field; later recomputations only update the request attribute.
synchronized(this) {
if (_lastModified == -1) {
// Initialize value of instance property
_lastModified = lastModified;
if (cacheInfoReport != null) {
cacheInfoReport.add("update_lastmod1"); //$NON-NLS-1$
}
}
}
request.setAttribute(LAST_MODIFIED_PROPNAME, lastModified);
if (log.isLoggable(Level.FINER)) {
log.finer("Returning calculated last modified " //$NON-NLS-1$
+ lastModified + " for layer " + //$NON-NLS-1$
request.getAttribute(IHttpTransport.REQUESTEDMODULENAMES_REQATTRNAME).toString());
}
} else {
// reuse the value already computed earlier in this same request
lastModified = (Long)obj;
if (log.isLoggable(Level.FINER)) {
log.finer("Returning last modified " //$NON-NLS-1$
+ lastModified + " from request for layer " + //$NON-NLS-1$
request.getAttribute(IHttpTransport.REQUESTEDMODULENAMES_REQATTRNAME).toString());
}
}
} else {
if (log.isLoggable(Level.FINER)) {
log.finer("Returning cached last modified " //$NON-NLS-1$
+ lastModified + " for layer " + //$NON-NLS-1$
request.getAttribute(IHttpTransport.REQUESTEDMODULENAMES_REQATTRNAME).toString());
}
}
return lastModified;
} } | public class class_name {
@SuppressWarnings("unchecked")
@Override
public long getLastModified(HttpServletRequest request) throws IOException {
long lastModified = _lastModified;
IAggregator aggregator = (IAggregator)request.getAttribute(IAggregator.AGGREGATOR_REQATTRNAME);
IOptions options = aggregator.getOptions();
// Don't check last modified dates of source files on every request in production mode
// for performance reasons. _validateLastModified is a transient that gets initialize
// to true whenever this object is de-serialized (i.e. on server startup).
if (lastModified == -1 || _validateLastModified.getAndSet(false) || options.isDevelopmentMode()) {
// see if we already determined the last modified time for this request
Object obj = request.getAttribute(LAST_MODIFIED_PROPNAME);
if (obj == null) {
// Determine latest last-modified time from source files in moduleList
ModuleList moduleFiles = getModules(request);
lastModified = getLastModified(aggregator, moduleFiles);
// Get last-modified date of config file
lastModified = Math.max(
lastModified,
aggregator.getConfig().lastModified());
List<String> cacheInfoReport = null;
if (_isReportCacheInfo) {
cacheInfoReport = (List<String>)request.getAttribute(LAYERCACHEINFO_PROPNAME);
}
synchronized(this) {
if (_lastModified == -1) {
// Initialize value of instance property
_lastModified = lastModified;
// depends on control dependency: [if], data = [none]
if (cacheInfoReport != null) {
cacheInfoReport.add("update_lastmod1"); //$NON-NLS-1$
// depends on control dependency: [if], data = [none]
}
}
}
request.setAttribute(LAST_MODIFIED_PROPNAME, lastModified);
if (log.isLoggable(Level.FINER)) {
log.finer("Returning calculated last modified " //$NON-NLS-1$
+ lastModified + " for layer " + //$NON-NLS-1$
request.getAttribute(IHttpTransport.REQUESTEDMODULENAMES_REQATTRNAME).toString());
}
} else {
lastModified = (Long)obj;
if (log.isLoggable(Level.FINER)) {
log.finer("Returning last modified " //$NON-NLS-1$
+ lastModified + " from request for layer " + //$NON-NLS-1$
request.getAttribute(IHttpTransport.REQUESTEDMODULENAMES_REQATTRNAME).toString());
}
}
} else {
if (log.isLoggable(Level.FINER)) {
log.finer("Returning cached last modified " //$NON-NLS-1$
+ lastModified + " for layer " + //$NON-NLS-1$
request.getAttribute(IHttpTransport.REQUESTEDMODULENAMES_REQATTRNAME).toString());
}
}
return lastModified;
} } |
public class class_name {
    /**
     * Encrypts the file at {@code pFilePath} into {@code pEncryptedPath} and
     * writes three companion files next to it:
     * <ul>
     *   <li>{@code .sken}     — the AES session key, RSA-encrypted with the
     *       peer's public key;</li>
     *   <li>{@code .sken.sig} — SHA256withRSA signature of that encrypted key;</li>
     *   <li>{@code .sig}      — SHA256withRSA signature of the encrypted data.</li>
     * </ul>
     * Both signatures are made with our private key.
     *
     * @param pFilePath path of the plaintext file to encrypt
     * @param pEncryptedPath path where the encrypted file is written
     * @throws Exception on any crypto or I/O failure (close failures are now
     *             propagated instead of being swallowed with printStackTrace)
     */
    @Override
    public final void encryptFile(final String pFilePath,
            final String pEncryptedPath) throws Exception {
        // Fresh 256-bit AES session key for this file.
        KeyGenerator keyGenAes = KeyGenerator.
                getInstance("AES", this.cryptoProvider);
        keyGenAes.init(256, new SecureRandom());
        SecretKey sskAes = keyGenAes.generateKey();
        // Wrap the session key with the other party's RSA public key.
        Cipher cipherRsa = Cipher.getInstance("RSA", this.cryptoProvider);
        cipherRsa.init(Cipher.ENCRYPT_MODE, lazyGetPublicKeyAnotherAjetty());
        byte[] encryptedSsk = cipherRsa.doFinal(sskAes.getEncoded());
        // Sign the wrapped session key with our private key.
        Signature sigMk = Signature.getInstance("SHA256withRSA");
        sigMk.initSign(lazyGetOurPrivateKey(), new SecureRandom());
        sigMk.update(encryptedSsk);
        byte[] sigSsk = sigMk.sign();
        // SECURITY NOTE(review): ECB mode leaks plaintext patterns; kept
        // unchanged because the decrypting counterpart expects this exact
        // transformation string — TODO consider migrating both sides to CBC/GCM.
        Cipher cipherAes = Cipher.getInstance("AES/ECB/PKCS7Padding",
                this.cryptoProvider);
        cipherAes.init(Cipher.ENCRYPT_MODE, sskAes);
        byte[] buffer = new byte[1024];
        int len;
        // Encrypt the payload file; try-with-resources replaces the manual
        // close-and-swallow blocks of the previous version.
        try (BufferedInputStream bis =
                new BufferedInputStream(new FileInputStream(pFilePath));
            CipherOutputStream cous = new CipherOutputStream(
                new FileOutputStream(pEncryptedPath), cipherAes)) {
            while ((len = bis.read(buffer)) > 0) {
                cous.write(buffer, 0, len);
            }
            cous.flush();
        }
        // Sign the encrypted payload (re-initialize the same Signature object).
        sigMk.initSign(lazyGetOurPrivateKey(), new SecureRandom());
        try (BufferedInputStream bis =
                new BufferedInputStream(new FileInputStream(pEncryptedPath))) {
            while ((len = bis.read(buffer)) > 0) {
                sigMk.update(buffer, 0, len);
            }
        }
        byte[] sigDt = sigMk.sign();
        // write SSK, signatures
        writeAllBytes(pEncryptedPath + ".sken", encryptedSsk);
        writeAllBytes(pEncryptedPath + ".sken.sig", sigSsk);
        writeAllBytes(pEncryptedPath + ".sig", sigDt);
    }

    /**
     * Writes the given bytes to the given path, creating or overwriting the
     * file. Extracted from three identical write blocks.
     *
     * @param pPath destination file path
     * @param pData bytes to write
     * @throws Exception on I/O failure
     */
    private void writeAllBytes(final String pPath,
            final byte[] pData) throws Exception {
        try (FileOutputStream fos = new FileOutputStream(pPath)) {
            fos.write(pData);
            fos.flush();
        }
    } } | public class class_name {
@Override
public final void encryptFile(final String pFilePath,
final String pEncryptedPath) throws Exception {
KeyGenerator keyGenAes = KeyGenerator.
getInstance("AES", this.cryptoProvider);
keyGenAes.init(256, new SecureRandom());
SecretKey sskAes = keyGenAes.generateKey();
Cipher cipherRsa = Cipher.getInstance("RSA", this.cryptoProvider);
cipherRsa.init(Cipher.ENCRYPT_MODE, lazyGetPublicKeyAnotherAjetty());
byte[] encryptedSsk = cipherRsa.doFinal(sskAes.getEncoded());
Signature sigMk = Signature.getInstance("SHA256withRSA");
sigMk.initSign(lazyGetOurPrivateKey(), new SecureRandom());
sigMk.update(encryptedSsk);
byte[] sigSsk = sigMk.sign();
Cipher cipherAes = Cipher.getInstance("AES/ECB/PKCS7Padding",
this.cryptoProvider);
cipherAes.init(Cipher.ENCRYPT_MODE, sskAes);
BufferedInputStream bis = null;
CipherOutputStream cous = null;
byte[] buffer = new byte[1024];
int len;
try {
bis = new BufferedInputStream(new FileInputStream(pFilePath));
cous = new CipherOutputStream(new FileOutputStream(pEncryptedPath),
cipherAes);
while ((len = bis.read(buffer)) > 0) {
cous.write(buffer, 0, len); // depends on control dependency: [while], data = [none]
}
cous.flush();
} finally {
if (bis != null) {
try {
bis.close(); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
e.printStackTrace();
} // depends on control dependency: [catch], data = [none]
}
if (cous != null) {
try {
cous.close(); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
e.printStackTrace();
} // depends on control dependency: [catch], data = [none]
}
}
bis = null;
try {
sigMk.initSign(lazyGetOurPrivateKey(), new SecureRandom());
bis = new BufferedInputStream(new FileInputStream(pEncryptedPath));
while ((len = bis.read(buffer)) > 0) {
sigMk.update(buffer, 0, len); // depends on control dependency: [while], data = [none]
}
} finally {
if (bis != null) {
try {
bis.close(); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
e.printStackTrace();
} // depends on control dependency: [catch], data = [none]
}
}
byte[] sigDt = sigMk.sign();
// write SSK, signatures
FileOutputStream fos = null;
try {
fos = new FileOutputStream(pEncryptedPath + ".sken");
fos.write(encryptedSsk);
fos.flush();
} finally {
if (fos != null) {
try {
fos.close();
} catch (Exception e) {
e.printStackTrace();
}
}
}
fos = null;
try {
fos = new FileOutputStream(pEncryptedPath + ".sken.sig");
fos.write(sigSsk);
fos.flush();
} finally {
if (fos != null) {
try {
fos.close();
} catch (Exception e) {
e.printStackTrace();
}
}
}
fos = null;
try {
fos = new FileOutputStream(pEncryptedPath + ".sig");
fos.write(sigDt);
fos.flush();
} finally {
if (fos != null) {
try {
fos.close();
} catch (Exception e) {
e.printStackTrace();
}
}
}
} } |
public class class_name {
    /**
     * Estimates, for each object in {@code ids}, a kernel density value for
     * every neighborhood size k in [kmin, kmax], accumulating results into
     * {@code densities} (one double[kmax + 1 - kmin] slot per object).
     *
     * NOTE(review): assumes KNNList iterates neighbors in ascending distance
     * order (relied on by the early-break below) — standard ELKI contract,
     * but confirm for custom query implementations.
     *
     * @param rel relation over the objects (used only for dimensionality)
     * @param knnq k-nearest-neighbor query on the same relation
     * @param ids the objects to process
     * @param densities output store; each entry is overwritten with a fresh array
     */
    protected void estimateDensities(Relation<O> rel, KNNQuery<O> knnq, final DBIDs ids, WritableDataStore<double[]> densities) {
        final int dim = dimensionality(rel);
        // Number of k values evaluated per object.
        final int knum = kmax + 1 - kmin;
        // Initialize storage:
        for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
            densities.put(iter, new double[knum]);
        }
        // Distribute densities:
        FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Computing densities", ids.size(), LOG) : null;
        // Inverse of the minimum bandwidth; used to cap the inverse bandwidth below.
        double iminbw = (minBandwidth > 0.) ? 1. / (minBandwidth * scale) : Double.POSITIVE_INFINITY;
        for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
            // kmax + 1 neighbors — presumably because the query point itself
            // is included in the result; TODO confirm.
            KNNList neighbors = knnq.getKNNForDBID(iter, kmax + 1);
            int k = 1, idx = 0;
            double sum = 0.;
            for(DoubleDBIDListIter kneighbor = neighbors.iter(); k <= kmax && kneighbor.valid(); kneighbor.advance(), k++) {
                // Running sum of the first k neighbor distances.
                sum += kneighbor.doubleValue();
                if(k < kmin) {
                    continue;
                }
                // Inverse bandwidth derived from the mean k-NN distance,
                // capped by the configured minimum bandwidth.
                final double ibw = Math.min(k / (sum * scale), iminbw);
                // Kernel normalization factor in `dim` dimensions.
                final double sca = MathUtil.powi(ibw, dim);
                for(DoubleDBIDListIter neighbor = neighbors.iter(); neighbor.valid(); neighbor.advance()) {
                    final double dens;
                    if(sca < Double.POSITIVE_INFINITY) { // NaNs with duplicate points!
                        dens = sca * kernel.density(neighbor.doubleValue() * ibw);
                    }
                    else {
                        // Degenerate (infinite) scale: count exact duplicates only.
                        dens = neighbor.doubleValue() == 0. ? 1. : 0.;
                    }
                    densities.get(neighbor)[idx] += dens;
                    // Neighbors come in ascending distance order, so once the
                    // contribution falls below CUTOFF we can stop early.
                    if(dens < CUTOFF) {
                        break;
                    }
                }
                ++idx; // Only if k >= kmin
            }
            LOG.incrementProcessed(prog);
        }
        LOG.ensureCompleted(prog);
    } } | public class class_name {
protected void estimateDensities(Relation<O> rel, KNNQuery<O> knnq, final DBIDs ids, WritableDataStore<double[]> densities) {
final int dim = dimensionality(rel);
final int knum = kmax + 1 - kmin;
// Initialize storage:
for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
densities.put(iter, new double[knum]); // depends on control dependency: [for], data = [iter]
}
// Distribute densities:
FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Computing densities", ids.size(), LOG) : null;
double iminbw = (minBandwidth > 0.) ? 1. / (minBandwidth * scale) : Double.POSITIVE_INFINITY;
for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
KNNList neighbors = knnq.getKNNForDBID(iter, kmax + 1);
int k = 1, idx = 0;
double sum = 0.;
for(DoubleDBIDListIter kneighbor = neighbors.iter(); k <= kmax && kneighbor.valid(); kneighbor.advance(), k++) {
sum += kneighbor.doubleValue(); // depends on control dependency: [for], data = [kneighbor]
if(k < kmin) {
continue;
}
final double ibw = Math.min(k / (sum * scale), iminbw);
final double sca = MathUtil.powi(ibw, dim);
for(DoubleDBIDListIter neighbor = neighbors.iter(); neighbor.valid(); neighbor.advance()) {
final double dens;
if(sca < Double.POSITIVE_INFINITY) { // NaNs with duplicate points!
dens = sca * kernel.density(neighbor.doubleValue() * ibw); // depends on control dependency: [if], data = [none]
}
else {
dens = neighbor.doubleValue() == 0. ? 1. : 0.; // depends on control dependency: [if], data = [none]
}
densities.get(neighbor)[idx] += dens; // depends on control dependency: [for], data = [neighbor]
if(dens < CUTOFF) {
break;
}
}
++idx; // Only if k >= kmin // depends on control dependency: [for], data = [none]
}
LOG.incrementProcessed(prog); // depends on control dependency: [for], data = [none]
}
LOG.ensureCompleted(prog);
} } |
public class class_name {
    /**
     * Alarm callback fired when this remote consumer point has been idle.
     * If the consumer is still idle (idleHandler set) and no requests are
     * outstanding, it is removed from the parent AOStream and then closed;
     * an outstanding request while idle is an internal error and is FFDC'd.
     *
     * Locking order (deliberate — do not reorder): take the parent's monitor
     * first, then this object's lock; close() is deferred until both are
     * released, as documented inline.
     *
     * @param thandle the alarm handle (used only for trace entry)
     */
    public void alarm(Object thandle)
    {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "alarm", thandle);
        boolean doClose = false;
        // We're probably about to close this consumer, which needs us to remove it from the AOStream's
        // consciousness first (so that it doesn't allow it to be used while we're closing it - as that's
        // done outside of the lock). To do this we need to take the AOStream's (parent's) lock first and
        // remove it from the parent's list prior to releasing the lock.
        synchronized(parent)
        {
            this.lock();
            try
            {
                if (idleHandler != null)
                { // so we are still idle
                    if (!listOfRequests.isEmpty())
                    { // we have an outstanding request. idleHandler should be null, and therefore != thandle
                        // Since this should never occur, log this error
                        SIErrorException e =
                            new SIErrorException(
                                nls.getFormattedMessage(
                                    "INTERNAL_MESSAGING_ERROR_CWSIP0001",
                                    new Object[] {
                                        "com.ibm.ws.sib.processor.impl.JSRemoteConsumerPoint",
                                        "1:1121:1.43.2.26" },
                                    null));
                        FFDCFilter.processException(
                            e,
                            "com.ibm.ws.sib.processor.impl.JSRemoteConsumerPoint.alarm",
                            "1:1126:1.43.2.26",
                            this);
                        SibTr.exception(tc, e);
                        SibTr.error(tc, "INTERNAL_MESSAGING_ERROR_CWSIP0001",
                            new Object[] {
                                "com.ibm.ws.sib.processor.impl.JSRemoteConsumerPoint",
                                "1:1132:1.43.2.26" });
                    }
                    else
                    {
                        // Remove this consumer so that it is no longer handed out for new requests (they'll
                        // have to create a new consumer to use)
                        parent.removeConsumerKey(selectionCriteriasAsString, this);
                        doClose = true;
                    }
                }
            }
            finally
            {
                this.unlock();
            }
        } // synchronised(parent)
        if (doClose)
        {
            close(); //Never call close() from within synchronized (this)!!
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "alarm");
    } } | public class class_name {
public void alarm(Object thandle)
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(tc, "alarm", thandle);
boolean doClose = false;
// We're probably about to close this consumer, which needs us to remove it from the AOStream's
// consciousness first (so that it doesn't allow it to be used while we're closing it - as that's
// done outside of the lock). To do this we need to take the AOStream's (parent's) lock first and
// remove it from the parent's list prior to releasing the lock.
synchronized(parent)
{
this.lock();
try
{
if (idleHandler != null)
{ // so we are still idle
if (!listOfRequests.isEmpty())
{ // we have an outstanding request. idleHandler should be null, and therefore != thandle
// Since this should never occur, log this error
SIErrorException e =
new SIErrorException(
nls.getFormattedMessage(
"INTERNAL_MESSAGING_ERROR_CWSIP0001",
new Object[] {
"com.ibm.ws.sib.processor.impl.JSRemoteConsumerPoint",
"1:1121:1.43.2.26" },
null));
FFDCFilter.processException(
e,
"com.ibm.ws.sib.processor.impl.JSRemoteConsumerPoint.alarm",
"1:1126:1.43.2.26",
this); // depends on control dependency: [if], data = [none]
SibTr.exception(tc, e); // depends on control dependency: [if], data = [none]
SibTr.error(tc, "INTERNAL_MESSAGING_ERROR_CWSIP0001",
new Object[] {
"com.ibm.ws.sib.processor.impl.JSRemoteConsumerPoint",
"1:1132:1.43.2.26" }); // depends on control dependency: [if], data = [none]
}
else
{
// Remove this consumer so that it is no longer handed out for new requests (they'll
// have to create a new consumer to use)
parent.removeConsumerKey(selectionCriteriasAsString, this); // depends on control dependency: [if], data = [none]
doClose = true; // depends on control dependency: [if], data = [none]
}
}
}
finally
{
this.unlock();
}
} // synchronised(parent)
if (doClose)
{
close(); //Never call close() from within synchronized (this)!! // depends on control dependency: [if], data = [none]
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(tc, "alarm");
} } |
public class class_name {
    /**
     * Runs {@code action} with the given groups pushed onto the context and
     * pops the context afterwards, even when the action fails.
     *
     * Errors and RuntimeExceptions are rethrown unchanged; checked throwables
     * are wrapped in a RuntimeException. This is the documented replacement
     * for the deprecated Guava {@code Throwables.propagate} and behaves
     * identically (requires Guava 20+).
     *
     * @param action the callback to execute inside the group context
     * @param groups the group classes to push
     * @return the action's result
     */
    public <T> T doWithGroups(final GroupAction<T> action, final Class<?>... groups) {
        pushContext(groups);
        try {
            return action.call();
        } catch (Throwable ex) {
            // Equivalent expansion of the deprecated Throwables.propagate(ex).
            Throwables.throwIfUnchecked(ex);
            throw new RuntimeException(ex);
        } finally {
            popContext();
        }
    } } | public class class_name {
public <T> T doWithGroups(final GroupAction<T> action, final Class<?>... groups) {
pushContext(groups);
try {
return action.call(); // depends on control dependency: [try], data = [none]
} catch (Throwable ex) {
throw Throwables.propagate(ex);
} finally { // depends on control dependency: [catch], data = [none]
popContext();
}
} } |
public class class_name {
    /**
     * Handles a GOAWAY frame received from the remote peer.
     *
     * Transitions the close state NOT_CLOSED -> REMOTELY_CLOSED exactly once
     * via a CAS retry loop; the winning thread notifies close listeners and
     * schedules a disconnect. Any other current state means the session is
     * already closing, so the frame is logged and ignored.
     *
     * @param frame the received GOAWAY frame
     */
    @Override
    public void onGoAway(final GoAwayFrame frame) {
        if (log.isDebugEnabled()) {
            // Pass the frame itself: SLF4J invokes toString() lazily, so the
            // explicit frame.toString() of the old code was unnecessary.
            log.debug("Received {}", frame);
        }
        while (true) {
            CloseState current = closed.get();
            switch (current) {
                case NOT_CLOSED: {
                    if (closed.compareAndSet(current, CloseState.REMOTELY_CLOSED)) {
                        // We received a GO_AWAY, so try to write
                        // what's in the queue and then disconnect.
                        notifyClose(this, frame, new DisconnectCallback());
                        return;
                    }
                    // CAS lost to a concurrent state change: re-read and retry.
                    break;
                }
                default: {
                    if (log.isDebugEnabled()) {
                        log.debug("Ignored {}, already closed", frame);
                    }
                    return;
                }
            }
        }
    } } | public class class_name {
@Override
public void onGoAway(final GoAwayFrame frame) {
if (log.isDebugEnabled()) {
log.debug("Received {}", frame.toString()); // depends on control dependency: [if], data = [none]
}
while (true) {
CloseState current = closed.get();
switch (current) {
case NOT_CLOSED: {
if (closed.compareAndSet(current, CloseState.REMOTELY_CLOSED)) {
// We received a GO_AWAY, so try to write
// what's in the queue and then disconnect.
notifyClose(this, frame, new DisconnectCallback()); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
break;
}
default: {
if (log.isDebugEnabled()) {
log.debug("Ignored {}, already closed", frame.toString()); // depends on control dependency: [if], data = [none]
}
return;
}
}
}
} } |
public class class_name {
    /**
     * Pulls the lookup list from the existing snapshot for this node's
     * lookup tier.
     *
     * @return the snapshot's lookup beans, or null when no snapshot taker
     *         is configured
     */
    @Nullable
    private List<LookupBean> getLookupListFromSnapshot()
    {
        if (lookupSnapshotTaker == null) {
            return null;
        }
        return lookupSnapshotTaker.pullExistingSnapshot(lookupListeningAnnouncerConfig.getLookupTier());
    } } | public class class_name {
@Nullable
private List<LookupBean> getLookupListFromSnapshot()
{
if (lookupSnapshotTaker != null) {
return lookupSnapshotTaker.pullExistingSnapshot(lookupListeningAnnouncerConfig.getLookupTier()); // depends on control dependency: [if], data = [none]
}
return null;
} } |
public class class_name {
    /**
     * Marshals the given Deinterlacer settings (algorithm, control, mode)
     * into the wire format via the supplied protocol marshaller.
     *
     * @param deinterlacer the settings object to marshal; must not be null
     * @param protocolMarshaller the destination marshaller
     * @throws SdkClientException if {@code deinterlacer} is null, or wrapping
     *             any failure raised while marshalling (cause preserved)
     */
    public void marshall(Deinterlacer deinterlacer, ProtocolMarshaller protocolMarshaller) {
        if (deinterlacer == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(deinterlacer.getAlgorithm(), ALGORITHM_BINDING);
            protocolMarshaller.marshall(deinterlacer.getControl(), CONTROL_BINDING);
            protocolMarshaller.marshall(deinterlacer.getMode(), MODE_BINDING);
        } catch (Exception e) {
            // Broad catch is the SDK-generated pattern: everything is surfaced
            // as SdkClientException with the original exception as cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    } } | public class class_name {
public void marshall(Deinterlacer deinterlacer, ProtocolMarshaller protocolMarshaller) {
if (deinterlacer == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(deinterlacer.getAlgorithm(), ALGORITHM_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(deinterlacer.getControl(), CONTROL_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(deinterlacer.getMode(), MODE_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
    /**
     * Rebuilds the edge transition matrix Mi: accumulates the weighted scores
     * of all EDGE_FEATURE1 features into Mi[yp][y], then exponentiates every
     * entry in place.
     */
    public void computeMi() {
        Mi.assign(0.0);
        model.taggerFGen.startScanEFeatures();
        // Single pass over the edge features, summing lambda-weighted values.
        while (model.taggerFGen.hasNextEFeature()) {
            Feature feat = model.taggerFGen.nextEFeature();
            if (feat.ftype != Feature.EDGE_FEATURE1) {
                continue;
            }
            Mi.mtrx[feat.yp][feat.y] += model.lambda[feat.idx] * feat.val;
        }
        // Element-wise exp of the accumulated scores.
        for (int row = 0; row < Mi.rows; row++) {
            for (int col = 0; col < Mi.cols; col++) {
                Mi.mtrx[row][col] = Math.exp(Mi.mtrx[row][col]);
            }
        }
    } } | public class class_name {
public void computeMi() {
Mi.assign(0.0);
model.taggerFGen.startScanEFeatures();
while (model.taggerFGen.hasNextEFeature()) {
Feature f = model.taggerFGen.nextEFeature();
if (f.ftype == Feature.EDGE_FEATURE1) {
Mi.mtrx[f.yp][f.y] += model.lambda[f.idx] * f.val; // depends on control dependency: [if], data = [none]
}
}
for (int i = 0; i < Mi.rows; i++) {
for (int j = 0; j < Mi.cols; j++) {
Mi.mtrx[i][j] = Math.exp(Mi.mtrx[i][j]); // depends on control dependency: [for], data = [j]
}
}
} } |
public class class_name {
    /**
     * Writes a single CSV record to the underlying writer.
     *
     * A null {@code nextLine} writes nothing; a null element produces an
     * empty field. Fields are separated by {@code separator} and terminated
     * by {@code lineEnd}. A field is quoted when quoting is enabled and
     * either {@code applyQuotesToAll} is set or the field contains special
     * characters (which are then escaped via {@code processLine}).
     *
     * @param nextLine the field values of the record
     * @param applyQuotesToAll whether to quote every field, not only those
     *            that require it
     */
    public void writeNext(String[] nextLine, boolean applyQuotesToAll) {
        if (nextLine == null) {
            return;
        }
        StringBuilder sb = new StringBuilder(nextLine.length * 2); // This is for the worse case where all elements have to be escaped.
        for (int i = 0; i < nextLine.length; i++) {
            if (i != 0) {
                sb.append(separator);
            }
            String nextElement = nextLine[i];
            if (nextElement == null) {
                continue;
            }
            // Primitive boolean instead of the boxed Boolean the old code used.
            boolean stringContainsSpecialCharacters = stringContainsSpecialCharacters(nextElement);
            // Hoisted once: the same condition previously appeared twice.
            boolean applyQuotes = (applyQuotesToAll || stringContainsSpecialCharacters)
                    && (quotechar != NO_QUOTE_CHARACTER);
            if (applyQuotes) {
                sb.append(quotechar);
            }
            if (stringContainsSpecialCharacters) {
                sb.append(processLine(nextElement));
            } else {
                sb.append(nextElement);
            }
            if (applyQuotes) {
                sb.append(quotechar);
            }
        }
        sb.append(lineEnd);
        pw.write(sb.toString());
    } } | public class class_name {
public void writeNext(String[] nextLine, boolean applyQuotesToAll) {
if (nextLine == null) {
return; // depends on control dependency: [if], data = [none]
}
StringBuilder sb = new StringBuilder(nextLine.length * 2); // This is for the worse case where all elements have to be escaped.
for (int i = 0; i < nextLine.length; i++) {
if (i != 0) {
sb.append(separator); // depends on control dependency: [if], data = [none]
}
String nextElement = nextLine[i];
if (nextElement == null) {
continue;
}
Boolean stringContainsSpecialCharacters = stringContainsSpecialCharacters(nextElement);
if ((applyQuotesToAll || stringContainsSpecialCharacters) && (quotechar != NO_QUOTE_CHARACTER)) {
sb.append(quotechar); // depends on control dependency: [if], data = [none]
}
if (stringContainsSpecialCharacters) {
sb.append(processLine(nextElement)); // depends on control dependency: [if], data = [none]
} else {
sb.append(nextElement); // depends on control dependency: [if], data = [none]
}
if ((applyQuotesToAll || stringContainsSpecialCharacters) && (quotechar != NO_QUOTE_CHARACTER)) {
sb.append(quotechar); // depends on control dependency: [if], data = [none]
}
}
sb.append(lineEnd);
pw.write(sb.toString());
} } |
public class class_name {
    /**
     * One evaluation of the Duffy hillslope/channel model right-hand side:
     * given the current state vector, rainfall and (optionally) ET forcing,
     * returns the time derivative of the state for every link/hillslope.
     *
     * State vector layout (linksNum = number of hillslopes):
     *   input[i]                : link superficial discharge
     *   input[i + linksNum]     : link subsuperficial (base) flow
     *   input[i + 2 * linksNum] : unsaturated hillslope storage S1
     *   input[i + 3 * linksNum] : saturated hillslope storage S2
     * The returned array uses the same layout for the derivatives.
     *
     * NOTE(review): this method mutates {@code input} in place (clamping each
     * component to its physical minimum) and writes many instance fields
     * (satsurf, mst, inf, qdh, qe1, re, qds, qe2, qs, qd, Q_trib, Qs_trib),
     * so it is not thread-safe and must be called by one solver at a time.
     *
     * @param currentTimeInMinutes current simulation time in minutes
     * @param input current state vector (modified in place, see note)
     * @param rainArray rainfall per hillslope in mm/h
     * @param etpArray optional evapotranspiration per hillslope; may be null
     * @param isAtFinalSubtimestep whether to emit the per-basin debug message
     * @return the derivative of the state vector, same layout as {@code input}
     */
    public double[] eval( double currentTimeInMinutes, double[] input, double[] rainArray, double[] etpArray,
            boolean isAtFinalSubtimestep ) {
        // the input's length is twice the number of links... the first half
        // corresponds to links
        // discharge and the second to hillslopes storage
        // System.out.println(input.length);
        // define the month
        long currentTimeInMillis = (long) (currentTimeInMinutes * 60.0 * 1000.0);
        int linksNum = orderedHillslopes.size(); // linksConectionStruct.headsArray.length;
        // double mstold = 0.0;
        double[] output = new double[input.length];
        // Walk the hillslopes from downstream to upstream index order.
        for( int i = linksNum - 1; i >= 0; i-- ) {
            // start from the last pieces
            HillSlopeDuffy currentHillslope = (HillSlopeDuffy) orderedHillslopes.get(i);
            Parameters parameters = currentHillslope.getParameters();
            /*
             * NOTE: Initial conditions are ... input[i] for link discharge
             * input[i+nLi] for link base flow input[i+2*nLi] for unsaturated
             * hillslope S1 input[i+3*nLi] for saturated hillslope S2 . input[]
             * is updated for each time step in DiffEqSolver.RKF .
             */
            double prec_mphr = rainArray[i] / 1000.0; // input precipitation is in mm/h
            double area_m2 = currentHillslope.getHillslopeArea();
            // automatically in m2 from the features
            /*
             * Added some check for phisic consistency of the parameters
             */
            // if (input[i + 3 * linksNum] != input[i + 3 * linksNum]) {
            // System.out.println();
            // }
            double minsupdischarge = parameters.getqqsupmin() * currentHillslope.getUpstreamArea(null) / 1E6;
            if (input[i] < minsupdischarge) {
                input[i] = minsupdischarge;
                // System.out
                // .println(
                // "Current superficial discharge is less than the minimum value, setted to it for the basin "
                // + currentHillslope.getHillslopeId());
            }
            double minsubdischarge = parameters.getqqsubmin() * currentHillslope.getUpstreamArea(null) / 1E6;
            if (input[i + linksNum] < minsubdischarge) {
                input[i + linksNum] = minsubdischarge;
                // System.out
                // .println(
                // "Current subsuperficial discharge is less than the minimum value, setted to it for the basin "
                // + currentHillslope.getHillslopeId());
            }
            if (input[i + 2 * linksNum] < parameters.getS1residual()) {
                input[i + 2 * linksNum] = parameters.getS1residual();
                // System.out
                // .println(
                // "Current S1 parameter is less than the minimum value, setted to it for the basin "
                // + currentHillslope.getHillslopeId());
            }
            if (input[i + 3 * linksNum] < parameters.getS2residual()) {
                input[i + 3 * linksNum] = parameters.getS2residual();
                // System.out
                // .println(
                // "Current S2 parameter is less than the minimum value, setted to it for the basin "
                // + currentHillslope.getHillslopeId());
            }
            /* HILLSLOPE FLUX CONDITIONS */
            satsurf = parameters.getS2Param() * (input[i + 3 * linksNum]); // dimless
            // double areasat = satsurf * area_m2;
            mst = (input[i + 2 * linksNum]) / (parameters.getS2max() - (input[i + 3 * linksNum])); // dimless
            if (Double.isInfinite(mst)) {
                mst = MSTMAX;
            }
            // if ((mst - mstold) > 0.01) {
            // System.out.println("mst " + mst + "mstold " + mstold);
            // mstold = mst;
            // }
            // Ku = hillSlopesInfo.Ks(currentHillslope)
            // * (Math.pow(mst, hillSlopesInfo.MstExp(currentHillslope))); //
            // mphr
            /* HILLSLOPE S1-SURFACE FLUX VALUES */
            if (prec_mphr < parameters.getKs()) {
                inf = (1.0 - satsurf) * area_m2 * prec_mphr; // m3phr
                qdh = 0.0; // m3phr
            } else {
                inf = (1.0 - satsurf) * area_m2 * parameters.getKs(); // m3phr
                qdh = (1.0 - satsurf) * area_m2 * (prec_mphr - parameters.getKs()); // m3phr
            }
            Double eTrate = parameters.getETrate();
            if (etpArray != null) {
                qe1 = etpArray[i];
            } else {
                if (input[i + 2 * linksNum] > parameters.getS1residual()) {
                    qe1 = eTrate * area_m2 * (1.0 - satsurf) * mst; // m3phr
                } else {
                    qe1 = 0.0;
                }
            }
            /* HILLSLOPE S1-S2 FLUX VALUE */
            // re = 1100.0
            // * (input[i + 2 * linksNum] / parameters.getS2max())
            // + 300.0
            // * ((input[i + 2 * linksNum] / parameters.getS2max()) + 5)
            // * Math.pow((input[i + 3 * linksNum] / parameters.getS2max()),
            // 2.0);
            re = parameters.getKs() * area_m2 * (1.0 - satsurf) * (Math.pow(mst, parameters.getMstExp())); // m3phr
            /* HILLSLOPE S2-SURFACE FLUX VALUES */
            qds = satsurf * area_m2 * prec_mphr; // m3phr
            if (etpArray != null) {
                qe2 = etpArray[i];
            } else {
                qe2 = eTrate * area_m2 * satsurf; // m3phr,
            }
            qs = parameters.getRecParam() * (input[i + 3 * linksNum]); // m3phr
            /* HILLSLOPE DIRECT RUNOFF (TOTAL) FLUXES */
            // System.out.println("qdh = " + qdh);
            // System.out.println("qds = " + qds);
            qd = qdh + qds; // m3phr
            if (Double.isNaN(qs) || Double.isNaN(qd)) {
                if (Double.isNaN(qs)) {
                    throw new ModelsIllegalargumentException("Subsuperficial discharge for the hillslope "
                            + currentHillslope.getHillslopeId() + " " + i + " is NaN", this.getClass().getSimpleName(), pm);
                } else {
                    throw new ModelsIllegalargumentException("Timestep " + currentTimeInMinutes
                            + "Superficial discharge for the hillslope " + currentHillslope.getHillslopeId() + " " + i
                            + " is NaN" + "\nValue of qdh " + qdh + "\nValue of qds " + qds + "\nPrecipitation " + prec_mphr
                            + "\nSatsurf " + satsurf, this.getClass().getSimpleName(), pm);
                }
            }
            if (isAtFinalSubtimestep) {
                pm.message("timeinmin = " + currentTimeInMinutes + "\tbacino: " + i + "\tqdh = " + qdh + "\tqds = " + qds
                        + "\tre = " + re + "\tqs = " + qs + "\tmst = " + mst + "\tinf = " + inf + "\tqe1 = " + qe1 + "\tqe2 = "
                        + qe2);
            }
            /*
             * if the area is > 0.1 km2, we consider the delay effect
             * of the hillslope.
             */
            if (area_m2 > THRESHOLD_AREA) {
                // distribute the discharge
                int hillslopeId = currentHillslope.getHillslopeId();
                ADischargeDistributor dischargeDistributor = hillslopeId2DischargeDistributor.get(hillslopeId);
                qs = dischargeDistributor.calculateSubsuperficialDischarge(qs, satsurf, currentTimeInMillis);
                qd = dischargeDistributor.calculateSuperficialDischarge(qd, satsurf, currentTimeInMillis);
            }
            /* LINK FLUX ( Q ) */
            /*
             * Below, i=link#, j=id of connecting links, Array[i][j]=link# for
             * connecting link
             */
            /* LINK FLUX ( Q SUBSURFACE, BASE FLOW ) */
            /*
             * Below, i=link#, j=id of connecting links, Array[i][j]=link# for
             * connecting link
             */
            // Accumulate upstream tributary contributions, preferring
            // hydrometer/dam-contributed values when available.
            Q_trib = 0.0D;
            Qs_trib = 0.0D;
            List<IHillSlope> connectedUpstreamHillSlopes = currentHillslope.getConnectedUpstreamElements();
            if (connectedUpstreamHillSlopes != null) {
                for( IHillSlope hillSlope : connectedUpstreamHillSlopes ) {
                    PfafstetterNumber pNum = hillSlope.getPfafstetterNumber();
                    int index = orderedHillslopes.indexOf(hillSlope);
                    boolean doCalculate = true;
                    for( IDischargeContributor dContributor : dischargeContributorList ) {
                        Double contributedDischarge = dContributor.getDischarge(pNum.toString());
                        contributedDischarge = dContributor.mergeWithDischarge(contributedDischarge, input[index]);
                        if (!isNovalue(contributedDischarge)) {
                            if (doLog && doPrint) {
                                pm.message("----> For hillslope " + currentHillslope.getPfafstetterNumber()
                                        + " using hydrometer/dams data in pfafstetter: " + pNum.toString() + "(meaning added "
                                        + contributedDischarge + " instead of " + input[index] + ")");
                            }
                            // Fixed 70/30 split between base flow and surface
                            // flow — presumably an empirical choice; TODO confirm.
                            double dischargeRatio = 0.3;// input[index] / (input[index] +
                            // input[index + linksNum]);
                            Q_trib = dischargeRatio * contributedDischarge; // units m^3/s
                            Qs_trib = contributedDischarge - Q_trib; // units m^3/s
                            doCalculate = false;
                        }
                    }
                    if (doCalculate) {
                        // at the same position we can query the input array
                        Q_trib += input[index]; // units m^3/s
                        Qs_trib += input[index + linksNum]; // units m^3/s
                    }
                }
            }
            double K_Q = AdigeUtilities.doRouting(input[i], currentHillslope, routingType);
            /*
             * if (i == 62) { System.out.println(" WD ratio ="+
             * linksHydraulicInfo.Width(i)/flowdepth); System.out.println("
             * Mannings v (m/s) =" +
             * (Math.pow(hydrad,2./3.)*Math.pow(linksHydraulicInfo.Slope(i),1/2.)/mannings_n) );
             * System.out.println(" K_Q =" +
             * (Math.pow(hydrad,2./3.)*Math.pow(linksHydraulicInfo.Slope(i),1/2.)/mannings_n)
             * *Math.pow(linksHydraulicInfo.Length(i),-1) ); }
             */
            if (input[i] == 0.0D)
                K_Q = 1e-10;
            if (Double.isNaN(qs) || Double.isNaN(qd)) {
                pm.errorMessage("Problems in basin: " + currentHillslope.getHillslopeId() + " " + i); //$NON-NLS-1$ //$NON-NLS-2$
                if (area_m2 < THRESHOLD_AREA) {
                    qd = 0.0;
                    qs = 0.0;
                    inf = 0.0;
                    qe1 = 0.0;
                    qe2 = 0.0;
                    re = 0.0;
                    System.out.println("All the contributes are set to zero.");
                }
            }
            /* OUTPUT */
            if (area_m2 > THRESHOLD_AREA) {
                // LINK dQ/dt; big () term is m^3/s, 60*K_Q is 1/min
                output[i] = 60.0D * K_Q * ((1.0D / 3600.) * qd + Q_trib - input[i]);
                // 60.0 * K_Q * (Q_trib - input[i]) + (1.0 / 3600.0) * qd / deltaTinMinutes;
                // LINK dQs/dt -> (m^3/s)/min
                output[i + linksNum] = 60.0 * K_Q * (Qs_trib - input[i + linksNum]) + 60.0 * K_Q * (1.0 / 3600.) * (qs);
                // HILLSLOPE dS1/dt -> m3/min
                output[i + (2 * linksNum)] = (1.0 / 60.0) * (inf - re - qe1);
                // HILLSLOPE dS2/dt -> m3/min
                output[i + (3 * linksNum)] = (1.0 / 60.0) * (re - qs - qe2);
            } else {
                output[i] = 60.0D * K_Q * ((1.0D / 3600.) * qd + Q_trib - input[i]);
                output[i + linksNum] = 60.0D * K_Q * ((1.0D / 3600.) * (qs) + Qs_trib - input[i + linksNum]);
                output[i + (2 * linksNum)] = (1.0D / 60.0) * (inf - re - qe1);
                // x != x is the NaN test (true only for NaN values).
                if (output[i + (2 * linksNum)] != output[i + (2 * linksNum)] || output[i + (2 * linksNum)] == 0.0) {
                    throw new ModelsIllegalargumentException("Invalid value of S1, please check the parameters."
                            + output[i + (2 * linksNum)], this, pm);
                }
                output[i + (3 * linksNum)] = (1.0D / 60.0) * (re - qs - qe2);
            }
            // Same NaN-via-self-inequality check for the S2 derivative.
            if (output[i + (3 * linksNum)] != output[i + (3 * linksNum)] || output[i + (2 * linksNum)] == 0.) {
                throw new ModelsIllegalargumentException("Invalid value of S2, please check the parameters.", this.getClass()
                        .getSimpleName(), pm);
            }
        }
        doPrint = false;
        return output;
    } } | public class class_name {
public double[] eval( double currentTimeInMinutes, double[] input, double[] rainArray, double[] etpArray,
boolean isAtFinalSubtimestep ) {
// the input's length is twice the number of links... the first half
// corresponds to links
// discharge and the second to hillslopes storage
// System.out.println(input.length);
// define the month
long currentTimeInMillis = (long) (currentTimeInMinutes * 60.0 * 1000.0);
int linksNum = orderedHillslopes.size(); // linksConectionStruct.headsArray.length;
// double mstold = 0.0;
double[] output = new double[input.length];
for( int i = linksNum - 1; i >= 0; i-- ) {
// start from the last pieces
HillSlopeDuffy currentHillslope = (HillSlopeDuffy) orderedHillslopes.get(i);
Parameters parameters = currentHillslope.getParameters();
/*
* NOTE: Initial conditions are ... input[i] for link discharge
* input[i+nLi] for link base flow input[i+2*nLi] for unsaturated
* hillslope S1 input[i+3*nLi] for saturated hillslope S2 . input[]
* is updated for each time step in DiffEqSolver.RKF .
*/
double prec_mphr = rainArray[i] / 1000.0; // input precipitation is in mm/h
double area_m2 = currentHillslope.getHillslopeArea();
// automatically in m2 from the features
/*
* Added some check for phisic consistency of the parameters
*/
// if (input[i + 3 * linksNum] != input[i + 3 * linksNum]) {
// System.out.println();
// }
double minsupdischarge = parameters.getqqsupmin() * currentHillslope.getUpstreamArea(null) / 1E6;
if (input[i] < minsupdischarge) {
input[i] = minsupdischarge; // depends on control dependency: [if], data = [none]
// System.out
// .println(
// "Current superficial discharge is less than the minimum value, setted to it for the basin "
// + currentHillslope.getHillslopeId());
}
double minsubdischarge = parameters.getqqsubmin() * currentHillslope.getUpstreamArea(null) / 1E6;
if (input[i + linksNum] < minsubdischarge) {
input[i + linksNum] = minsubdischarge; // depends on control dependency: [if], data = [none]
// System.out
// .println(
// "Current subsuperficial discharge is less than the minimum value, setted to it for the basin "
// + currentHillslope.getHillslopeId());
}
if (input[i + 2 * linksNum] < parameters.getS1residual()) {
input[i + 2 * linksNum] = parameters.getS1residual(); // depends on control dependency: [if], data = [none]
// System.out
// .println(
// "Current S1 parameter is less than the minimum value, setted to it for the basin "
// + currentHillslope.getHillslopeId());
}
if (input[i + 3 * linksNum] < parameters.getS2residual()) {
input[i + 3 * linksNum] = parameters.getS2residual(); // depends on control dependency: [if], data = [none]
// System.out
// .println(
// "Current S2 parameter is less than the minimum value, setted to it for the basin "
// + currentHillslope.getHillslopeId());
}
/* HILLSLOPE FLUX CONDITIONS */
satsurf = parameters.getS2Param() * (input[i + 3 * linksNum]); // dimless
// double areasat = satsurf * area_m2;
mst = (input[i + 2 * linksNum]) / (parameters.getS2max() - (input[i + 3 * linksNum])); // dimless
if (Double.isInfinite(mst)) {
mst = MSTMAX; // depends on control dependency: [if], data = [none]
}
// if ((mst - mstold) > 0.01) {
// System.out.println("mst " + mst + "mstold " + mstold);
// mstold = mst;
// }
// Ku = hillSlopesInfo.Ks(currentHillslope)
// * (Math.pow(mst, hillSlopesInfo.MstExp(currentHillslope))); //
// mphr
/* HILLSLOPE S1-SURFACE FLUX VALUES */
if (prec_mphr < parameters.getKs()) {
inf = (1.0 - satsurf) * area_m2 * prec_mphr; // m3phr // depends on control dependency: [if], data = [none]
qdh = 0.0; // m3phr // depends on control dependency: [if], data = [none]
} else {
inf = (1.0 - satsurf) * area_m2 * parameters.getKs(); // m3phr // depends on control dependency: [if], data = [none]
qdh = (1.0 - satsurf) * area_m2 * (prec_mphr - parameters.getKs()); // m3phr // depends on control dependency: [if], data = [(prec_mphr]
}
Double eTrate = parameters.getETrate();
if (etpArray != null) {
qe1 = etpArray[i]; // depends on control dependency: [if], data = [none]
} else {
if (input[i + 2 * linksNum] > parameters.getS1residual()) {
qe1 = eTrate * area_m2 * (1.0 - satsurf) * mst; // m3phr // depends on control dependency: [if], data = [none]
} else {
qe1 = 0.0; // depends on control dependency: [if], data = [none]
}
}
/* HILLSLOPE S1-S2 FLUX VALUE */
// re = 1100.0
// * (input[i + 2 * linksNum] / parameters.getS2max())
// + 300.0
// * ((input[i + 2 * linksNum] / parameters.getS2max()) + 5)
// * Math.pow((input[i + 3 * linksNum] / parameters.getS2max()),
// 2.0);
re = parameters.getKs() * area_m2 * (1.0 - satsurf) * (Math.pow(mst, parameters.getMstExp())); // m3phr
/* HILLSLOPE S2-SURFACE FLUX VALUES */
qds = satsurf * area_m2 * prec_mphr; // m3phr
if (etpArray != null) {
qe2 = etpArray[i]; // depends on control dependency: [if], data = [none]
} else {
qe2 = eTrate * area_m2 * satsurf; // m3phr, // depends on control dependency: [if], data = [none]
}
qs = parameters.getRecParam() * (input[i + 3 * linksNum]); // m3phr
/* HILLSLOPE DIRECT RUNOFF (TOTAL) FLUXES */
// System.out.println("qdh = " + qdh);
// System.out.println("qds = " + qds);
qd = qdh + qds; // m3phr
if (Double.isNaN(qs) || Double.isNaN(qd)) {
if (Double.isNaN(qs)) {
throw new ModelsIllegalargumentException("Subsuperficial discharge for the hillslope "
+ currentHillslope.getHillslopeId() + " " + i + " is NaN", this.getClass().getSimpleName(), pm);
} else {
throw new ModelsIllegalargumentException("Timestep " + currentTimeInMinutes
+ "Superficial discharge for the hillslope " + currentHillslope.getHillslopeId() + " " + i
+ " is NaN" + "\nValue of qdh " + qdh + "\nValue of qds " + qds + "\nPrecipitation " + prec_mphr
+ "\nSatsurf " + satsurf, this.getClass().getSimpleName(), pm);
}
}
if (isAtFinalSubtimestep) {
pm.message("timeinmin = " + currentTimeInMinutes + "\tbacino: " + i + "\tqdh = " + qdh + "\tqds = " + qds
+ "\tre = " + re + "\tqs = " + qs + "\tmst = " + mst + "\tinf = " + inf + "\tqe1 = " + qe1 + "\tqe2 = "
+ qe2); // depends on control dependency: [if], data = [none]
}
/*
* if the area is > 0.1 km2, we consider the delay effect
* of the hillslope.
*/
if (area_m2 > THRESHOLD_AREA) {
// distribute the discharge
int hillslopeId = currentHillslope.getHillslopeId();
ADischargeDistributor dischargeDistributor = hillslopeId2DischargeDistributor.get(hillslopeId);
qs = dischargeDistributor.calculateSubsuperficialDischarge(qs, satsurf, currentTimeInMillis); // depends on control dependency: [if], data = [none]
qd = dischargeDistributor.calculateSuperficialDischarge(qd, satsurf, currentTimeInMillis); // depends on control dependency: [if], data = [none]
}
/* LINK FLUX ( Q ) */
/*
* Below, i=link#, j=id of connecting links, Array[i][j]=link# for
* connecting link
*/
/* LINK FLUX ( Q SUBSURFACE, BASE FLOW ) */
/*
* Below, i=link#, j=id of connecting links, Array[i][j]=link# for
* connecting link
*/
Q_trib = 0.0D;
Qs_trib = 0.0D;
List<IHillSlope> connectedUpstreamHillSlopes = currentHillslope.getConnectedUpstreamElements();
if (connectedUpstreamHillSlopes != null) {
for( IHillSlope hillSlope : connectedUpstreamHillSlopes ) {
PfafstetterNumber pNum = hillSlope.getPfafstetterNumber();
int index = orderedHillslopes.indexOf(hillSlope);
boolean doCalculate = true;
for( IDischargeContributor dContributor : dischargeContributorList ) {
Double contributedDischarge = dContributor.getDischarge(pNum.toString());
contributedDischarge = dContributor.mergeWithDischarge(contributedDischarge, input[index]);
if (!isNovalue(contributedDischarge)) {
if (doLog && doPrint) {
pm.message("----> For hillslope " + currentHillslope.getPfafstetterNumber()
+ " using hydrometer/dams data in pfafstetter: " + pNum.toString() + "(meaning added "
+ contributedDischarge + " instead of " + input[index] + ")");
}
double dischargeRatio = 0.3;// input[index] / (input[index] +
// input[index + linksNum]);
Q_trib = dischargeRatio * contributedDischarge; // units m^3/s
Qs_trib = contributedDischarge - Q_trib; // units m^3/s
doCalculate = false;
}
}
if (doCalculate) {
// at the same position we can query the input array
Q_trib += input[index]; // units m^3/s
Qs_trib += input[index + linksNum]; // units m^3/s
}
}
}
double K_Q = AdigeUtilities.doRouting(input[i], currentHillslope, routingType);
/*
* if (i == 62) { System.out.println(" WD ratio ="+
* linksHydraulicInfo.Width(i)/flowdepth); System.out.println("
* Mannings v (m/s) =" +
* (Math.pow(hydrad,2./3.)*Math.pow(linksHydraulicInfo.Slope(i),1/2.)/mannings_n) );
* System.out.println(" K_Q =" +
* (Math.pow(hydrad,2./3.)*Math.pow(linksHydraulicInfo.Slope(i),1/2.)/mannings_n)
* *Math.pow(linksHydraulicInfo.Length(i),-1) ); }
*/
if (input[i] == 0.0D)
K_Q = 1e-10;
if (Double.isNaN(qs) || Double.isNaN(qd)) {
pm.errorMessage("Problems in basin: " + currentHillslope.getHillslopeId() + " " + i); //$NON-NLS-1$ //$NON-NLS-2$
if (area_m2 < THRESHOLD_AREA) {
qd = 0.0;
qs = 0.0;
inf = 0.0;
qe1 = 0.0;
qe2 = 0.0;
re = 0.0;
System.out.println("All the contributes are set to zero.");
}
}
/* OUTPUT */
if (area_m2 > THRESHOLD_AREA) {
// LINK dQ/dt; big () term is m^3/s, 60*K_Q is 1/min
output[i] = 60.0D * K_Q * ((1.0D / 3600.) * qd + Q_trib - input[i]);
// 60.0 * K_Q * (Q_trib - input[i]) + (1.0 / 3600.0) * qd / deltaTinMinutes;
// LINK dQs/dt -> (m^3/s)/min
output[i + linksNum] = 60.0 * K_Q * (Qs_trib - input[i + linksNum]) + 60.0 * K_Q * (1.0 / 3600.) * (qs);
// HILLSLOPE dS1/dt -> m3/min
output[i + (2 * linksNum)] = (1.0 / 60.0) * (inf - re - qe1);
// HILLSLOPE dS2/dt -> m3/min
output[i + (3 * linksNum)] = (1.0 / 60.0) * (re - qs - qe2);
} else {
output[i] = 60.0D * K_Q * ((1.0D / 3600.) * qd + Q_trib - input[i]);
output[i + linksNum] = 60.0D * K_Q * ((1.0D / 3600.) * (qs) + Qs_trib - input[i + linksNum]);
output[i + (2 * linksNum)] = (1.0D / 60.0) * (inf - re - qe1);
if (output[i + (2 * linksNum)] != output[i + (2 * linksNum)] || output[i + (2 * linksNum)] == 0.0) {
throw new ModelsIllegalargumentException("Invalid value of S1, please check the parameters."
+ output[i + (2 * linksNum)], this, pm);
}
output[i + (3 * linksNum)] = (1.0D / 60.0) * (re - qs - qe2);
}
if (output[i + (3 * linksNum)] != output[i + (3 * linksNum)] || output[i + (2 * linksNum)] == 0.) {
throw new ModelsIllegalargumentException("Invalid value of S2, please check the parameters.", this.getClass()
.getSimpleName(), pm);
}
}
doPrint = false;
return output;
} } |
public class class_name {
@Override
public String getGroupNameForWorkUnit(final DocWorkUnit workUnit) {
String groupName = workUnit.getDocumentedFeature().groupName();
if (groupName == null || groupName.isEmpty()) {
final CommandLineProgramGroup clpGroup = workUnit.getCommandLineProgramGroup();
if (clpGroup != null) {
groupName = clpGroup.getName();
}
if (groupName == null || groupName.isEmpty()) {
logger.warn("No group name declared for: " + workUnit.getClazz().getCanonicalName());
groupName = "";
}
}
return groupName;
} } | public class class_name {
@Override
public String getGroupNameForWorkUnit(final DocWorkUnit workUnit) {
String groupName = workUnit.getDocumentedFeature().groupName();
if (groupName == null || groupName.isEmpty()) {
final CommandLineProgramGroup clpGroup = workUnit.getCommandLineProgramGroup();
if (clpGroup != null) {
groupName = clpGroup.getName(); // depends on control dependency: [if], data = [none]
}
if (groupName == null || groupName.isEmpty()) {
logger.warn("No group name declared for: " + workUnit.getClazz().getCanonicalName()); // depends on control dependency: [if], data = [none]
groupName = ""; // depends on control dependency: [if], data = [none]
}
}
return groupName;
} } |
public class class_name {
@Override
protected Content getNavSummaryLink(TypeElement typeElement, boolean link) {
if (link) {
return writer.getHyperLink(SectionName.CONSTRUCTOR_SUMMARY,
contents.navConstructor);
} else {
return contents.navConstructor;
}
} } | public class class_name {
@Override
protected Content getNavSummaryLink(TypeElement typeElement, boolean link) {
if (link) {
return writer.getHyperLink(SectionName.CONSTRUCTOR_SUMMARY,
contents.navConstructor); // depends on control dependency: [if], data = [none]
} else {
return contents.navConstructor; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static void putStringNormalizedKey(
BinaryString value, MemorySegment target, int offset, int numBytes) {
final int limit = offset + numBytes;
final int end = value.getSizeInBytes();
for (int i = 0; i < end && offset < limit; i++) {
target.put(offset++, value.getByte(i));
}
for (int i = offset; i < limit; i++) {
target.put(i, (byte) 0);
}
} } | public class class_name {
public static void putStringNormalizedKey(
BinaryString value, MemorySegment target, int offset, int numBytes) {
final int limit = offset + numBytes;
final int end = value.getSizeInBytes();
for (int i = 0; i < end && offset < limit; i++) {
target.put(offset++, value.getByte(i)); // depends on control dependency: [for], data = [i]
}
for (int i = offset; i < limit; i++) {
target.put(i, (byte) 0); // depends on control dependency: [for], data = [i]
}
} } |
public class class_name {
public boolean contains(Object o) {
Iterator<E> iter = iterator();
while (iter.hasNext()) {
if (iter.next() == o) {
return true;
}
}
return false;
} } | public class class_name {
public boolean contains(Object o) {
Iterator<E> iter = iterator();
while (iter.hasNext()) {
if (iter.next() == o) {
return true;
// depends on control dependency: [if], data = [none]
}
}
return false;
} } |
public class class_name {
private static Collection<Set<Node>> prepareNodeSets(Set<BioPAXElement> elements, Graph graph)
{
Collection<Set<Node>> sets = new HashSet<Set<Node>>();
Map<BioPAXElement, Set<PhysicalEntity>> map = getRelatedPhysicalEntityMap(elements);
for (Set<PhysicalEntity> pes : map.values())
{
Set<Node> set = graph.getWrapperSet(pes);
if (!set.isEmpty()) sets.add(set);
}
// Add interactions in the seed as single node set
Set<Node> inters = getSeedInteractions(elements, graph);
for (Node node : inters)
{
sets.add(Collections.singleton(node));
}
return sets;
} } | public class class_name {
private static Collection<Set<Node>> prepareNodeSets(Set<BioPAXElement> elements, Graph graph)
{
Collection<Set<Node>> sets = new HashSet<Set<Node>>();
Map<BioPAXElement, Set<PhysicalEntity>> map = getRelatedPhysicalEntityMap(elements);
for (Set<PhysicalEntity> pes : map.values())
{
Set<Node> set = graph.getWrapperSet(pes);
if (!set.isEmpty()) sets.add(set);
}
// Add interactions in the seed as single node set
Set<Node> inters = getSeedInteractions(elements, graph);
for (Node node : inters)
{
sets.add(Collections.singleton(node)); // depends on control dependency: [for], data = [node]
}
return sets;
} } |
public class class_name {
protected void addSurplusRow( Example example, Example headers, Fixture rowFixtureAdapter)
{
Example row = example.addSibling();
for (int i = 0; i < headers.remainings(); i++)
{
ExpectedColumn column = (ExpectedColumn) columns[i];
Example cell = row.addChild();
try
{
Call call = new Call( rowFixtureAdapter.check( column.header() ) );
Object actual = call.execute();
cell.setContent( TypeConversion.toString(actual));
cell.annotate( Annotations.surplus() );
if (i == 0) // Notify test listener on first cell only
{
stats.wrong();
}
}
catch (Exception e)
{
// TODO: STATS count stats?
cell.annotate( ignored( e ) );
}
}
} } | public class class_name {
protected void addSurplusRow( Example example, Example headers, Fixture rowFixtureAdapter)
{
Example row = example.addSibling();
for (int i = 0; i < headers.remainings(); i++)
{
ExpectedColumn column = (ExpectedColumn) columns[i];
Example cell = row.addChild();
try
{
Call call = new Call( rowFixtureAdapter.check( column.header() ) );
Object actual = call.execute();
cell.setContent( TypeConversion.toString(actual)); // depends on control dependency: [try], data = [none]
cell.annotate( Annotations.surplus() ); // depends on control dependency: [try], data = [none]
if (i == 0) // Notify test listener on first cell only
{
stats.wrong(); // depends on control dependency: [if], data = [none]
}
}
catch (Exception e)
{
// TODO: STATS count stats?
cell.annotate( ignored( e ) );
} // depends on control dependency: [catch], data = [none]
}
} } |
public class class_name {
private void postTouchEvent(MonocleWindow window,
View view,
TouchState newState) {
int count = countEvents(newState);
switch (count) {
case 0:
postNoPoints(view);
break;
case 1:
if (state.getPointCount() == 1) {
// There is one point and it already existed
TouchState.Point oldPoint = state.getPoint(0);
TouchState.Point newPoint = newState.getPointForID(
oldPoint.id);
if (newPoint != null) {
if (newPoint.x == oldPoint.x
&& newPoint.y == oldPoint.y) {
postPoint(window, view, TouchEvent.TOUCH_STILL, newPoint);
} else {
postPoint(window, view, TouchEvent.TOUCH_MOVED, newPoint);
}
} else {
postPoint(window, view, TouchEvent.TOUCH_RELEASED, oldPoint);
}
} else {
// There is one point and it is newly pressed
postPoint(window, view, TouchEvent.TOUCH_PRESSED, newState.getPoint(0));
}
break;
default: {
int[] states = new int[count];
int[] ids = new int[count];
int[] xs = new int[count];
int[] ys = new int[count];
for (int i = 0; i < state.getPointCount(); i++) {
TouchState.Point oldPoint = state.getPoint(i);
TouchState.Point newPoint = newState.getPointForID(
oldPoint.id);
if (newPoint != null) {
ids[i] = newPoint.id;
xs[i] = newPoint.x;
ys[i] = newPoint.y;
if (newPoint.x == oldPoint.x
&& newPoint.y == oldPoint.y) {
states[i] = TouchEvent.TOUCH_STILL;
} else {
states[i] = TouchEvent.TOUCH_MOVED;
}
} else {
states[i] = TouchEvent.TOUCH_RELEASED;
ids[i] = oldPoint.id;
xs[i] = oldPoint.x;
ys[i] = oldPoint.y;
}
}
// Once we have dealt with updates to old points, all that are left
// are new points.
for (int i = 0, j = state.getPointCount();
i < newState.getPointCount(); i++) {
TouchState.Point newPoint = newState.getPoint(i);
TouchState.Point oldPoint = state.getPointForID(
newPoint.id);
if (oldPoint == null) {
states[j] = TouchEvent.TOUCH_PRESSED;
ids[j] = newPoint.id;
xs[j] = newPoint.x;
ys[j] = newPoint.y;
j++;
}
}
postPoints(window, view, states, ids, xs, ys);
}
}
} } | public class class_name {
private void postTouchEvent(MonocleWindow window,
View view,
TouchState newState) {
int count = countEvents(newState);
switch (count) {
case 0:
postNoPoints(view);
break;
case 1:
if (state.getPointCount() == 1) {
// There is one point and it already existed
TouchState.Point oldPoint = state.getPoint(0);
TouchState.Point newPoint = newState.getPointForID(
oldPoint.id);
if (newPoint != null) {
if (newPoint.x == oldPoint.x
&& newPoint.y == oldPoint.y) {
postPoint(window, view, TouchEvent.TOUCH_STILL, newPoint); // depends on control dependency: [if], data = [none]
} else {
postPoint(window, view, TouchEvent.TOUCH_MOVED, newPoint); // depends on control dependency: [if], data = [none]
}
} else {
postPoint(window, view, TouchEvent.TOUCH_RELEASED, oldPoint); // depends on control dependency: [if], data = [none]
}
} else {
// There is one point and it is newly pressed
postPoint(window, view, TouchEvent.TOUCH_PRESSED, newState.getPoint(0)); // depends on control dependency: [if], data = [none]
}
break;
default: {
int[] states = new int[count];
int[] ids = new int[count];
int[] xs = new int[count];
int[] ys = new int[count];
for (int i = 0; i < state.getPointCount(); i++) {
TouchState.Point oldPoint = state.getPoint(i);
TouchState.Point newPoint = newState.getPointForID(
oldPoint.id);
if (newPoint != null) {
ids[i] = newPoint.id; // depends on control dependency: [if], data = [none]
xs[i] = newPoint.x; // depends on control dependency: [if], data = [none]
ys[i] = newPoint.y; // depends on control dependency: [if], data = [none]
if (newPoint.x == oldPoint.x
&& newPoint.y == oldPoint.y) {
states[i] = TouchEvent.TOUCH_STILL; // depends on control dependency: [if], data = [none]
} else {
states[i] = TouchEvent.TOUCH_MOVED; // depends on control dependency: [if], data = [none]
}
} else {
states[i] = TouchEvent.TOUCH_RELEASED; // depends on control dependency: [if], data = [none]
ids[i] = oldPoint.id; // depends on control dependency: [if], data = [none]
xs[i] = oldPoint.x; // depends on control dependency: [if], data = [none]
ys[i] = oldPoint.y; // depends on control dependency: [if], data = [none]
}
}
// Once we have dealt with updates to old points, all that are left
// are new points.
for (int i = 0, j = state.getPointCount();
i < newState.getPointCount(); i++) {
TouchState.Point newPoint = newState.getPoint(i);
TouchState.Point oldPoint = state.getPointForID(
newPoint.id);
if (oldPoint == null) {
states[j] = TouchEvent.TOUCH_PRESSED; // depends on control dependency: [if], data = [none]
ids[j] = newPoint.id; // depends on control dependency: [if], data = [none]
xs[j] = newPoint.x; // depends on control dependency: [if], data = [none]
ys[j] = newPoint.y; // depends on control dependency: [if], data = [none]
j++; // depends on control dependency: [if], data = [none]
}
}
postPoints(window, view, states, ids, xs, ys);
}
}
} } |
public class class_name {
protected <T extends ListBuffer<? super JCVariableDecl>> T variableDeclaratorsRest(int pos,
JCModifiers mods,
JCExpression type,
Name name,
boolean reqInit,
Comment dc,
T vdefs)
{
vdefs.append(variableDeclaratorRest(pos, mods, type, name, reqInit, dc));
while (token.kind == COMMA) {
// All but last of multiple declarators subsume a comma
storeEnd((JCTree)vdefs.last(), token.endPos);
nextToken();
vdefs.append(variableDeclarator(mods, type, reqInit, dc));
}
return vdefs;
} } | public class class_name {
protected <T extends ListBuffer<? super JCVariableDecl>> T variableDeclaratorsRest(int pos,
JCModifiers mods,
JCExpression type,
Name name,
boolean reqInit,
Comment dc,
T vdefs)
{
vdefs.append(variableDeclaratorRest(pos, mods, type, name, reqInit, dc));
while (token.kind == COMMA) {
// All but last of multiple declarators subsume a comma
storeEnd((JCTree)vdefs.last(), token.endPos); // depends on control dependency: [while], data = [none]
nextToken(); // depends on control dependency: [while], data = [none]
vdefs.append(variableDeclarator(mods, type, reqInit, dc)); // depends on control dependency: [while], data = [none]
}
return vdefs;
} } |
public class class_name {
void writeArrayItemStart() throws IOException {
if (commaState.get(commaDepth)) {
output.append(',');
if (newLine.length() > 0) {
output.append(' ');
}
} else {
commaState.set(commaDepth);
}
} } | public class class_name {
void writeArrayItemStart() throws IOException {
if (commaState.get(commaDepth)) {
output.append(',');
if (newLine.length() > 0) {
output.append(' '); // depends on control dependency: [if], data = [none]
}
} else {
commaState.set(commaDepth);
}
} } |
public class class_name {
public Assignments cluster(Matrix m,
int maxClusters,
Properties props) {
int startSize = Integer.parseInt(props.getProperty(
NUM_CLUSTERS_START, DEFAULT_NUM_CLUSTERS_START));
int numGaps = Integer.parseInt(props.getProperty(
NUM_REFERENCE_DATA_SETS, DEFAULT_NUM_REFERENCE_DATA_SETS));
int numIterations = maxClusters - startSize;
String criterion = props.getProperty(METHOD_PROPERTY, DEFAULT_METHOD);
verbose("Transforming the original data set");
Transform tfidf = new TfIdfDocStripedTransform();
Transform rowMag = new RowMagnitudeTransform();
m = rowMag.transform(tfidf.transform(m));
verbose("Generating the reference data set");
// Generate the reference data sets.
ReferenceDataGenerator generator = new ReferenceDataGenerator(m);
Matrix[] gapMatrices = new Matrix[numGaps];
for (int i = 0; i < numGaps; ++i)
gapMatrices[i] = rowMag.transform(tfidf.transform(
generator.generateTestData()));
double[] gapResults = new double[numIterations];
double[] gapStds = new double[numIterations];
Assignments[] gapAssignments = new Assignments[numIterations];
Assignments bestAssignments = null;
double bestGap = Double.NEGATIVE_INFINITY;
int bestK = 0;
// Compute the gap statistic for each iteration.
for (int i = 0; i < numIterations; ++i) {
clusterIteration(i, startSize, criterion, m, gapMatrices,
gapResults, gapStds, gapAssignments);
if (bestGap >= (gapResults[i] - gapStds[i])) {
break;
}
// Otherwise, continue clustering with higher values of k.
bestGap = gapResults[i];
bestAssignments = gapAssignments[i];
bestK = i + startSize;
}
return bestAssignments;
} } | public class class_name {
public Assignments cluster(Matrix m,
int maxClusters,
Properties props) {
int startSize = Integer.parseInt(props.getProperty(
NUM_CLUSTERS_START, DEFAULT_NUM_CLUSTERS_START));
int numGaps = Integer.parseInt(props.getProperty(
NUM_REFERENCE_DATA_SETS, DEFAULT_NUM_REFERENCE_DATA_SETS));
int numIterations = maxClusters - startSize;
String criterion = props.getProperty(METHOD_PROPERTY, DEFAULT_METHOD);
verbose("Transforming the original data set");
Transform tfidf = new TfIdfDocStripedTransform();
Transform rowMag = new RowMagnitudeTransform();
m = rowMag.transform(tfidf.transform(m));
verbose("Generating the reference data set");
// Generate the reference data sets.
ReferenceDataGenerator generator = new ReferenceDataGenerator(m);
Matrix[] gapMatrices = new Matrix[numGaps];
for (int i = 0; i < numGaps; ++i)
gapMatrices[i] = rowMag.transform(tfidf.transform(
generator.generateTestData()));
double[] gapResults = new double[numIterations];
double[] gapStds = new double[numIterations];
Assignments[] gapAssignments = new Assignments[numIterations];
Assignments bestAssignments = null;
double bestGap = Double.NEGATIVE_INFINITY;
int bestK = 0;
// Compute the gap statistic for each iteration.
for (int i = 0; i < numIterations; ++i) {
clusterIteration(i, startSize, criterion, m, gapMatrices,
gapResults, gapStds, gapAssignments); // depends on control dependency: [for], data = [i]
if (bestGap >= (gapResults[i] - gapStds[i])) {
break;
}
// Otherwise, continue clustering with higher values of k.
bestGap = gapResults[i]; // depends on control dependency: [for], data = [i]
bestAssignments = gapAssignments[i]; // depends on control dependency: [for], data = [i]
bestK = i + startSize; // depends on control dependency: [for], data = [i]
}
return bestAssignments;
} } |
public class class_name {
public ScaleLevel level(int minMarkers, int maxMarkers)
{
Iterator<ScaleLevel> iterator = scale.iterator(min, max);
ScaleLevel level = iterator.next();
ScaleLevel prev = null;
while (iterator.hasNext() && minMarkers > level.count(min, max))
{
prev = level;
level = iterator.next();
}
if (maxMarkers < level.count(min, max))
{
return prev;
}
else
{
return level;
}
} } | public class class_name {
public ScaleLevel level(int minMarkers, int maxMarkers)
{
Iterator<ScaleLevel> iterator = scale.iterator(min, max);
ScaleLevel level = iterator.next();
ScaleLevel prev = null;
while (iterator.hasNext() && minMarkers > level.count(min, max))
{
prev = level; // depends on control dependency: [while], data = [none]
level = iterator.next(); // depends on control dependency: [while], data = [none]
}
if (maxMarkers < level.count(min, max))
{
return prev; // depends on control dependency: [if], data = [none]
}
else
{
return level; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
private int staticEncodingLength(GenericPropertyInfo info) {
TypeDesc type = info.getStorageType();
TypeDesc primType = type.toPrimitiveType();
if (primType == null) {
if (info.isLob()) {
// Lob locator is stored as a long.
return 8;
}
} else {
if (info.isNullable()) {
// Type is a primitive wrapper.
switch (primType.getTypeCode()) {
case TypeDesc.BYTE_CODE:
return ~1;
case TypeDesc.BOOLEAN_CODE:
return 1;
case TypeDesc.SHORT_CODE:
case TypeDesc.CHAR_CODE:
return ~1;
case TypeDesc.INT_CODE:
return ~1;
case TypeDesc.FLOAT_CODE:
return 4;
case TypeDesc.LONG_CODE:
return ~1;
case TypeDesc.DOUBLE_CODE:
return 8;
}
} else {
// Type is primitive or a required primitive wrapper.
switch (type.getTypeCode()) {
case TypeDesc.BYTE_CODE:
case TypeDesc.BOOLEAN_CODE:
return 1;
case TypeDesc.SHORT_CODE:
case TypeDesc.CHAR_CODE:
return 2;
case TypeDesc.INT_CODE:
case TypeDesc.FLOAT_CODE:
return 4;
case TypeDesc.LONG_CODE:
case TypeDesc.DOUBLE_CODE:
return 8;
}
}
}
return ~0;
} } | public class class_name {
private int staticEncodingLength(GenericPropertyInfo info) {
TypeDesc type = info.getStorageType();
TypeDesc primType = type.toPrimitiveType();
if (primType == null) {
if (info.isLob()) {
// Lob locator is stored as a long.
return 8;
// depends on control dependency: [if], data = [none]
}
} else {
if (info.isNullable()) {
// Type is a primitive wrapper.
switch (primType.getTypeCode()) {
case TypeDesc.BYTE_CODE:
return ~1;
case TypeDesc.BOOLEAN_CODE:
return 1;
case TypeDesc.SHORT_CODE:
case TypeDesc.CHAR_CODE:
return ~1;
case TypeDesc.INT_CODE:
return ~1;
case TypeDesc.FLOAT_CODE:
return 4;
case TypeDesc.LONG_CODE:
return ~1;
case TypeDesc.DOUBLE_CODE:
return 8;
}
} else {
// Type is primitive or a required primitive wrapper.
switch (type.getTypeCode()) {
case TypeDesc.BYTE_CODE:
case TypeDesc.BOOLEAN_CODE:
return 1;
case TypeDesc.SHORT_CODE:
case TypeDesc.CHAR_CODE:
return 2;
case TypeDesc.INT_CODE:
case TypeDesc.FLOAT_CODE:
return 4;
case TypeDesc.LONG_CODE:
case TypeDesc.DOUBLE_CODE:
return 8;
}
}
}
return ~0;
} } |
public class class_name {
public void init(boolean fromBackup) {
if (!fromBackup && store.isEnabled()) {
Set<Long> keys = store.loadAllKeys();
if (keys != null) {
long maxId = -1;
for (Long key : keys) {
QueueItem item = new QueueItem(this, key, null);
getItemQueue().offer(item);
maxId = Math.max(maxId, key);
}
idGenerator = maxId + 1;
}
}
} } | public class class_name {
public void init(boolean fromBackup) {
if (!fromBackup && store.isEnabled()) {
Set<Long> keys = store.loadAllKeys();
if (keys != null) {
long maxId = -1;
for (Long key : keys) {
QueueItem item = new QueueItem(this, key, null);
getItemQueue().offer(item); // depends on control dependency: [for], data = [none]
maxId = Math.max(maxId, key); // depends on control dependency: [for], data = [key]
}
idGenerator = maxId + 1; // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
public I_CmsPrincipal lookupPrincipal(CmsDbContext dbc, CmsUUID principalId) {
try {
CmsGroup group = getUserDriver(dbc).readGroup(dbc, principalId);
if (group != null) {
return group;
}
} catch (Exception e) {
// ignore this exception
}
try {
CmsUser user = readUser(dbc, principalId);
if (user != null) {
return user;
}
} catch (Exception e) {
// ignore this exception
}
return null;
} } | public class class_name {
public I_CmsPrincipal lookupPrincipal(CmsDbContext dbc, CmsUUID principalId) {
try {
CmsGroup group = getUserDriver(dbc).readGroup(dbc, principalId);
if (group != null) {
return group; // depends on control dependency: [if], data = [none]
}
} catch (Exception e) {
// ignore this exception
} // depends on control dependency: [catch], data = [none]
try {
CmsUser user = readUser(dbc, principalId);
if (user != null) {
return user; // depends on control dependency: [if], data = [none]
}
} catch (Exception e) {
// ignore this exception
} // depends on control dependency: [catch], data = [none]
return null;
} } |
public class class_name {
public List<TagCount> getTagsOnOrg(int orgId, int limit, String text) {
MultivaluedMap<String, String> params=new MultivaluedMapImpl();
params.add("limit", new Integer(limit).toString());
if ((text != null) && (!text.isEmpty())) {
params.add("text", text);
}
return getTagsOnOrg(orgId, params);
} } | public class class_name {
public List<TagCount> getTagsOnOrg(int orgId, int limit, String text) {
MultivaluedMap<String, String> params=new MultivaluedMapImpl();
params.add("limit", new Integer(limit).toString());
if ((text != null) && (!text.isEmpty())) {
params.add("text", text); // depends on control dependency: [if], data = [none]
}
return getTagsOnOrg(orgId, params);
} } |
public class class_name {
public void setDestinations(java.util.Collection<String> destinations) {
if (destinations == null) {
this.destinations = null;
return;
}
this.destinations = new com.amazonaws.internal.SdkInternalList<String>(destinations);
} } | public class class_name {
public void setDestinations(java.util.Collection<String> destinations) {
if (destinations == null) {
this.destinations = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.destinations = new com.amazonaws.internal.SdkInternalList<String>(destinations);
} } |
public class class_name {
public EEnum getObjectFunctionSetSpecificationObjType() {
if (objectFunctionSetSpecificationObjTypeEEnum == null) {
objectFunctionSetSpecificationObjTypeEEnum = (EEnum)EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(102);
}
return objectFunctionSetSpecificationObjTypeEEnum;
} } | public class class_name {
public EEnum getObjectFunctionSetSpecificationObjType() {
if (objectFunctionSetSpecificationObjTypeEEnum == null) {
objectFunctionSetSpecificationObjTypeEEnum = (EEnum)EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(102); // depends on control dependency: [if], data = [none]
}
return objectFunctionSetSpecificationObjTypeEEnum;
} } |
public class class_name {
public final boolean awaitUninterruptibly() {
try {
return await(15, TimeUnit.SECONDS);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
return false;
}
} } | public class class_name {
public final boolean awaitUninterruptibly() {
try {
return await(15, TimeUnit.SECONDS); // depends on control dependency: [try], data = [none]
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
return false;
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
@Override
public void sendHttpRequest(final HttpRequest request) {
checkClosed();
if (trace) {
logger.debug(sputs("HTTP CLIENT: sendHttpRequest:: \n{}\n", request, "\nparams\n", request.params()));
}
final String uri = getURICreateParamsIfNeeded(request);
final HttpMethod httpMethod = HttpMethod.valueOf(request.getMethod());
final HttpClientRequest httpClientRequest = httpClient.request(
httpMethod, uri,
httpClientResponse -> handleResponse(request, httpClientResponse));
final MultiMap<String, String> headers = request.getHeaders();
httpClientRequest.exceptionHandler(error -> {
if (error instanceof ConnectException) {
closed.set(true);
try {
stop();
} catch (Exception ex) {
errorHandler.accept(ex);
logger.warn("Unable to stop client " +
"after failed connection", ex);
}
request.getReceiver().errorWithCode("\"Client connection was closed\"", HttpStatus.SERVICE_UNAVAILABLE);
logger.warn("Connection error", error);
} else {
logger.error("Unable to connect to " + host + " port " + port, error);
}
errorHandler.accept(error);
});
if (headers != null) {
for (String key : headers.keySet()) {
httpClientRequest.putHeader(key, headers.getAll(key));
}
}
final byte[] body = request.getBody();
if (keepAlive) {
httpClientRequest.putHeader(HttpHeaders.CONNECTION, HttpHeaders.KEEP_ALIVE);
}
if (body != null && body.length > 0) {
httpClientRequest.putHeader(HttpHeaders.CONTENT_LENGTH, Integer.toString(body.length));
if (request.getContentType() != null) {
httpClientRequest.putHeader("Content-Type", request.getContentType());
}
httpClientRequest.end(Buffer.buffer(request.getBody()));
} else {
httpClientRequest.end();
}
if (trace) logger.trace("HttpClientVertx::SENT \n{}", request);
} } | public class class_name {
@Override
public void sendHttpRequest(final HttpRequest request) {
checkClosed();
if (trace) {
logger.debug(sputs("HTTP CLIENT: sendHttpRequest:: \n{}\n", request, "\nparams\n", request.params())); // depends on control dependency: [if], data = [none]
}
final String uri = getURICreateParamsIfNeeded(request);
final HttpMethod httpMethod = HttpMethod.valueOf(request.getMethod());
final HttpClientRequest httpClientRequest = httpClient.request(
httpMethod, uri,
httpClientResponse -> handleResponse(request, httpClientResponse));
final MultiMap<String, String> headers = request.getHeaders();
httpClientRequest.exceptionHandler(error -> {
if (error instanceof ConnectException) {
closed.set(true);
try {
stop(); // depends on control dependency: [try], data = [none]
} catch (Exception ex) {
errorHandler.accept(ex);
logger.warn("Unable to stop client " +
"after failed connection", ex);
} // depends on control dependency: [catch], data = [none]
request.getReceiver().errorWithCode("\"Client connection was closed\"", HttpStatus.SERVICE_UNAVAILABLE);
logger.warn("Connection error", error);
} else {
logger.error("Unable to connect to " + host + " port " + port, error);
}
errorHandler.accept(error);
});
if (headers != null) {
for (String key : headers.keySet()) {
httpClientRequest.putHeader(key, headers.getAll(key));
}
}
final byte[] body = request.getBody();
if (keepAlive) {
httpClientRequest.putHeader(HttpHeaders.CONNECTION, HttpHeaders.KEEP_ALIVE);
}
if (body != null && body.length > 0) {
httpClientRequest.putHeader(HttpHeaders.CONTENT_LENGTH, Integer.toString(body.length));
if (request.getContentType() != null) {
httpClientRequest.putHeader("Content-Type", request.getContentType());
}
httpClientRequest.end(Buffer.buffer(request.getBody()));
} else {
httpClientRequest.end();
}
if (trace) logger.trace("HttpClientVertx::SENT \n{}", request);
} } |
public class class_name {
/**
 * Loads the native library for the given base name.
 * First tries to load it as a bundled resource via loadLibraryResource(...);
 * if that fails, falls back to System.loadLibrary(...). If both attempts
 * fail, throws an UnsatisfiedLinkError whose message contains platform
 * details and the stack traces of both failed attempts.
 */
public static void loadLibrary(String baseName) {
String libName = LibUtils.createLibName(baseName);
// Remembers the failure of the resource-based attempt so it can be
// included in the final error report.
Throwable throwable = null;
try {
loadLibraryResource(libName);
return;
} catch (Throwable t) {
throwable = t;
}
try {
System.loadLibrary(libName);
return;
} catch (Throwable t) {
// Both attempts failed: assemble a detailed diagnostic message.
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
pw.println("Error while loading native library \"" + libName + "\" with base name \"" + baseName + "\"");
pw.println("Operating system name: " + System.getProperty("os.name"));
pw.println("Architecture : " + System.getProperty("os.arch"));
pw.println("Architecture bit size: " + System.getProperty("sun.arch.data.model"));
if (throwable != null) {
pw.println("Stack trace from the attempt to " + "load the library as a resource:");
throwable.printStackTrace(pw);
}
pw.println("Stack trace from the attempt to " + "load the library as a file:");
t.printStackTrace(pw);
pw.flush();
pw.close();
throw new UnsatisfiedLinkError("Could not load the native library.\n" + sw.toString());
}
} }
public static void loadLibrary(String baseName) {
String libName = LibUtils.createLibName(baseName);
Throwable throwable = null;
try {
loadLibraryResource(libName); // depends on control dependency: [try], data = [none]
return; // depends on control dependency: [try], data = [none]
} catch (Throwable t) {
throwable = t;
} // depends on control dependency: [catch], data = [none]
try {
System.loadLibrary(libName);
return;
} catch (Throwable t) {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
pw.println("Error while loading native library \"" + libName + "\" with base name \"" + baseName + "\"");
pw.println("Operating system name: " + System.getProperty("os.name"));
pw.println("Architecture : " + System.getProperty("os.arch"));
pw.println("Architecture bit size: " + System.getProperty("sun.arch.data.model"));
if (throwable != null) {
pw.println("Stack trace from the attempt to " + "load the library as a resource:");
throwable.printStackTrace(pw);
}
pw.println("Stack trace from the attempt to " + "load the library as a file:");
t.printStackTrace(pw);
pw.flush();
pw.close();
throw new UnsatisfiedLinkError("Could not load the native library.\n" + sw.toString());
}
} } |
public class class_name {
private static final String getadminObjectID(String application, String module, String component, String jndiName) {
StringBuilder sb = new StringBuilder(jndiName.length() + 80);
if (application != null) {
sb.append(AppDefinedResource.APPLICATION).append('[').append(application).append(']').append('/');
if (module != null) {
sb.append(AppDefinedResource.MODULE).append('[').append(module).append(']').append('/');
if (component != null)
sb.append(AppDefinedResource.COMPONENT).append('[').append(component).append(']').append('/');
}
}
return sb.append(AdminObjectService.ADMIN_OBJECT).append('[').append(jndiName).append(']').toString();
} } | public class class_name {
private static final String getadminObjectID(String application, String module, String component, String jndiName) {
StringBuilder sb = new StringBuilder(jndiName.length() + 80);
if (application != null) {
sb.append(AppDefinedResource.APPLICATION).append('[').append(application).append(']').append('/'); // depends on control dependency: [if], data = [(application]
if (module != null) {
sb.append(AppDefinedResource.MODULE).append('[').append(module).append(']').append('/'); // depends on control dependency: [if], data = [(module]
if (component != null)
sb.append(AppDefinedResource.COMPONENT).append('[').append(component).append(']').append('/');
}
}
return sb.append(AdminObjectService.ADMIN_OBJECT).append('[').append(jndiName).append(']').toString();
} } |
public class class_name {
/**
 * Removes and returns the connection registered under the given session id,
 * or null if no such connection exists. On a successful removal the live
 * connection counter is decremented and the thread-local connection is
 * cleared.
 */
@Override
public RTMPConnection removeConnection(String sessionId) {
log.debug("Removing connection with session id: {}", sessionId);
if (log.isTraceEnabled()) {
log.trace("Connections ({}) at pre-remove: {}", connMap.size(), connMap.values());
}
// remove from map
RTMPConnection conn = connMap.remove(sessionId);
if (conn != null) {
log.trace("Connections: {}", conns.decrementAndGet());
// NOTE(review): this clears the calling thread's connection-local
// unconditionally — confirm the removed connection is always the one
// bound to the current thread.
Red5.setConnectionLocal(null);
}
return conn;
} }
@Override
public RTMPConnection removeConnection(String sessionId) {
log.debug("Removing connection with session id: {}", sessionId);
if (log.isTraceEnabled()) {
log.trace("Connections ({}) at pre-remove: {}", connMap.size(), connMap.values()); // depends on control dependency: [if], data = [none]
}
// remove from map
RTMPConnection conn = connMap.remove(sessionId);
if (conn != null) {
log.trace("Connections: {}", conns.decrementAndGet()); // depends on control dependency: [if], data = [none]
Red5.setConnectionLocal(null); // depends on control dependency: [if], data = [null)]
}
return conn;
} } |
public class class_name {
/**
 * Runs the given callback for a publish request in sequence order.
 * If the request and response sequences are already aligned, the callback
 * runs immediately and the event index advances to the request's index.
 * Otherwise the callback is queued and completeResponses() is invoked to
 * drain any callbacks that have become runnable.
 */
public void sequenceEvent(PublishRequest request, Runnable callback) {
if (requestSequence == responseSequence) {
log.trace("Completing {}", request);
callback.run();
eventIndex = request.eventIndex();
} else {
eventCallbacks.add(new EventCallback(request, callback));
completeResponses();
}
} }
public void sequenceEvent(PublishRequest request, Runnable callback) {
if (requestSequence == responseSequence) {
log.trace("Completing {}", request); // depends on control dependency: [if], data = [none]
callback.run(); // depends on control dependency: [if], data = [none]
eventIndex = request.eventIndex(); // depends on control dependency: [if], data = [none]
} else {
eventCallbacks.add(new EventCallback(request, callback)); // depends on control dependency: [if], data = [none]
completeResponses(); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static Action getActionByName(Entity entity, String name) {
if(entity == null) {
throw new IllegalArgumentException("entity cannot be null.");
}
if(StringUtils.isBlank(name)) {
throw new IllegalArgumentException("name cannot be null or empty.");
}
List<Action> actions = entity.getActions();
if (actions == null)
return null;
Action action = null;
for (Action a : actions) {
if (a.getName().equals(name)) {
action = a;
break;
}
}
return action;
} } | public class class_name {
public static Action getActionByName(Entity entity, String name) {
if(entity == null) {
throw new IllegalArgumentException("entity cannot be null.");
}
if(StringUtils.isBlank(name)) {
throw new IllegalArgumentException("name cannot be null or empty.");
}
List<Action> actions = entity.getActions();
if (actions == null)
return null;
Action action = null;
for (Action a : actions) {
if (a.getName().equals(name)) {
action = a;
// depends on control dependency: [if], data = [none]
break;
}
}
return action;
} } |
public class class_name {
public void processRequest(RequestEvent arg0) {
LOG.trace("request received !");
synchronized (listeners) {
Iterator<SipListener> iter = listeners.iterator();
while (iter.hasNext() == true) {
SipListener listener = iter.next();
LOG.trace("calling listener");
listener.processRequest(arg0);
}
}
} } | public class class_name {
public void processRequest(RequestEvent arg0) {
LOG.trace("request received !");
synchronized (listeners) {
Iterator<SipListener> iter = listeners.iterator();
while (iter.hasNext() == true) {
SipListener listener = iter.next();
LOG.trace("calling listener"); // depends on control dependency: [while], data = [none]
listener.processRequest(arg0); // depends on control dependency: [while], data = [none]
}
}
} } |
public class class_name {
/**
 * Splits the given text into sentences using an OpenNLP SentenceDetector.
 * Sentence boundaries come from sentPosDetect(text); each returned sentence
 * is the substring from the previous boundary up to the trimmed length of
 * the current segment. Any exception is logged and an empty (or partial)
 * list is returned.
 */
public static List<String> splitSentence2(String text, SentenceDetector sd) {
List<String> sentences = new ArrayList<String>();
try {
int sentenceOffsets[] = sd.sentPosDetect(text);
int begin = 0;
int end = 0;
for (int i = 0; i < sentenceOffsets.length; i++) {
// end = begin + length of the segment with surrounding whitespace
// trimmed. NOTE(review): this only drops TRAILING whitespace
// correctly when the segment has no leading whitespace — confirm
// the detector's offsets guarantee that.
end = begin
+ (text.substring(begin, sentenceOffsets[i]).trim())
.length();
sentences.add(text.substring(begin, end));
begin = sentenceOffsets[i];
}
} catch (Exception e) {
LOG.warn("failed to extract sentences from text '" + text + "'", e);
}
return sentences;
} }
public static List<String> splitSentence2(String text, SentenceDetector sd) {
List<String> sentences = new ArrayList<String>();
try {
int sentenceOffsets[] = sd.sentPosDetect(text);
int begin = 0;
int end = 0;
for (int i = 0; i < sentenceOffsets.length; i++) {
end = begin
+ (text.substring(begin, sentenceOffsets[i]).trim())
.length(); // depends on control dependency: [for], data = [none]
sentences.add(text.substring(begin, end)); // depends on control dependency: [for], data = [none]
begin = sentenceOffsets[i]; // depends on control dependency: [for], data = [i]
}
} catch (Exception e) {
LOG.warn("failed to extract sentences from text '" + text + "'", e);
} // depends on control dependency: [catch], data = [none]
return sentences;
} } |
public class class_name {
public Map<String,Object> propertiesForFunction() {
val fields = DifferentialFunctionClassHolder.getInstance().getFieldsForFunction(this);
Map<String,Object> ret = new LinkedHashMap<>();
for(val entry : fields.entrySet()) {
try {
ret.put(entry.getKey(),fields.get(entry.getKey()).get(this));
} catch (IllegalAccessException e) {
e.printStackTrace();
}
}
return ret;
} } | public class class_name {
public Map<String,Object> propertiesForFunction() {
val fields = DifferentialFunctionClassHolder.getInstance().getFieldsForFunction(this);
Map<String,Object> ret = new LinkedHashMap<>();
for(val entry : fields.entrySet()) {
try {
ret.put(entry.getKey(),fields.get(entry.getKey()).get(this)); // depends on control dependency: [try], data = [none]
} catch (IllegalAccessException e) {
e.printStackTrace();
} // depends on control dependency: [catch], data = [none]
}
return ret;
} } |
public class class_name {
/**
 * Marshals a ListDeploymentJobsRequest into the wire format via the given
 * protocol marshaller. Marshals the filters, next-token and max-results
 * fields in that order.
 *
 * @throws SdkClientException if the request is null or any field fails to
 *         marshall (the underlying cause is preserved)
 */
public void marshall(ListDeploymentJobsRequest listDeploymentJobsRequest, ProtocolMarshaller protocolMarshaller) {
if (listDeploymentJobsRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(listDeploymentJobsRequest.getFilters(), FILTERS_BINDING);
protocolMarshaller.marshall(listDeploymentJobsRequest.getNextToken(), NEXTTOKEN_BINDING);
protocolMarshaller.marshall(listDeploymentJobsRequest.getMaxResults(), MAXRESULTS_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} }
public void marshall(ListDeploymentJobsRequest listDeploymentJobsRequest, ProtocolMarshaller protocolMarshaller) {
if (listDeploymentJobsRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(listDeploymentJobsRequest.getFilters(), FILTERS_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(listDeploymentJobsRequest.getNextToken(), NEXTTOKEN_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(listDeploymentJobsRequest.getMaxResults(), MAXRESULTS_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
/**
 * Bumps the mid version by one and resets the minor version to zero.
 * The internal history is updated both before and after the change so the
 * pre-bump state is recorded. Any exception is reported via
 * Exceptions.printStackTrace and swallowed.
 */
public void increaseMidVersion() {
try {
// Record the state prior to the version bump.
updateInternalHistory();
setMidVersion(getMidVersion() + 1);
setMinorVersion(0);
// Record the state after the bump as well.
updateInternalHistory();
}
catch (Exception ex) {
Exceptions.printStackTrace(ex);
}
} }
public void increaseMidVersion() {
try {
updateInternalHistory();
// depends on control dependency: [try], data = [none]
setMidVersion(getMidVersion() + 1);
// depends on control dependency: [try], data = [none]
setMinorVersion(0);
// depends on control dependency: [try], data = [none]
updateInternalHistory();
// depends on control dependency: [try], data = [none]
}
catch (Exception ex) {
Exceptions.printStackTrace(ex);
}
// depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public static PluginConfiguration getPluginConfiguration(Context context, Class plugin) {
int index = getPluginIndex(context, plugin);
if (index > -1) {
return getConfigPlugins(context).get(index);
}
return null;
} } | public class class_name {
public static PluginConfiguration getPluginConfiguration(Context context, Class plugin) {
int index = getPluginIndex(context, plugin);
if (index > -1) {
return getConfigPlugins(context).get(index); // depends on control dependency: [if], data = [(index]
}
return null;
} } |
public class class_name {
/**
 * Hands {@code len} bytes starting at {@code src}'s position to the network
 * BIO without consuming them from {@code src}.
 * Direct buffers are passed to the BIO by native address and this method
 * returns null. Heap buffers are first copied into a temporary direct
 * ByteBuf which is then handed to the BIO; that ByteBuf is returned and the
 * caller is responsible for releasing it. If the copy path fails, the
 * temporary buffer is released here before the cause is rethrown.
 */
private ByteBuf writeEncryptedData(final ByteBuffer src, int len) {
final int pos = src.position();
if (src.isDirect()) {
SSL.bioSetByteBuffer(networkBIO, bufferAddress(src) + pos, len, false);
} else {
final ByteBuf buf = alloc.directBuffer(len);
try {
final int limit = src.limit();
// Temporarily cap the limit so exactly len bytes are copied.
src.limit(pos + len);
buf.writeBytes(src);
// Restore the original position and limit because we don't want to consume from `src`.
src.position(pos);
src.limit(limit);
SSL.bioSetByteBuffer(networkBIO, memoryAddress(buf), len, false);
return buf;
} catch (Throwable cause) {
// Avoid leaking the direct buffer if anything above failed.
buf.release();
PlatformDependent.throwException(cause);
}
}
return null;
} }
private ByteBuf writeEncryptedData(final ByteBuffer src, int len) {
final int pos = src.position();
if (src.isDirect()) {
SSL.bioSetByteBuffer(networkBIO, bufferAddress(src) + pos, len, false); // depends on control dependency: [if], data = [none]
} else {
final ByteBuf buf = alloc.directBuffer(len);
try {
final int limit = src.limit();
src.limit(pos + len); // depends on control dependency: [try], data = [none]
buf.writeBytes(src); // depends on control dependency: [try], data = [none]
// Restore the original position and limit because we don't want to consume from `src`.
src.position(pos); // depends on control dependency: [try], data = [none]
src.limit(limit); // depends on control dependency: [try], data = [none]
SSL.bioSetByteBuffer(networkBIO, memoryAddress(buf), len, false); // depends on control dependency: [try], data = [none]
return buf; // depends on control dependency: [try], data = [none]
} catch (Throwable cause) {
buf.release();
PlatformDependent.throwException(cause);
} // depends on control dependency: [catch], data = [none]
}
return null;
} } |
public class class_name {
/**
 * Interprets one example row of the specification as a check: executes the
 * parsed action against the fixture and compares the result with the
 * expected cell. Right/wrong/exception outcomes annotate the appropriate
 * cells and feed the table's statistics. If executing the call itself
 * throws, the exception is reported on the table and annotated on the
 * first keyword cell (or the check cell when no keyword cell exists).
 */
public void interpret( Specification table )
{
Example row = table.nextExample();
Example check = row.firstChild();
Example expectedCell = check.lastSibling();
Action action = Action.parse( actionCells(row));
Example first = CollectionUtil.first(keywordCells(row));
try
{
Call call = action.checkAgainst( fixture );
call.expect( expectedCell.getContent() );
// Register outcome-specific annotations before executing the call.
call.will( Annotate.right( expectedCell ) ).when( ResultIs.right() );
call.will( Annotate.wrongWithDetails( expectedCell ) ).when( ResultIs.wrong() );
call.will( Annotate.exception(first) ).when( ResultIs.exception() );
call.will( tallyStatistics( table ) );
call.execute();
}
catch (Exception e)
{
reportException( table );
if (first != null) {
first.annotate(Annotations.exception(e));
} else {
check.annotate(Annotations.exception(e));
}
}
} }
public void interpret( Specification table )
{
Example row = table.nextExample();
Example check = row.firstChild();
Example expectedCell = check.lastSibling();
Action action = Action.parse( actionCells(row));
Example first = CollectionUtil.first(keywordCells(row));
try
{
Call call = action.checkAgainst( fixture );
call.expect( expectedCell.getContent() ); // depends on control dependency: [try], data = [none]
call.will( Annotate.right( expectedCell ) ).when( ResultIs.right() ); // depends on control dependency: [try], data = [none]
call.will( Annotate.wrongWithDetails( expectedCell ) ).when( ResultIs.wrong() ); // depends on control dependency: [try], data = [none]
call.will( Annotate.exception(first) ).when( ResultIs.exception() ); // depends on control dependency: [try], data = [exception]
call.will( tallyStatistics( table ) ); // depends on control dependency: [try], data = [none]
call.execute(); // depends on control dependency: [try], data = [none]
}
catch (Exception e)
{
reportException( table );
if (first != null) {
first.annotate(Annotations.exception(e)); // depends on control dependency: [if], data = [none]
} else {
check.annotate(Annotations.exception(e)); // depends on control dependency: [if], data = [none]
}
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
/**
 * Collects all stored business entities matching the given Lucene query
 * into a fresh mutable list.
 *
 * @param aQuery          the query to execute; must not be null
 * @param nMaxResultCount maximum number of results; sign-checked, semantics
 *                        delegated to searchAllDocuments
 * @return a mutable copy of the matches; empty (never null) when the search
 *         fails — the IOException is logged, not rethrown
 */
@Nonnull
@ReturnsMutableCopy
public ICommonsList <PDStoredBusinessEntity> getAllDocuments (@Nonnull final Query aQuery,
@CheckForSigned final int nMaxResultCount)
{
final ICommonsList <PDStoredBusinessEntity> aTargetList = new CommonsArrayList <> ();
try
{
// Stream matches directly into the target list via method reference.
searchAllDocuments (aQuery, nMaxResultCount, aTargetList::add);
}
catch (final IOException ex)
{
LOGGER.error ("Error searching for documents with query " + aQuery, ex);
}
return aTargetList;
} }
@Nonnull
@ReturnsMutableCopy
public ICommonsList <PDStoredBusinessEntity> getAllDocuments (@Nonnull final Query aQuery,
@CheckForSigned final int nMaxResultCount)
{
final ICommonsList <PDStoredBusinessEntity> aTargetList = new CommonsArrayList <> ();
try
{
searchAllDocuments (aQuery, nMaxResultCount, aTargetList::add); // depends on control dependency: [try], data = [none]
}
catch (final IOException ex)
{
LOGGER.error ("Error searching for documents with query " + aQuery, ex);
} // depends on control dependency: [catch], data = [none]
return aTargetList;
} } |
public class class_name {
public static RequestPatternBuilder like(RequestPattern requestPattern) {
RequestPatternBuilder builder = new RequestPatternBuilder();
builder.url = requestPattern.getUrlMatcher();
builder.method = requestPattern.getMethod();
if (requestPattern.getHeaders() != null) {
builder.headers = requestPattern.getHeaders();
}
if (requestPattern.getQueryParameters() != null) {
builder.queryParams = requestPattern.getQueryParameters();
}
if (requestPattern.getCookies() != null) {
builder.cookies = requestPattern.getCookies();
}
if (requestPattern.getBodyPatterns() != null) {
builder.bodyPatterns = requestPattern.getBodyPatterns();
}
if (requestPattern.hasInlineCustomMatcher()) {
builder.customMatcher = requestPattern.getMatcher();
}
if (requestPattern.getMultipartPatterns() != null) {
builder.multiparts = requestPattern.getMultipartPatterns();
}
builder.basicCredentials = requestPattern.getBasicAuthCredentials();
builder.customMatcherDefinition = requestPattern.getCustomMatcher();
return builder;
} } | public class class_name {
public static RequestPatternBuilder like(RequestPattern requestPattern) {
RequestPatternBuilder builder = new RequestPatternBuilder();
builder.url = requestPattern.getUrlMatcher();
builder.method = requestPattern.getMethod();
if (requestPattern.getHeaders() != null) {
builder.headers = requestPattern.getHeaders(); // depends on control dependency: [if], data = [none]
}
if (requestPattern.getQueryParameters() != null) {
builder.queryParams = requestPattern.getQueryParameters(); // depends on control dependency: [if], data = [none]
}
if (requestPattern.getCookies() != null) {
builder.cookies = requestPattern.getCookies(); // depends on control dependency: [if], data = [none]
}
if (requestPattern.getBodyPatterns() != null) {
builder.bodyPatterns = requestPattern.getBodyPatterns(); // depends on control dependency: [if], data = [none]
}
if (requestPattern.hasInlineCustomMatcher()) {
builder.customMatcher = requestPattern.getMatcher(); // depends on control dependency: [if], data = [none]
}
if (requestPattern.getMultipartPatterns() != null) {
builder.multiparts = requestPattern.getMultipartPatterns(); // depends on control dependency: [if], data = [none]
}
builder.basicCredentials = requestPattern.getBasicAuthCredentials();
builder.customMatcherDefinition = requestPattern.getCustomMatcher();
return builder;
} } |
public class class_name {
/**
 * Registers a new node address and, when the address belongs to this
 * transport (no transport id or matching id), notifies every distinct
 * known physical address exactly once with a WORKMANAGER_ADD message.
 * Send failures are logged per-target and do not abort the loop.
 */
public void register(Address address)
{
// Record the logical address; its physical address is not yet known.
nodes.put(address, null);
if (address.getTransportId() == null || address.getTransportId().equals(getId()))
{
// De-duplicate targets: several logical addresses may map to the same
// physical address, and null entries are unresolved.
Set<T> sent = new HashSet<T>();
for (T addr : nodes.values())
{
if (addr != null && !sent.contains(addr))
{
sent.add(addr);
try
{
sendMessage(addr, Request.WORKMANAGER_ADD, address, (Serializable)getOwnAddress());
}
catch (Throwable t)
{
log.error("Register " + t.getMessage(), t);
}
}
}
}
} }
public void register(Address address)
{
nodes.put(address, null);
if (address.getTransportId() == null || address.getTransportId().equals(getId()))
{
Set<T> sent = new HashSet<T>();
for (T addr : nodes.values())
{
if (addr != null && !sent.contains(addr))
{
sent.add(addr); // depends on control dependency: [if], data = [(addr]
try
{
sendMessage(addr, Request.WORKMANAGER_ADD, address, (Serializable)getOwnAddress()); // depends on control dependency: [try], data = [none]
}
catch (Throwable t)
{
log.error("Register " + t.getMessage(), t);
} // depends on control dependency: [catch], data = [none]
}
}
}
} } |
public class class_name {
/**
 * URL-decodes {@code length} characters of {@code encoded} starting at
 * {@code offset}: '+' becomes space and "%XX" becomes the byte XX. Decoding
 * is lazy — the byte buffer is only allocated on the first '+' or '%', so a
 * string with no escapes is returned (or substringed) without copying.
 * A null {@code charset} defaults to ISO-8859-1; an unsupported charset
 * falls back to the platform default encoding.
 *
 * @throws IllegalArgumentException if a character is above 0xff
 */
public static String decodeString(String encoded,int offset,int length,String charset)
{
if (charset==null)
charset=StringUtil.__ISO_8859_1;
byte[] bytes=null;
int n=0;
for (int i=0;i<length;i++)
{
char c = encoded.charAt(offset+i);
// NOTE(review): c<0 can never be true — Java char is unsigned 16-bit —
// so only the c>0xff half of this guard is effective.
if (c<0||c>0xff)
throw new IllegalArgumentException("Not encoded");
if (c=='+')
{
if (bytes==null)
{
// First escape seen: allocate and copy the already-scanned prefix.
bytes=new byte[length*2];
encoded.getBytes(offset, offset+i, bytes, 0);
n=i;
}
bytes[n++] = (byte) ' ';
}
else if (c=='%' && (i+2)<length)
{
// Decode two hex digits. NOTE(review): the digit parsing is lenient —
// any letter a-z/A-Z is accepted, not just a-f/A-F; malformed input
// is decoded to an arbitrary byte rather than rejected.
byte b;
char cn = encoded.charAt(offset+i+1);
if (cn>='a' && cn<='z')
b=(byte)(10+cn-'a');
else if (cn>='A' && cn<='Z')
b=(byte)(10+cn-'A');
else
b=(byte)(cn-'0');
cn = encoded.charAt(offset+i+2);
if (cn>='a' && cn<='z')
b=(byte)(b*16+10+cn-'a');
else if (cn>='A' && cn<='Z')
b=(byte)(b*16+10+cn-'A');
else
b=(byte)(b*16+cn-'0');
if (bytes==null)
{
bytes=new byte[length*2];
encoded.getBytes(offset, offset+i, bytes, 0);
n=i;
}
i+=2;
bytes[n++]=b;
}
else if (n>0)
// Plain character after decoding has started: copy it through.
bytes[n++] = (byte) c;
}
// No escapes at all: return the original (sub)string untouched.
if (bytes==null)
{
if (offset==0 && encoded.length()==length)
return encoded;
return encoded.substring(offset,offset+length);
}
try
{
return new String(bytes,0,n,charset);
}
catch (UnsupportedEncodingException e)
{
// Fall back to the platform default charset.
return new String(bytes,0,n);
}
} }
public static String decodeString(String encoded,int offset,int length,String charset)
{
if (charset==null)
charset=StringUtil.__ISO_8859_1;
byte[] bytes=null;
int n=0;
for (int i=0;i<length;i++)
{
char c = encoded.charAt(offset+i);
if (c<0||c>0xff)
throw new IllegalArgumentException("Not encoded");
if (c=='+')
{
if (bytes==null)
{
bytes=new byte[length*2]; // depends on control dependency: [if], data = [none]
encoded.getBytes(offset, offset+i, bytes, 0); // depends on control dependency: [if], data = [none]
n=i; // depends on control dependency: [if], data = [none]
}
bytes[n++] = (byte) ' '; // depends on control dependency: [if], data = [none]
}
else if (c=='%' && (i+2)<length)
{
byte b;
char cn = encoded.charAt(offset+i+1);
if (cn>='a' && cn<='z')
b=(byte)(10+cn-'a');
else if (cn>='A' && cn<='Z')
b=(byte)(10+cn-'A');
else
b=(byte)(cn-'0');
cn = encoded.charAt(offset+i+2); // depends on control dependency: [if], data = [none]
if (cn>='a' && cn<='z')
b=(byte)(b*16+10+cn-'a');
else if (cn>='A' && cn<='Z')
b=(byte)(b*16+10+cn-'A');
else
b=(byte)(b*16+cn-'0');
if (bytes==null)
{
bytes=new byte[length*2]; // depends on control dependency: [if], data = [none]
encoded.getBytes(offset, offset+i, bytes, 0); // depends on control dependency: [if], data = [none]
n=i; // depends on control dependency: [if], data = [none]
}
i+=2; // depends on control dependency: [if], data = [none]
bytes[n++]=b; // depends on control dependency: [if], data = [none]
}
else if (n>0)
bytes[n++] = (byte) c;
}
if (bytes==null)
{
if (offset==0 && encoded.length()==length)
return encoded;
return encoded.substring(offset,offset+length); // depends on control dependency: [if], data = [none]
}
try
{
return new String(bytes,0,n,charset); // depends on control dependency: [try], data = [none]
}
catch (UnsupportedEncodingException e)
{
return new String(bytes,0,n);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
/**
 * Emits the synthetic $SwitchMap support for a switch over an enum:
 * a static final int[] sized by EnumType.values().length, plus a static
 * initializer block that assigns each enumerator's mapped case value at
 * its ordinal() index. Each assignment is wrapped in its own
 * try/catch(NoSuchFieldError) so a missing enumerator (e.g. after separate
 * compilation) is silently skipped. Both members are prepended to the
 * owning class's definitions.
 */
void translate() {
make.at(pos.getStartPosition());
JCClassDecl owner = classDef((ClassSymbol)mapVar.owner);
// synthetic static final int[] $SwitchMap$Color = new int[Color.values().length];
MethodSymbol valuesMethod = lookupMethod(pos,
names.values,
forEnum.type,
List.nil());
JCExpression size = make // Color.values().length
.Select(make.App(make.QualIdent(valuesMethod)),
syms.lengthVar);
JCExpression mapVarInit = make
.NewArray(make.Type(syms.intType), List.of(size), null)
.setType(new ArrayType(syms.intType, syms.arrayClass));
// try { $SwitchMap$Color[red.ordinal()] = 1; } catch (java.lang.NoSuchFieldError ex) {}
ListBuffer<JCStatement> stmts = new ListBuffer<>();
Symbol ordinalMethod = lookupMethod(pos,
names.ordinal,
forEnum.type,
List.nil());
// Shared empty catch clause for NoSuchFieldError, reused by every try below.
List<JCCatch> catcher = List.<JCCatch>nil()
.prepend(make.Catch(make.VarDef(new VarSymbol(PARAMETER, names.ex,
syms.noSuchFieldErrorType,
syms.noSymbol),
null),
make.Block(0, List.nil())));
for (Map.Entry<VarSymbol,Integer> e : values.entrySet()) {
VarSymbol enumerator = e.getKey();
Integer mappedValue = e.getValue();
// $SwitchMap$X[enumerator.ordinal()] = mappedValue;
JCExpression assign = make
.Assign(make.Indexed(mapVar,
make.App(make.Select(make.QualIdent(enumerator),
ordinalMethod))),
make.Literal(mappedValue))
.setType(syms.intType);
JCStatement exec = make.Exec(assign);
JCStatement _try = make.Try(make.Block(0, List.of(exec)), catcher, null);
stmts.append(_try);
}
// Prepend the static initializer block, then the field declaration, so the
// field ends up first in the class body.
owner.defs = owner.defs
.prepend(make.Block(STATIC, stmts.toList()))
.prepend(make.VarDef(mapVar, mapVarInit));
} }
void translate() {
make.at(pos.getStartPosition());
JCClassDecl owner = classDef((ClassSymbol)mapVar.owner);
// synthetic static final int[] $SwitchMap$Color = new int[Color.values().length];
MethodSymbol valuesMethod = lookupMethod(pos,
names.values,
forEnum.type,
List.nil());
JCExpression size = make // Color.values().length
.Select(make.App(make.QualIdent(valuesMethod)),
syms.lengthVar);
JCExpression mapVarInit = make
.NewArray(make.Type(syms.intType), List.of(size), null)
.setType(new ArrayType(syms.intType, syms.arrayClass));
// try { $SwitchMap$Color[red.ordinal()] = 1; } catch (java.lang.NoSuchFieldError ex) {}
ListBuffer<JCStatement> stmts = new ListBuffer<>();
Symbol ordinalMethod = lookupMethod(pos,
names.ordinal,
forEnum.type,
List.nil());
List<JCCatch> catcher = List.<JCCatch>nil()
.prepend(make.Catch(make.VarDef(new VarSymbol(PARAMETER, names.ex,
syms.noSuchFieldErrorType,
syms.noSymbol),
null),
make.Block(0, List.nil())));
for (Map.Entry<VarSymbol,Integer> e : values.entrySet()) {
VarSymbol enumerator = e.getKey();
Integer mappedValue = e.getValue();
JCExpression assign = make
.Assign(make.Indexed(mapVar,
make.App(make.Select(make.QualIdent(enumerator),
ordinalMethod))),
make.Literal(mappedValue))
.setType(syms.intType);
JCStatement exec = make.Exec(assign);
JCStatement _try = make.Try(make.Block(0, List.of(exec)), catcher, null);
stmts.append(_try); // depends on control dependency: [for], data = [e]
}
owner.defs = owner.defs
.prepend(make.Block(STATIC, stmts.toList()))
.prepend(make.VarDef(mapVar, mapVarInit));
} } |
public class class_name {
public static String getPOSTagModelFeaturesRange(
final TrainingParameters params) {
String lemmaRangeFlag = null;
if (params.getSettings().get("POSTagModelFeaturesRange") != null) {
lemmaRangeFlag = params.getSettings().get("POSTagModelFeaturesRange");
} else {
lemmaRangeFlag = Flags.DEFAULT_POSTAG_RANGE;
}
return lemmaRangeFlag;
} } | public class class_name {
public static String getPOSTagModelFeaturesRange(
final TrainingParameters params) {
String lemmaRangeFlag = null;
if (params.getSettings().get("POSTagModelFeaturesRange") != null) {
lemmaRangeFlag = params.getSettings().get("POSTagModelFeaturesRange"); // depends on control dependency: [if], data = [none]
} else {
lemmaRangeFlag = Flags.DEFAULT_POSTAG_RANGE; // depends on control dependency: [if], data = [none]
}
return lemmaRangeFlag;
} } |
public class class_name {
/**
 * Offers an element subject to an additional capacity cap on top of the
 * superclass's own acceptance rules; short-circuits the super call when the
 * queue is already at capacity. If any takers are currently waiting, one is
 * signalled under the lock.
 * NOTE(review): the size()/capacity check and the super.offer are not
 * performed atomically — confirm the surrounding locking discipline makes
 * the capacity bound a best-effort limit by design.
 */
public boolean offer(T t) {
boolean retval=size() < capacity && super.offer(t);
if(waiting_takers.get() > 0) {
lock.lock();
try {
not_empty.signal();
}
finally {
lock.unlock();
}
}
return retval;
} }
public boolean offer(T t) {
boolean retval=size() < capacity && super.offer(t);
if(waiting_takers.get() > 0) {
lock.lock(); // depends on control dependency: [if], data = [none]
try {
not_empty.signal(); // depends on control dependency: [try], data = [none]
}
finally {
lock.unlock();
}
}
return retval;
} } |
public class class_name {
/**
 * Scans the declared fields of the named type for @UsesComponent
 * annotations and, for each one, enriches the matching relationship from
 * {@code component} to the component of the field's type: relationships
 * that currently have no description get the annotation's description and
 * technology. Missing destination components and unloadable types are
 * logged as warnings rather than raised.
 */
private void findUsesComponentAnnotations(Component component, String typeName) {
try {
Class type = getTypeRepository().loadClass(typeName);
for (Field field : type.getDeclaredFields()) {
UsesComponent annotation = field.getAnnotation(UsesComponent.class);
if (annotation != null) {
String name = field.getType().getCanonicalName();
String description = field.getAnnotation(UsesComponent.class).description();
String technology = annotation.technology();
Component destination = componentFinder.getContainer().getComponentOfType(name);
if (destination != null) {
for (Relationship relationship : component.getRelationships()) {
if (relationship.getDestination() == destination && StringUtils.isNullOrEmpty(relationship.getDescription())) {
// only change the details of relationships that have no description
component.getModel().modifyRelationship(relationship, description, technology);
}
}
} else {
log.warn("A component of type \"" + name + "\" could not be found.");
}
}
}
} catch (ClassNotFoundException e) {
log.warn("Could not load type " + typeName);
}
} }
private void findUsesComponentAnnotations(Component component, String typeName) {
try {
Class type = getTypeRepository().loadClass(typeName);
for (Field field : type.getDeclaredFields()) {
UsesComponent annotation = field.getAnnotation(UsesComponent.class);
if (annotation != null) {
String name = field.getType().getCanonicalName();
String description = field.getAnnotation(UsesComponent.class).description();
String technology = annotation.technology();
Component destination = componentFinder.getContainer().getComponentOfType(name);
if (destination != null) {
for (Relationship relationship : component.getRelationships()) {
if (relationship.getDestination() == destination && StringUtils.isNullOrEmpty(relationship.getDescription())) {
// only change the details of relationships that have no description
component.getModel().modifyRelationship(relationship, description, technology); // depends on control dependency: [if], data = [none]
}
}
} else {
log.warn("A component of type \"" + name + "\" could not be found."); // depends on control dependency: [if], data = [none]
}
}
}
} catch (ClassNotFoundException e) {
log.warn("Could not load type " + typeName);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
final boolean abort(boolean removeFromQueue, Throwable cause) {
if (removeFromQueue && executor.queue.remove(this))
executor.maxQueueSizeConstraint.release();
if (nsAcceptEnd == nsAcceptBegin - 1) // currently unset
nsRunEnd = nsQueueEnd = nsAcceptEnd = System.nanoTime();
boolean aborted = result.compareAndSet(state, cause);
if (aborted)
try {
state.releaseShared(ABORTED);
if (nsQueueEnd == nsAcceptBegin - 2) // currently unset
nsRunEnd = nsQueueEnd = System.nanoTime();
if (callback != null)
callback.onEnd(task, this, null, true, 0, cause);
} finally {
if (latch != null)
latch.countDown();
if (cancellableStage != null) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
Tr.debug(this, tc, "completion stage to complete exceptionally: " + cancellableStage);
cancellableStage.completeExceptionally(cause);
}
}
else {
// Prevent premature return from abort that would allow subsequent getState() to indicate
// that the task is still in SUBMITTED state.
while (state.get() < RUNNING)
Thread.yield();
}
return aborted;
} } | public class class_name {
final boolean abort(boolean removeFromQueue, Throwable cause) {
if (removeFromQueue && executor.queue.remove(this))
executor.maxQueueSizeConstraint.release();
if (nsAcceptEnd == nsAcceptBegin - 1) // currently unset
nsRunEnd = nsQueueEnd = nsAcceptEnd = System.nanoTime();
boolean aborted = result.compareAndSet(state, cause);
if (aborted)
try {
state.releaseShared(ABORTED); // depends on control dependency: [try], data = [none]
if (nsQueueEnd == nsAcceptBegin - 2) // currently unset
nsRunEnd = nsQueueEnd = System.nanoTime();
if (callback != null)
callback.onEnd(task, this, null, true, 0, cause);
} finally {
if (latch != null)
latch.countDown();
if (cancellableStage != null) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
Tr.debug(this, tc, "completion stage to complete exceptionally: " + cancellableStage);
cancellableStage.completeExceptionally(cause); // depends on control dependency: [if], data = [none]
}
}
else {
// Prevent premature return from abort that would allow subsequent getState() to indicate
// that the task is still in SUBMITTED state.
while (state.get() < RUNNING)
Thread.yield();
}
return aborted;
} } |
public class class_name {
@SuppressWarnings("unchecked")
@SafeVarargs
@Nonnull
public static <T> Predicate<T> and(final Predicate<? extends T>... ps) {
return c -> {
for (final Predicate<? extends T> p : ps) {
if (!((Predicate<T>) p).test(c)) {
return false;
}
}
return true;
};
} } | public class class_name {
@SuppressWarnings("unchecked")
@SafeVarargs
@Nonnull
public static <T> Predicate<T> and(final Predicate<? extends T>... ps) {
return c -> {
for (final Predicate<? extends T> p : ps) {
if (!((Predicate<T>) p).test(c)) {
return false; // depends on control dependency: [if], data = [none]
}
}
return true;
};
} } |
public class class_name {
private static boolean hasClausalV(Tree tree) {
// this is originally called only called on phrasal nodes
if (tree.isPhrasal()) {
if (tree.isPrePreTerminal() &&
tree.value().startsWith("NP")) {
return false;
}
Tree[] kids = tree.children();
for (Tree t : kids) {
if (hasClausalV(t)) {
return true;
}
}
return false;
} else {
String str = tree.value();
return str.startsWith("VB") || str.startsWith("MD");
}
} } | public class class_name {
private static boolean hasClausalV(Tree tree) {
// this is originally called only called on phrasal nodes
if (tree.isPhrasal()) {
if (tree.isPrePreTerminal() &&
tree.value().startsWith("NP")) {
return false;
// depends on control dependency: [if], data = [none]
}
Tree[] kids = tree.children();
for (Tree t : kids) {
if (hasClausalV(t)) {
return true;
// depends on control dependency: [if], data = [none]
}
}
return false;
// depends on control dependency: [if], data = [none]
} else {
String str = tree.value();
return str.startsWith("VB") || str.startsWith("MD");
// depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static <K, V> ListMultimap<K, V> unmodifiableListMultimap(ListMultimap<K, V> delegate) {
if (delegate instanceof UnmodifiableListMultimap || delegate instanceof ImmutableListMultimap) {
return delegate;
}
return new UnmodifiableListMultimap<>(delegate);
} } | public class class_name {
public static <K, V> ListMultimap<K, V> unmodifiableListMultimap(ListMultimap<K, V> delegate) {
if (delegate instanceof UnmodifiableListMultimap || delegate instanceof ImmutableListMultimap) {
return delegate; // depends on control dependency: [if], data = [none]
}
return new UnmodifiableListMultimap<>(delegate);
} } |
public class class_name {
private Mono<String> getTokenFromUri(String uri) {
URL url = null;
try {
url = new URL(uri);
} catch (MalformedURLException e) {
return Mono.error(e);
}
String host = String.format("%s://%s%s/", url.getProtocol(), url.getHost(), url.getPort() > 0 ? ":" + url.getPort() : "");
String resource = environment().managementEndpoint();
for (Map.Entry<String, String> endpoint : environment().endpoints().entrySet()) {
if (host.contains(endpoint.getValue())) {
if (endpoint.getKey().equals(Endpoint.KEYVAULT.identifier())) {
resource = String.format("https://%s/", endpoint.getValue().replaceAll("^\\.*", ""));
break;
} else if (endpoint.getKey().equals(Endpoint.GRAPH.identifier())) {
resource = environment().graphEndpoint();
break;
} else if (endpoint.getKey().equals(Endpoint.LOG_ANALYTICS.identifier())) {
resource = environment().logAnalyticsEndpoint();
break;
} else if (endpoint.getKey().equals(Endpoint.APPLICATION_INSIGHTS.identifier())) {
resource = environment().applicationInsightsEndpoint();
break;
} else if (endpoint.getKey().equals(Endpoint.DATA_LAKE_STORE.identifier())
|| endpoint.getKey().equals(Endpoint.DATA_LAKE_ANALYTICS.identifier())) {
resource = environment().dataLakeEndpointResourceId();
break;
}
}
}
return getToken(resource);
} } | public class class_name {
private Mono<String> getTokenFromUri(String uri) {
URL url = null;
try {
url = new URL(uri); // depends on control dependency: [try], data = [none]
} catch (MalformedURLException e) {
return Mono.error(e);
} // depends on control dependency: [catch], data = [none]
String host = String.format("%s://%s%s/", url.getProtocol(), url.getHost(), url.getPort() > 0 ? ":" + url.getPort() : "");
String resource = environment().managementEndpoint();
for (Map.Entry<String, String> endpoint : environment().endpoints().entrySet()) {
if (host.contains(endpoint.getValue())) {
if (endpoint.getKey().equals(Endpoint.KEYVAULT.identifier())) {
resource = String.format("https://%s/", endpoint.getValue().replaceAll("^\\.*", ""));
break; // depends on control dependency: [if], data = [none]
} else if (endpoint.getKey().equals(Endpoint.GRAPH.identifier())) {
resource = environment().graphEndpoint(); // depends on control dependency: [if], data = [none]
break;
} else if (endpoint.getKey().equals(Endpoint.LOG_ANALYTICS.identifier())) {
resource = environment().logAnalyticsEndpoint(); // depends on control dependency: [if], data = [none]
break;
} else if (endpoint.getKey().equals(Endpoint.APPLICATION_INSIGHTS.identifier())) {
resource = environment().applicationInsightsEndpoint(); // depends on control dependency: [if], data = [none]
break;
} else if (endpoint.getKey().equals(Endpoint.DATA_LAKE_STORE.identifier())
|| endpoint.getKey().equals(Endpoint.DATA_LAKE_ANALYTICS.identifier())) {
resource = environment().dataLakeEndpointResourceId(); // depends on control dependency: [if], data = [none]
break;
}
}
}
return getToken(resource);
} } |
public class class_name {
@Deprecated
void execute(String sql, Object... parameters) {
if (N.isNullOrEmpty(parameters)) {
sqliteDB.execSQL(sql);
} else {
final NamedSQL namedSQL = parseSQL(sql);
final Object[] args = prepareArguments(namedSQL, parameters);
sqliteDB.execSQL(namedSQL.getPureSQL(), args);
}
} } | public class class_name {
@Deprecated
void execute(String sql, Object... parameters) {
if (N.isNullOrEmpty(parameters)) {
sqliteDB.execSQL(sql);
// depends on control dependency: [if], data = [none]
} else {
final NamedSQL namedSQL = parseSQL(sql);
final Object[] args = prepareArguments(namedSQL, parameters);
sqliteDB.execSQL(namedSQL.getPureSQL(), args);
// depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
private void cancelPausedRun(ExecutionState executionStateToCancel) {
final List<ExecutionState> branches = executionStateService.readByExecutionId(executionStateToCancel.getExecutionId());
// If the parent is paused because one of the branches is paused, OR, it was paused by the user / no-workers-in-group, but has branches that were not finished (and thus, were paused) -
// The parent itself will return to the queue after all the branches are ended (due to this cancellation), and then it'll be canceled as well.
if (branches.size() > 1) { // more than 1 means that it has paused branches (branches is at least 1 - the parent)
for (ExecutionState branch : branches) {
if (!EMPTY_BRANCH.equals(branch.getBranchId())) { // exclude the base execution
returnCanceledRunToQueue(branch);
executionStateService.deleteExecutionState(branch.getExecutionId(), branch.getBranchId());
}
}
executionStateToCancel.setStatus(ExecutionStatus.PENDING_CANCEL); // when the parent will return to queue - should have the correct status
} else {
returnCanceledRunToQueue(executionStateToCancel);
}
} } | public class class_name {
private void cancelPausedRun(ExecutionState executionStateToCancel) {
final List<ExecutionState> branches = executionStateService.readByExecutionId(executionStateToCancel.getExecutionId());
// If the parent is paused because one of the branches is paused, OR, it was paused by the user / no-workers-in-group, but has branches that were not finished (and thus, were paused) -
// The parent itself will return to the queue after all the branches are ended (due to this cancellation), and then it'll be canceled as well.
if (branches.size() > 1) { // more than 1 means that it has paused branches (branches is at least 1 - the parent)
for (ExecutionState branch : branches) {
if (!EMPTY_BRANCH.equals(branch.getBranchId())) { // exclude the base execution
returnCanceledRunToQueue(branch); // depends on control dependency: [if], data = [none]
executionStateService.deleteExecutionState(branch.getExecutionId(), branch.getBranchId()); // depends on control dependency: [if], data = [none]
}
}
executionStateToCancel.setStatus(ExecutionStatus.PENDING_CANCEL); // when the parent will return to queue - should have the correct status // depends on control dependency: [if], data = [none]
} else {
returnCanceledRunToQueue(executionStateToCancel); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static BeanBox getUniqueBeanBox(BeanBoxContext ctx, Class<?> clazz) {
BeanBoxException.assureNotNull(clazz, "Target class can not be null");
BeanBox box = ctx.beanBoxMetaCache.get(clazz);
if (box != null)
return box;
if (BeanBox.class.isAssignableFrom(clazz))
try {
box = (BeanBox) clazz.newInstance();
if (box.singleton == null)
box.singleton = true;
} catch (Exception e) {
BeanBoxException.throwEX(e);
}
else
box = doCreateBeanBox(ctx, clazz);
ctx.beanBoxMetaCache.put(clazz, box);
return box;
} } | public class class_name {
public static BeanBox getUniqueBeanBox(BeanBoxContext ctx, Class<?> clazz) {
BeanBoxException.assureNotNull(clazz, "Target class can not be null");
BeanBox box = ctx.beanBoxMetaCache.get(clazz);
if (box != null)
return box;
if (BeanBox.class.isAssignableFrom(clazz))
try {
box = (BeanBox) clazz.newInstance(); // depends on control dependency: [try], data = [none]
if (box.singleton == null)
box.singleton = true;
} catch (Exception e) {
BeanBoxException.throwEX(e);
} // depends on control dependency: [catch], data = [none]
else
box = doCreateBeanBox(ctx, clazz);
ctx.beanBoxMetaCache.put(clazz, box);
return box;
} } |
public class class_name {
public void storeCookies(URLConnection conn) throws IOException {
// let's determine the domain from where these cookies are being sent
String domain = getDomainFromHost(conn.getURL().getHost());
Map<String, Map<String, String>> domainStore; // this is where we will store cookies for this domain
// now let's check the store to see if we have an entry for this domain
if (store.containsKey(domain)) {
// we do, so lets retrieve it from the store
domainStore = store.get(domain);
} else {
// we don't, so let's create it and put it in the store
domainStore = new HashMap<>();
store.put(domain, domainStore);
}
// OK, now we are ready to get the cookies out of the URLConnection
String headerName = null;
for (int i = 1; (headerName = conn.getHeaderFieldKey(i)) != null; i++) {
if (headerName.equalsIgnoreCase(SET_COOKIE)) {
Map<String, String> cookie = new HashMap<>();
StringTokenizer st = new StringTokenizer(conn.getHeaderField(i), COOKIE_VALUE_DELIMITER);
// the specification dictates that the first name/value pair
// in the string is the cookie name and value, so let's handle
// them as a special case:
if (st.hasMoreTokens()) {
String token = st.nextToken();
String name = token.substring(0, token.indexOf(NAME_VALUE_SEPARATOR));
String value = token.substring(token.indexOf(NAME_VALUE_SEPARATOR) + 1, token.length());
domainStore.put(name, cookie);
cookie.put(name, value);
}
while (st.hasMoreTokens()) {
String token = st.nextToken().toLowerCase();
int idx = token.indexOf(NAME_VALUE_SEPARATOR);
if (idx > 0 && idx < token.length() - 1) {
cookie.put(token.substring(0, idx).toLowerCase(), token.substring(idx + 1, token
.length()));
}
}
}
}
} } | public class class_name {
public void storeCookies(URLConnection conn) throws IOException {
// let's determine the domain from where these cookies are being sent
String domain = getDomainFromHost(conn.getURL().getHost());
Map<String, Map<String, String>> domainStore; // this is where we will store cookies for this domain
// now let's check the store to see if we have an entry for this domain
if (store.containsKey(domain)) {
// we do, so lets retrieve it from the store
domainStore = store.get(domain);
} else {
// we don't, so let's create it and put it in the store
domainStore = new HashMap<>();
store.put(domain, domainStore);
}
// OK, now we are ready to get the cookies out of the URLConnection
String headerName = null;
for (int i = 1; (headerName = conn.getHeaderFieldKey(i)) != null; i++) {
if (headerName.equalsIgnoreCase(SET_COOKIE)) {
Map<String, String> cookie = new HashMap<>();
StringTokenizer st = new StringTokenizer(conn.getHeaderField(i), COOKIE_VALUE_DELIMITER);
// the specification dictates that the first name/value pair
// in the string is the cookie name and value, so let's handle
// them as a special case:
if (st.hasMoreTokens()) {
String token = st.nextToken();
String name = token.substring(0, token.indexOf(NAME_VALUE_SEPARATOR));
String value = token.substring(token.indexOf(NAME_VALUE_SEPARATOR) + 1, token.length());
domainStore.put(name, cookie); // depends on control dependency: [if], data = [none]
cookie.put(name, value); // depends on control dependency: [if], data = [none]
}
while (st.hasMoreTokens()) {
String token = st.nextToken().toLowerCase();
int idx = token.indexOf(NAME_VALUE_SEPARATOR);
if (idx > 0 && idx < token.length() - 1) {
cookie.put(token.substring(0, idx).toLowerCase(), token.substring(idx + 1, token
.length())); // depends on control dependency: [if], data = [none]
}
}
}
}
} } |
public class class_name {
public boolean connectIfPossible( MonitoringPoint monitoringPoint ) {
// check if the other point has this as related id
if (ID == monitoringPoint.getRelatedID()) {
pfafRelatedMonitoringPointsTable.put(monitoringPoint.getPfatstetterNumber().toString(),
monitoringPoint);
return true;
}
return false;
} } | public class class_name {
public boolean connectIfPossible( MonitoringPoint monitoringPoint ) {
// check if the other point has this as related id
if (ID == monitoringPoint.getRelatedID()) {
pfafRelatedMonitoringPointsTable.put(monitoringPoint.getPfatstetterNumber().toString(),
monitoringPoint); // depends on control dependency: [if], data = [none]
return true; // depends on control dependency: [if], data = [none]
}
return false;
} } |
public class class_name {
public boolean hasExtension(String namespace) {
synchronized (packetExtensions) {
for (ExtensionElement packetExtension : packetExtensions.values()) {
if (packetExtension.getNamespace().equals(namespace)) {
return true;
}
}
}
return false;
} } | public class class_name {
public boolean hasExtension(String namespace) {
synchronized (packetExtensions) {
for (ExtensionElement packetExtension : packetExtensions.values()) {
if (packetExtension.getNamespace().equals(namespace)) {
return true; // depends on control dependency: [if], data = [none]
}
}
}
return false;
} } |
public class class_name {
public long done() {
if (enabled) {
if (!done) {
lastCheckpointNanos = System.nanoTime();
done = true;
}
return lastCheckpointNanos - startNanos;
}
return -1;
} } | public class class_name {
public long done() {
if (enabled) {
if (!done) {
lastCheckpointNanos = System.nanoTime(); // depends on control dependency: [if], data = [none]
done = true; // depends on control dependency: [if], data = [none]
}
return lastCheckpointNanos - startNanos; // depends on control dependency: [if], data = [none]
}
return -1;
} } |
public class class_name {
public boolean isTemporaryPathTarget(Path path) {
LOG.trace("isTemporaryPathTarget for {}", path);
if (path.toString().equals(hostNameScheme) || path.getParent() == null) {
LOG.trace("Temporary target on the path eauals hostname or null parent {}", path);
return false;
}
String name = path.getName();
String parent = path.getParent().toString();
for (String tempPath : tempIdentifiers) {
String[] tempPathComponents = tempPath.split("/");
if (parent.endsWith(tempPathComponents[0].replace("ID", ""))
|| name.startsWith(tempPathComponents[0].replace("ID", ""))) {
LOG.debug("Temporary path identified on {}", path);
return true;
}
}
LOG.debug("Temporary path not identified for {}", path);
return false;
} } | public class class_name {
public boolean isTemporaryPathTarget(Path path) {
LOG.trace("isTemporaryPathTarget for {}", path);
if (path.toString().equals(hostNameScheme) || path.getParent() == null) {
LOG.trace("Temporary target on the path eauals hostname or null parent {}", path); // depends on control dependency: [if], data = [none]
return false; // depends on control dependency: [if], data = [none]
}
String name = path.getName();
String parent = path.getParent().toString();
for (String tempPath : tempIdentifiers) {
String[] tempPathComponents = tempPath.split("/");
if (parent.endsWith(tempPathComponents[0].replace("ID", ""))
|| name.startsWith(tempPathComponents[0].replace("ID", ""))) {
LOG.debug("Temporary path identified on {}", path); // depends on control dependency: [if], data = [none]
return true; // depends on control dependency: [if], data = [none]
}
}
LOG.debug("Temporary path not identified for {}", path);
return false;
} } |
public class class_name {
protected void getSupplementationFromOtherSources(Budget budget, Map<Integer, String> hmBudgetQuestions) {
if (!hmBudgetQuestions.isEmpty()) {
if (hmBudgetQuestions.get(OTHER_SUPP_SOURCE) != null) {
if (hmBudgetQuestions.get(OTHER_SUPP_SOURCE).toString().toUpperCase().equals("Y")) {
SupplementationFromOtherSources supplementationFromOtherSources = budget
.addNewSupplementationFromOtherSources();
if (hmBudgetQuestions.get(SUPP_SOURCE) != null) {
supplementationFromOtherSources.setSource(hmBudgetQuestions.get(SUPP_SOURCE).toString());
supplementationFromOtherSources.setAmount(new BigDecimal(hmBudgetQuestions.get(SUPP_FUNDING_AMT).toString()));
try {
supplementationFromOtherSources.setNumberOfMonths(new BigDecimal(hmBudgetQuestions.get(SUPP_MONTHS).toString()));
} catch (Exception ex) {
}
supplementationFromOtherSources.setType(hmBudgetQuestions.get(SUPP_TYPE).toString());
}
}
}
}
} } | public class class_name {
protected void getSupplementationFromOtherSources(Budget budget, Map<Integer, String> hmBudgetQuestions) {
if (!hmBudgetQuestions.isEmpty()) {
if (hmBudgetQuestions.get(OTHER_SUPP_SOURCE) != null) {
if (hmBudgetQuestions.get(OTHER_SUPP_SOURCE).toString().toUpperCase().equals("Y")) {
SupplementationFromOtherSources supplementationFromOtherSources = budget
.addNewSupplementationFromOtherSources();
if (hmBudgetQuestions.get(SUPP_SOURCE) != null) {
supplementationFromOtherSources.setSource(hmBudgetQuestions.get(SUPP_SOURCE).toString()); // depends on control dependency: [if], data = [(hmBudgetQuestions.get(SUPP_SOURCE)]
supplementationFromOtherSources.setAmount(new BigDecimal(hmBudgetQuestions.get(SUPP_FUNDING_AMT).toString())); // depends on control dependency: [if], data = [none]
try {
supplementationFromOtherSources.setNumberOfMonths(new BigDecimal(hmBudgetQuestions.get(SUPP_MONTHS).toString())); // depends on control dependency: [try], data = [none]
} catch (Exception ex) {
} // depends on control dependency: [catch], data = [none]
supplementationFromOtherSources.setType(hmBudgetQuestions.get(SUPP_TYPE).toString()); // depends on control dependency: [if], data = [none]
}
}
}
}
} } |
public class class_name {
public static Color fromColorString(final String cssColorString)
{
final String str = cssColorString.toLowerCase().replaceAll(" ", "");
try
{
if (str.startsWith("#"))
{
return hex2RGB(str);
}
else
{
if (str.endsWith(")"))
{
if (str.startsWith("rgb("))
{
final String[] rgb = str.substring(4, str.length() - 1).split(",");
if (rgb.length != 3)
{
return null;
}
final int r = intOrPct(rgb[0], 255);
final int g = intOrPct(rgb[1], 255);
final int b = intOrPct(rgb[2], 255);
return new Color(r, g, b);
}
if (str.startsWith("rgba("))
{
final String[] rgba = str.substring(5, str.length() - 1).split(",");
if (rgba.length != 4)
{
return null;
}
final int r = intOrPct(rgba[0], 255);
final int g = intOrPct(rgba[1], 255);
final int b = intOrPct(rgba[2], 255);
final double a = doubleOrPct(rgba[3], 1);
return new Color(r, g, b, a);
}
if (str.startsWith("hsl("))
{
final String[] hsl = str.substring(4, str.length() - 1).split(",");
if (hsl.length != 3)
{
return null;
}
final double h = hueOrPct(hsl[0]);
final double s = percentage(hsl[1], 1);
final double l = percentage(hsl[2], 1);
return fromNormalizedHSL(h, s, l);
}
if (str.startsWith("hsla("))
{
final String[] hsla = str.substring(5, str.length() - 1).split(",");
if (hsla.length != 4)
{
return null;
}
final double h = hueOrPct(hsla[0]);
final double s = percentage(hsla[1], 1);
final double l = percentage(hsla[2], 1);
final double a = doubleOrPct(hsla[3], 1);
final Color col = fromNormalizedHSL(h, s, l);
col.setA(a);
return col;
}
}
final ColorName name = ColorName.lookup(str);
if (name != null)
{
return name.getColor();
}
}
return null;// unknown format
}
catch (final NumberFormatException e)
{
return null;
}
} } | public class class_name {
public static Color fromColorString(final String cssColorString)
{
final String str = cssColorString.toLowerCase().replaceAll(" ", "");
try
{
if (str.startsWith("#"))
{
return hex2RGB(str); // depends on control dependency: [if], data = [none]
}
else
{
if (str.endsWith(")"))
{
if (str.startsWith("rgb("))
{
final String[] rgb = str.substring(4, str.length() - 1).split(",");
if (rgb.length != 3)
{
return null; // depends on control dependency: [if], data = [none]
}
final int r = intOrPct(rgb[0], 255);
final int g = intOrPct(rgb[1], 255);
final int b = intOrPct(rgb[2], 255);
return new Color(r, g, b); // depends on control dependency: [if], data = [none]
}
if (str.startsWith("rgba("))
{
final String[] rgba = str.substring(5, str.length() - 1).split(",");
if (rgba.length != 4)
{
return null; // depends on control dependency: [if], data = [none]
}
final int r = intOrPct(rgba[0], 255);
final int g = intOrPct(rgba[1], 255);
final int b = intOrPct(rgba[2], 255);
final double a = doubleOrPct(rgba[3], 1);
return new Color(r, g, b, a); // depends on control dependency: [if], data = [none]
}
if (str.startsWith("hsl("))
{
final String[] hsl = str.substring(4, str.length() - 1).split(",");
if (hsl.length != 3)
{
return null; // depends on control dependency: [if], data = [none]
}
final double h = hueOrPct(hsl[0]);
final double s = percentage(hsl[1], 1);
final double l = percentage(hsl[2], 1);
return fromNormalizedHSL(h, s, l); // depends on control dependency: [if], data = [none]
}
if (str.startsWith("hsla("))
{
final String[] hsla = str.substring(5, str.length() - 1).split(",");
if (hsla.length != 4)
{
return null; // depends on control dependency: [if], data = [none]
}
final double h = hueOrPct(hsla[0]);
final double s = percentage(hsla[1], 1);
final double l = percentage(hsla[2], 1);
final double a = doubleOrPct(hsla[3], 1);
final Color col = fromNormalizedHSL(h, s, l);
col.setA(a); // depends on control dependency: [if], data = [none]
return col; // depends on control dependency: [if], data = [none]
}
}
final ColorName name = ColorName.lookup(str);
if (name != null)
{
return name.getColor(); // depends on control dependency: [if], data = [none]
}
}
return null;// unknown format // depends on control dependency: [try], data = [none]
}
catch (final NumberFormatException e)
{
return null;
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public void setDhcpOptions(java.util.Collection<DhcpOptions> dhcpOptions) {
if (dhcpOptions == null) {
this.dhcpOptions = null;
return;
}
this.dhcpOptions = new com.amazonaws.internal.SdkInternalList<DhcpOptions>(dhcpOptions);
} } | public class class_name {
public void setDhcpOptions(java.util.Collection<DhcpOptions> dhcpOptions) {
if (dhcpOptions == null) {
this.dhcpOptions = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.dhcpOptions = new com.amazonaws.internal.SdkInternalList<DhcpOptions>(dhcpOptions);
} } |
public class class_name {
public static int[] sliceByTime(CSTable table, int timeCol, Date start, Date end) {
if (end.before(start)) {
throw new IllegalArgumentException("end<start");
}
if (timeCol < 0) {
throw new IllegalArgumentException("timeCol :" + timeCol);
}
int s = -1;
int e = -1;
int i = -1;
for (String[] col : table.rows()) {
i++;
Date d = Conversions.convert(col[timeCol], Date.class);
if (s == -1 && (start.before(d) || start.equals(d))) {
s = i;
}
if (e == -1 && (end.before(d) || end.equals(d))) {
e = i;
break;
}
}
return new int[]{s, e};
} } | public class class_name {
public static int[] sliceByTime(CSTable table, int timeCol, Date start, Date end) {
if (end.before(start)) {
throw new IllegalArgumentException("end<start");
}
if (timeCol < 0) {
throw new IllegalArgumentException("timeCol :" + timeCol);
}
int s = -1;
int e = -1;
int i = -1;
for (String[] col : table.rows()) {
i++; // depends on control dependency: [for], data = [none]
Date d = Conversions.convert(col[timeCol], Date.class);
if (s == -1 && (start.before(d) || start.equals(d))) {
s = i; // depends on control dependency: [if], data = [none]
}
if (e == -1 && (end.before(d) || end.equals(d))) {
e = i; // depends on control dependency: [if], data = [none]
break;
}
}
return new int[]{s, e};
} } |
public class class_name {
@Override
public synchronized void addCallBack(RecoveryLogCallBack callback) {
if (tc.isEntryEnabled())
Tr.entry(tc, "addCallBack", callback);
if (_registeredCallbacks == null) {
_registeredCallbacks = new HashSet<RecoveryLogCallBack>();
}
_registeredCallbacks.add(callback);
if (tc.isEntryEnabled())
Tr.exit(tc, "addCallBack");
} } | public class class_name {
@Override
public synchronized void addCallBack(RecoveryLogCallBack callback) {
if (tc.isEntryEnabled())
Tr.entry(tc, "addCallBack", callback);
if (_registeredCallbacks == null) {
_registeredCallbacks = new HashSet<RecoveryLogCallBack>(); // depends on control dependency: [if], data = [none]
}
_registeredCallbacks.add(callback);
if (tc.isEntryEnabled())
Tr.exit(tc, "addCallBack");
} } |
public class class_name {
public void assertIsValidReferencePath(String path, String propertyName) {
if (path == null) {
return;
}
if (path.isEmpty()) {
problemReporter.report(new Problem(this, String.format("%s cannot be empty", propertyName)));
return;
}
} } | public class class_name {
public void assertIsValidReferencePath(String path, String propertyName) {
if (path == null) {
return; // depends on control dependency: [if], data = [none]
}
if (path.isEmpty()) {
problemReporter.report(new Problem(this, String.format("%s cannot be empty", propertyName))); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public final void genericMethodOrConstructorDecl() throws RecognitionException {
int genericMethodOrConstructorDecl_StartIndex = input.index();
try {
if ( state.backtracking>0 && alreadyParsedRule(input, 23) ) { return; }
// src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:375:5: ( typeParameters genericMethodOrConstructorRest )
// src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:375:7: typeParameters genericMethodOrConstructorRest
{
pushFollow(FOLLOW_typeParameters_in_genericMethodOrConstructorDecl781);
typeParameters();
state._fsp--;
if (state.failed) return;
pushFollow(FOLLOW_genericMethodOrConstructorRest_in_genericMethodOrConstructorDecl783);
genericMethodOrConstructorRest();
state._fsp--;
if (state.failed) return;
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
if ( state.backtracking>0 ) { memoize(input, 23, genericMethodOrConstructorDecl_StartIndex); }
}
} } | public class class_name {
public final void genericMethodOrConstructorDecl() throws RecognitionException {
int genericMethodOrConstructorDecl_StartIndex = input.index();
try {
if ( state.backtracking>0 && alreadyParsedRule(input, 23) ) { return; } // depends on control dependency: [if], data = [none]
// src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:375:5: ( typeParameters genericMethodOrConstructorRest )
// src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:375:7: typeParameters genericMethodOrConstructorRest
{
pushFollow(FOLLOW_typeParameters_in_genericMethodOrConstructorDecl781);
typeParameters();
state._fsp--;
if (state.failed) return;
pushFollow(FOLLOW_genericMethodOrConstructorRest_in_genericMethodOrConstructorDecl783);
genericMethodOrConstructorRest();
state._fsp--;
if (state.failed) return;
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
if ( state.backtracking>0 ) { memoize(input, 23, genericMethodOrConstructorDecl_StartIndex); } // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
private Content getClassLinks(LinkInfoImpl.Kind context, Collection<?> list) {
Content dd = new HtmlTree(HtmlTag.DD);
boolean isFirst = true;
for (Object type : list) {
if (!isFirst) {
Content separator = new StringContent(", ");
dd.addContent(separator);
} else {
isFirst = false;
}
// TODO: should we simply split this method up to avoid instanceof ?
if (type instanceof TypeElement) {
Content link = getLink(
new LinkInfoImpl(configuration, context, (TypeElement)(type)));
dd.addContent(HtmlTree.CODE(link));
} else {
Content link = getLink(
new LinkInfoImpl(configuration, context, ((TypeMirror)type)));
dd.addContent(HtmlTree.CODE(link));
}
}
return dd;
} } | public class class_name {
private Content getClassLinks(LinkInfoImpl.Kind context, Collection<?> list) {
Content dd = new HtmlTree(HtmlTag.DD);
boolean isFirst = true;
for (Object type : list) {
if (!isFirst) {
Content separator = new StringContent(", ");
dd.addContent(separator); // depends on control dependency: [if], data = [none]
} else {
isFirst = false; // depends on control dependency: [if], data = [none]
}
// TODO: should we simply split this method up to avoid instanceof ?
if (type instanceof TypeElement) {
Content link = getLink(
new LinkInfoImpl(configuration, context, (TypeElement)(type)));
dd.addContent(HtmlTree.CODE(link)); // depends on control dependency: [if], data = [none]
} else {
Content link = getLink(
new LinkInfoImpl(configuration, context, ((TypeMirror)type)));
dd.addContent(HtmlTree.CODE(link)); // depends on control dependency: [if], data = [none]
}
}
return dd;
} } |
public class class_name {
public static boolean isAssignableFrom(String lookingFor, TypeDescriptor candidate) {
String[] interfaces = candidate.getSuperinterfacesName();
for (String intface : interfaces) {
if (intface.equals(lookingFor)) {
return true;
}
boolean b = isAssignableFrom(lookingFor, candidate.getTypeRegistry().getDescriptorFor(intface));
if (b) {
return true;
}
}
String supertypename = candidate.getSupertypeName();
if (supertypename == null) {
return false;
}
if (supertypename.equals(lookingFor)) {
return true;
}
return isAssignableFrom(lookingFor, candidate.getTypeRegistry().getDescriptorFor(supertypename));
} } | public class class_name {
public static boolean isAssignableFrom(String lookingFor, TypeDescriptor candidate) {
String[] interfaces = candidate.getSuperinterfacesName();
for (String intface : interfaces) {
if (intface.equals(lookingFor)) {
return true; // depends on control dependency: [if], data = [none]
}
boolean b = isAssignableFrom(lookingFor, candidate.getTypeRegistry().getDescriptorFor(intface));
if (b) {
return true; // depends on control dependency: [if], data = [none]
}
}
String supertypename = candidate.getSupertypeName();
if (supertypename == null) {
return false; // depends on control dependency: [if], data = [none]
}
if (supertypename.equals(lookingFor)) {
return true; // depends on control dependency: [if], data = [none]
}
return isAssignableFrom(lookingFor, candidate.getTypeRegistry().getDescriptorFor(supertypename));
} } |
public class class_name {
@Override
// Check is broken [LOG.info()]: PMD reports issues although log stmt is guarded. @todo revisit when upgrading PMD.
@SuppressWarnings("PMD.GuardLogStatementJavaUtil")
public final void audit(final Events event, final String domain, final String username) {
Validate.notNull(event, "The validated object 'event' is null");
Validate.notBlank(domain, "The validated character sequence 'domain' is null or empty");
Validate.notBlank(username, "The validated character sequence 'username' is null or empty");
// PMD does not recognize the guarded log statement
if (LOG.isInfoEnabled()) {
LOG.info("[AUDIT] " + event.getValue() + ". User name '" + username + "', domain '" + domain + "'");
}
} } | public class class_name {
@Override
// Check is broken [LOG.info()]: PMD reports issues although log stmt is guarded. @todo revisit when upgrading PMD.
@SuppressWarnings("PMD.GuardLogStatementJavaUtil")
public final void audit(final Events event, final String domain, final String username) {
Validate.notNull(event, "The validated object 'event' is null");
Validate.notBlank(domain, "The validated character sequence 'domain' is null or empty");
Validate.notBlank(username, "The validated character sequence 'username' is null or empty");
// PMD does not recognize the guarded log statement
if (LOG.isInfoEnabled()) {
LOG.info("[AUDIT] " + event.getValue() + ". User name '" + username + "', domain '" + domain + "'"); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public void marshall(WorkflowExecution workflowExecution, ProtocolMarshaller protocolMarshaller) {
if (workflowExecution == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(workflowExecution.getWorkflowId(), WORKFLOWID_BINDING);
protocolMarshaller.marshall(workflowExecution.getRunId(), RUNID_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(WorkflowExecution workflowExecution, ProtocolMarshaller protocolMarshaller) {
if (workflowExecution == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(workflowExecution.getWorkflowId(), WORKFLOWID_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(workflowExecution.getRunId(), RUNID_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public void marshall(UpdateDatastoreRequest updateDatastoreRequest, ProtocolMarshaller protocolMarshaller) {
if (updateDatastoreRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(updateDatastoreRequest.getDatastoreName(), DATASTORENAME_BINDING);
protocolMarshaller.marshall(updateDatastoreRequest.getRetentionPeriod(), RETENTIONPERIOD_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(UpdateDatastoreRequest updateDatastoreRequest, ProtocolMarshaller protocolMarshaller) {
if (updateDatastoreRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(updateDatastoreRequest.getDatastoreName(), DATASTORENAME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(updateDatastoreRequest.getRetentionPeriod(), RETENTIONPERIOD_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public synchronized void execDistributed(@NonNull TrainingMessage message) {
/**
* Basically we should batch messages coming from different TrainingFunctions on spark executor side here.
* So we pack them into batches, and send over the wire to selected Shard
*/
Frame currentFrame;
if ((currentFrame = frames.get(message.getClass().getSimpleName())) == null) {
currentFrame = new Frame<>(BasicSequenceProvider.getInstance().getNextValue());
frames.put(message.getClass().getSimpleName(), currentFrame);
}
currentFrame.stackMessage(message);
// TODO: make this threshold variable
if (currentFrame.size() >= 128) {
transport.sendMessage(currentFrame);
currentFrame = new Frame<>(BasicSequenceProvider.getInstance().getNextValue());
frames.put(message.getClass().getSimpleName(), currentFrame);
}
//transport.sendMessage(message);
} } | public class class_name {
public synchronized void execDistributed(@NonNull TrainingMessage message) {
/**
* Basically we should batch messages coming from different TrainingFunctions on spark executor side here.
* So we pack them into batches, and send over the wire to selected Shard
*/
Frame currentFrame;
if ((currentFrame = frames.get(message.getClass().getSimpleName())) == null) {
currentFrame = new Frame<>(BasicSequenceProvider.getInstance().getNextValue()); // depends on control dependency: [if], data = [none]
frames.put(message.getClass().getSimpleName(), currentFrame); // depends on control dependency: [if], data = [none]
}
currentFrame.stackMessage(message);
// TODO: make this threshold variable
if (currentFrame.size() >= 128) {
transport.sendMessage(currentFrame); // depends on control dependency: [if], data = [none]
currentFrame = new Frame<>(BasicSequenceProvider.getInstance().getNextValue()); // depends on control dependency: [if], data = [none]
frames.put(message.getClass().getSimpleName(), currentFrame); // depends on control dependency: [if], data = [none]
}
//transport.sendMessage(message);
} } |
public class class_name {
private static List<X509Certificate> parsePKIPATH(InputStream is)
throws CertificateException {
List<X509Certificate> certList = null;
CertificateFactory certFac = null;
if (is == null) {
throw new CertificateException("input stream is null");
}
try {
DerInputStream dis = new DerInputStream(readAllBytes(is));
DerValue[] seq = dis.getSequence(3);
if (seq.length == 0) {
return Collections.<X509Certificate>emptyList();
}
certFac = CertificateFactory.getInstance("X.509");
certList = new ArrayList<X509Certificate>(seq.length);
// append certs in reverse order (target to trust anchor)
for (int i = seq.length-1; i >= 0; i--) {
certList.add((X509Certificate)certFac.generateCertificate
(new ByteArrayInputStream(seq[i].toByteArray())));
}
return Collections.unmodifiableList(certList);
} catch (IOException ioe) {
throw new CertificateException("IOException parsing PkiPath data: "
+ ioe, ioe);
}
} } | public class class_name {
private static List<X509Certificate> parsePKIPATH(InputStream is)
throws CertificateException {
List<X509Certificate> certList = null;
CertificateFactory certFac = null;
if (is == null) {
throw new CertificateException("input stream is null");
}
try {
DerInputStream dis = new DerInputStream(readAllBytes(is));
DerValue[] seq = dis.getSequence(3);
if (seq.length == 0) {
return Collections.<X509Certificate>emptyList(); // depends on control dependency: [if], data = [none]
}
certFac = CertificateFactory.getInstance("X.509");
certList = new ArrayList<X509Certificate>(seq.length);
// append certs in reverse order (target to trust anchor)
for (int i = seq.length-1; i >= 0; i--) {
certList.add((X509Certificate)certFac.generateCertificate
(new ByteArrayInputStream(seq[i].toByteArray()))); // depends on control dependency: [for], data = [none]
}
return Collections.unmodifiableList(certList);
} catch (IOException ioe) {
throw new CertificateException("IOException parsing PkiPath data: "
+ ioe, ioe);
}
} } |
public class class_name {
public UserQueryListing getAllUserQueries() {
UserQuery userQueryToGetInfoFor = new UserQuery();
if(this.serviceTicket != null) {
userQueryToGetInfoFor.setServiceTicket(this.serviceTicket);
}
try {
return new UserQueryListing(this.postJson(
userQueryToGetInfoFor, WS.Path.UserQuery.Version1.getAllUserQueries()));
}
//
catch (JSONException jsonExcept) {
throw new FluidClientException(jsonExcept.getMessage(),
FluidClientException.ErrorCode.JSON_PARSING);
}
} } | public class class_name {
public UserQueryListing getAllUserQueries() {
UserQuery userQueryToGetInfoFor = new UserQuery();
if(this.serviceTicket != null) {
userQueryToGetInfoFor.setServiceTicket(this.serviceTicket); // depends on control dependency: [if], data = [(this.serviceTicket]
}
try {
return new UserQueryListing(this.postJson(
userQueryToGetInfoFor, WS.Path.UserQuery.Version1.getAllUserQueries())); // depends on control dependency: [try], data = [none]
}
//
catch (JSONException jsonExcept) {
throw new FluidClientException(jsonExcept.getMessage(),
FluidClientException.ErrorCode.JSON_PARSING);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public void open(File versionDir) {
/* acquire modification lock */
fileModificationLock.writeLock().lock();
try {
/* check that the store is currently closed */
if(isOpen)
throw new IllegalStateException("Attempt to open already open store.");
// Find version directory from symbolic link or max version id
if(versionDir == null) {
versionDir = ReadOnlyUtils.getCurrentVersion(storeDir);
if(versionDir == null)
versionDir = new File(storeDir, "version-0");
}
// Set the max version id
long versionId = ReadOnlyUtils.getVersionId(versionDir);
if(versionId == -1) {
throw new VoldemortException("Unable to parse id from version directory "
+ versionDir.getAbsolutePath());
}
Utils.mkdirs(versionDir);
// Validate symbolic link, and create it if it doesn't already exist
Utils.symlink(versionDir.getAbsolutePath(), storeDir.getAbsolutePath() + File.separator + "latest");
this.fileSet = new ChunkedFileSet(versionDir, routingStrategy, nodeId, maxValueBufferAllocationSize);
storeVersionManager.syncInternalStateFromFileSystem(false);
this.lastSwapped = System.currentTimeMillis();
this.isOpen = true;
} catch(IOException e) {
logger.error("Error in opening store", e);
} finally {
fileModificationLock.writeLock().unlock();
}
} } | public class class_name {
public void open(File versionDir) {
/* acquire modification lock */
fileModificationLock.writeLock().lock();
try {
/* check that the store is currently closed */
if(isOpen)
throw new IllegalStateException("Attempt to open already open store.");
// Find version directory from symbolic link or max version id
if(versionDir == null) {
versionDir = ReadOnlyUtils.getCurrentVersion(storeDir); // depends on control dependency: [if], data = [none]
if(versionDir == null)
versionDir = new File(storeDir, "version-0");
}
// Set the max version id
long versionId = ReadOnlyUtils.getVersionId(versionDir);
if(versionId == -1) {
throw new VoldemortException("Unable to parse id from version directory "
+ versionDir.getAbsolutePath());
}
Utils.mkdirs(versionDir); // depends on control dependency: [try], data = [none]
// Validate symbolic link, and create it if it doesn't already exist
Utils.symlink(versionDir.getAbsolutePath(), storeDir.getAbsolutePath() + File.separator + "latest"); // depends on control dependency: [try], data = [none]
this.fileSet = new ChunkedFileSet(versionDir, routingStrategy, nodeId, maxValueBufferAllocationSize); // depends on control dependency: [try], data = [none]
storeVersionManager.syncInternalStateFromFileSystem(false); // depends on control dependency: [try], data = [none]
this.lastSwapped = System.currentTimeMillis(); // depends on control dependency: [try], data = [none]
this.isOpen = true; // depends on control dependency: [try], data = [none]
} catch(IOException e) {
logger.error("Error in opening store", e);
} finally { // depends on control dependency: [catch], data = [none]
fileModificationLock.writeLock().unlock();
}
} } |
public class class_name {
private void reset(boolean skipBuffer) { // expected diff with AnsiOutputStream.java
if (!skipBuffer) {
ps.write(buffer, 0, pos); // expected diff with AnsiOutputStream.java
}
pos = 0;
startOfValue = 0;
options.clear();
state = LOOKING_FOR_FIRST_ESC_CHAR;
} } | public class class_name {
private void reset(boolean skipBuffer) { // expected diff with AnsiOutputStream.java
if (!skipBuffer) {
ps.write(buffer, 0, pos); // expected diff with AnsiOutputStream.java // depends on control dependency: [if], data = [none]
}
pos = 0;
startOfValue = 0;
options.clear();
state = LOOKING_FOR_FIRST_ESC_CHAR;
} } |
public class class_name {
@Override
public void run() {
// flag for logging in the "final" block
boolean docOk = false;
try {
// create the index document
m_result = createIndexDocument(m_cms, m_res, m_index, m_count, m_report);
docOk = true;
// check if the thread was interrupted
if (isInterrupted() && LOG.isDebugEnabled()) {
LOG.debug(
Messages.get().getBundle().key(Messages.LOG_ABANDONED_THREAD_FINISHED_1, m_res.getRootPath()));
}
} catch (CmsIndexNoContentException e) {
// Ignore exception caused by empty documents, so that the report is not messed up with error message
m_report.println(
org.opencms.report.Messages.get().container(org.opencms.report.Messages.RPT_OK_0),
I_CmsReport.FORMAT_OK);
} catch (Throwable exc) {
if (m_report != null) {
m_report.println(
org.opencms.report.Messages.get().container(org.opencms.report.Messages.RPT_FAILED_0),
I_CmsReport.FORMAT_ERROR);
m_report.println(
org.opencms.report.Messages.get().container(
org.opencms.report.Messages.RPT_ARGUMENT_1,
exc.toString()),
I_CmsReport.FORMAT_ERROR);
}
if (LOG.isErrorEnabled()) {
LOG.error(
Messages.get().getBundle().key(
Messages.ERR_INDEX_RESOURCE_FAILED_2,
m_res.getRootPath(),
m_index.getName()),
exc);
}
// set flag to avoid logging in finally block
docOk = true;
} finally {
if (!docOk) {
// apparently there was a Throwable that causes an issue
if (m_report != null) {
m_report.println(
org.opencms.report.Messages.get().container(org.opencms.report.Messages.RPT_FAILED_0),
I_CmsReport.FORMAT_ERROR);
m_report.println(
Messages.get().container(
Messages.ERR_INDEX_RESOURCE_FAILED_2,
m_res.getRootPath(),
m_index.getName()),
I_CmsReport.FORMAT_ERROR);
}
if (LOG.isErrorEnabled()) {
LOG.error(
Messages.get().getBundle().key(
Messages.ERR_INDEX_RESOURCE_FAILED_2,
m_res.getRootPath(),
m_index.getName()));
}
}
}
} } | public class class_name {
@Override
public void run() {
// flag for logging in the "final" block
boolean docOk = false;
try {
// create the index document
m_result = createIndexDocument(m_cms, m_res, m_index, m_count, m_report); // depends on control dependency: [try], data = [none]
docOk = true; // depends on control dependency: [try], data = [none]
// check if the thread was interrupted
if (isInterrupted() && LOG.isDebugEnabled()) {
LOG.debug(
Messages.get().getBundle().key(Messages.LOG_ABANDONED_THREAD_FINISHED_1, m_res.getRootPath())); // depends on control dependency: [if], data = [none]
}
} catch (CmsIndexNoContentException e) {
// Ignore exception caused by empty documents, so that the report is not messed up with error message
m_report.println(
org.opencms.report.Messages.get().container(org.opencms.report.Messages.RPT_OK_0),
I_CmsReport.FORMAT_OK);
} catch (Throwable exc) { // depends on control dependency: [catch], data = [none]
if (m_report != null) {
m_report.println(
org.opencms.report.Messages.get().container(org.opencms.report.Messages.RPT_FAILED_0),
I_CmsReport.FORMAT_ERROR); // depends on control dependency: [if], data = [none]
m_report.println(
org.opencms.report.Messages.get().container(
org.opencms.report.Messages.RPT_ARGUMENT_1,
exc.toString()),
I_CmsReport.FORMAT_ERROR); // depends on control dependency: [if], data = [none]
}
if (LOG.isErrorEnabled()) {
LOG.error(
Messages.get().getBundle().key(
Messages.ERR_INDEX_RESOURCE_FAILED_2,
m_res.getRootPath(),
m_index.getName()),
exc); // depends on control dependency: [if], data = [none]
}
// set flag to avoid logging in finally block
docOk = true;
} finally { // depends on control dependency: [catch], data = [none]
if (!docOk) {
// apparently there was a Throwable that causes an issue
if (m_report != null) {
m_report.println(
org.opencms.report.Messages.get().container(org.opencms.report.Messages.RPT_FAILED_0),
I_CmsReport.FORMAT_ERROR); // depends on control dependency: [if], data = [none]
m_report.println(
Messages.get().container(
Messages.ERR_INDEX_RESOURCE_FAILED_2,
m_res.getRootPath(),
m_index.getName()),
I_CmsReport.FORMAT_ERROR); // depends on control dependency: [if], data = [none]
}
if (LOG.isErrorEnabled()) {
LOG.error(
Messages.get().getBundle().key(
Messages.ERR_INDEX_RESOURCE_FAILED_2,
m_res.getRootPath(),
m_index.getName())); // depends on control dependency: [if], data = [none]
}
}
}
} } |
public class class_name {
protected static MetaLocale parse(String tag) {
if (tag.indexOf('_') != -1) {
tag = tag.replace('_', SEP);
}
Maybe<Pair<MetaLocale, CharSequence>> result = P_LANGUAGE_TAG.parse(tag);
// This parser is for internal use only during code generation, so we blow up
// severely if a language tag fails to parse..
if (result.isNothing()) {
throw new IllegalArgumentException("Failed to parse language tag: '" + tag + "'");
}
return result.get()._1;
} } | public class class_name {
protected static MetaLocale parse(String tag) {
if (tag.indexOf('_') != -1) {
tag = tag.replace('_', SEP); // depends on control dependency: [if], data = [none]
}
Maybe<Pair<MetaLocale, CharSequence>> result = P_LANGUAGE_TAG.parse(tag);
// This parser is for internal use only during code generation, so we blow up
// severely if a language tag fails to parse..
if (result.isNothing()) {
throw new IllegalArgumentException("Failed to parse language tag: '" + tag + "'");
}
return result.get()._1;
} } |
public class class_name {
@Override
public void setAtoms(IAtom[] newAtoms) {
// unregister this as listener with the old atoms
for (int i = 0; i < atomCount; i++) {
this.atoms[i].removeListener(this);
}
for (IAtom atom : newAtoms) {
atom.addListener(this);
}
ensureAtomCapacity(newAtoms.length);
System.arraycopy(newAtoms, 0, this.atoms, 0, newAtoms.length);
if (newAtoms.length < this.atoms.length)
Arrays.fill(atoms, newAtoms.length, this.atoms.length, null);
this.atomCount = newAtoms.length;
notifyChanged();
} } | public class class_name {
@Override
public void setAtoms(IAtom[] newAtoms) {
// unregister this as listener with the old atoms
for (int i = 0; i < atomCount; i++) {
this.atoms[i].removeListener(this); // depends on control dependency: [for], data = [i]
}
for (IAtom atom : newAtoms) {
atom.addListener(this); // depends on control dependency: [for], data = [atom]
}
ensureAtomCapacity(newAtoms.length);
System.arraycopy(newAtoms, 0, this.atoms, 0, newAtoms.length);
if (newAtoms.length < this.atoms.length)
Arrays.fill(atoms, newAtoms.length, this.atoms.length, null);
this.atomCount = newAtoms.length;
notifyChanged();
} } |
public class class_name {
public static Map<String, String> getClusterNameTags(Configuration conf) {
ImmutableMap.Builder<String, String> tagMap = ImmutableMap.builder();
String clusterIdentifierTag = ClustersNames.getInstance().getClusterName(conf);
if (!Strings.isNullOrEmpty(clusterIdentifierTag)) {
tagMap.put(CLUSTER_IDENTIFIER_TAG_NAME, clusterIdentifierTag);
}
return tagMap.build();
} } | public class class_name {
public static Map<String, String> getClusterNameTags(Configuration conf) {
ImmutableMap.Builder<String, String> tagMap = ImmutableMap.builder();
String clusterIdentifierTag = ClustersNames.getInstance().getClusterName(conf);
if (!Strings.isNullOrEmpty(clusterIdentifierTag)) {
tagMap.put(CLUSTER_IDENTIFIER_TAG_NAME, clusterIdentifierTag); // depends on control dependency: [if], data = [none]
}
return tagMap.build();
} } |
public class class_name {
protected Xdr getCredential() {
Xdr credentials = new Xdr(_maximumXdrBytes);
credentials.setOffset(0);
credentials.putInt((int) (System.currentTimeMillis() / 1000));
credentials.putString(_host);
credentials.putInt(_uid);
credentials.putInt(_gid);
if (_gids == null) {
credentials.putInt(0);
} else {
credentials.putInt(_gids.length);
for (int i = 0; i < _gids.length; i++)
credentials.putInt(_gids[i]);
}
return credentials;
} } | public class class_name {
protected Xdr getCredential() {
Xdr credentials = new Xdr(_maximumXdrBytes);
credentials.setOffset(0);
credentials.putInt((int) (System.currentTimeMillis() / 1000));
credentials.putString(_host);
credentials.putInt(_uid);
credentials.putInt(_gid);
if (_gids == null) {
credentials.putInt(0); // depends on control dependency: [if], data = [none]
} else {
credentials.putInt(_gids.length); // depends on control dependency: [if], data = [(_gids]
for (int i = 0; i < _gids.length; i++)
credentials.putInt(_gids[i]);
}
return credentials;
} } |
public class class_name {
private synchronized static Forest init(String key, KV<String, Forest> kv, boolean reload) {
Forest forest = kv.getV();
if (forest != null) {
if (reload) {
forest.clear();
} else {
return forest;
}
} else {
forest = new Forest();
}
try {
LOG.debug("begin init dic !");
long start = System.currentTimeMillis();
String temp = null;
String[] strs = null;
Value value = null;
try (BufferedReader br = IOUtil.getReader(PathToStream.stream(kv.getK()), "UTF-8")) {
while ((temp = br.readLine()) != null) {
if (StringUtil.isNotBlank(temp)) {
temp = StringUtil.trim(temp);
strs = temp.split("\t");
strs[0] = strs[0].toLowerCase();
// 如何核心辞典存在那么就放弃
if (MyStaticValue.isSkipUserDefine && DATDictionary.getId(strs[0]) > 0) {
continue;
}
if (strs.length != 3) {
value = new Value(strs[0], DEFAULT_NATURE, DEFAULT_FREQ_STR);
} else {
value = new Value(strs[0], strs[1], strs[2]);
}
Library.insertWord(forest, value);
}
}
}
LOG.info("load dic use time:" + (System.currentTimeMillis() - start) + " path is : " + kv.getK());
kv.setV(forest);
return forest;
} catch (Exception e) {
LOG.error("Init dic library error :" + e.getMessage() + ", path: " + kv.getK());
DIC.remove(key);
return null;
}
} } | public class class_name {
private synchronized static Forest init(String key, KV<String, Forest> kv, boolean reload) {
Forest forest = kv.getV();
if (forest != null) {
if (reload) {
forest.clear(); // depends on control dependency: [if], data = [none]
} else {
return forest; // depends on control dependency: [if], data = [none]
}
} else {
forest = new Forest(); // depends on control dependency: [if], data = [none]
}
try {
LOG.debug("begin init dic !"); // depends on control dependency: [try], data = [none]
long start = System.currentTimeMillis();
String temp = null;
String[] strs = null;
Value value = null;
try (BufferedReader br = IOUtil.getReader(PathToStream.stream(kv.getK()), "UTF-8")) {
while ((temp = br.readLine()) != null) {
if (StringUtil.isNotBlank(temp)) {
temp = StringUtil.trim(temp); // depends on control dependency: [if], data = [none]
strs = temp.split("\t"); // depends on control dependency: [if], data = [none]
strs[0] = strs[0].toLowerCase(); // depends on control dependency: [if], data = [none]
// 如何核心辞典存在那么就放弃
if (MyStaticValue.isSkipUserDefine && DATDictionary.getId(strs[0]) > 0) {
continue;
}
if (strs.length != 3) {
value = new Value(strs[0], DEFAULT_NATURE, DEFAULT_FREQ_STR); // depends on control dependency: [if], data = [none]
} else {
value = new Value(strs[0], strs[1], strs[2]); // depends on control dependency: [if], data = [none]
}
Library.insertWord(forest, value); // depends on control dependency: [if], data = [none]
}
}
}
LOG.info("load dic use time:" + (System.currentTimeMillis() - start) + " path is : " + kv.getK());
kv.setV(forest);
return forest;
} catch (Exception e) {
LOG.error("Init dic library error :" + e.getMessage() + ", path: " + kv.getK());
DIC.remove(key);
return null;
} // depends on control dependency: [catch], data = [none]
} } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.