code
stringlengths 130
281k
| code_dependency
stringlengths 182
306k
|
|---|---|
public class class_name {
@Override
public void update(Object obj) {
lock.writeLock().lock();
try {
populated = true;
this.queueActionLocked(DeltaType.Updated, obj);
} finally {
lock.writeLock().unlock();
}
} }
|
public class class_name {
@Override
public void update(Object obj) {
lock.writeLock().lock();
try {
populated = true; // depends on control dependency: [try], data = [none]
this.queueActionLocked(DeltaType.Updated, obj); // depends on control dependency: [try], data = [none]
} finally {
lock.writeLock().unlock();
}
} }
|
public class class_name {
public Integer getInteger(final String key) {
Number number = (Number) map.get(key);
if (number == null) {
return null;
}
if (number instanceof Integer) {
return (Integer) number;
}
return number.intValue();
} }
|
public class class_name {
public Integer getInteger(final String key) {
Number number = (Number) map.get(key);
if (number == null) {
return null; // depends on control dependency: [if], data = [none]
}
if (number instanceof Integer) {
return (Integer) number; // depends on control dependency: [if], data = [none]
}
return number.intValue();
} }
|
public class class_name {
protected void removeAllFromAttributeValueSet() {
final Collection<AbstractHtml5SharedObject> sharedObjects = getSharedObjects();
boolean listenerInvoked = false;
final Collection<WriteLock> writeLocks = lockAndGetWriteLocks();
try {
getAttributeValueSet().clear();
setModified(true);
invokeValueChangeListeners(sharedObjects);
listenerInvoked = true;
} finally {
for (final Lock lock : writeLocks) {
lock.unlock();
}
}
pushQueues(sharedObjects, listenerInvoked);
} }
|
public class class_name {
protected void removeAllFromAttributeValueSet() {
final Collection<AbstractHtml5SharedObject> sharedObjects = getSharedObjects();
boolean listenerInvoked = false;
final Collection<WriteLock> writeLocks = lockAndGetWriteLocks();
try {
getAttributeValueSet().clear(); // depends on control dependency: [try], data = [none]
setModified(true); // depends on control dependency: [try], data = [none]
invokeValueChangeListeners(sharedObjects); // depends on control dependency: [try], data = [none]
listenerInvoked = true; // depends on control dependency: [try], data = [none]
} finally {
for (final Lock lock : writeLocks) {
lock.unlock(); // depends on control dependency: [for], data = [lock]
}
}
pushQueues(sharedObjects, listenerInvoked);
} }
|
public class class_name {
public ByteBuffer wrap(int offset, int length) {
if (offset > this.memory.length || offset > this.memory.length - length) {
throw new IndexOutOfBoundsException();
}
if (this.wrapper == null) {
this.wrapper = ByteBuffer.wrap(this.memory, offset, length);
}
else {
this.wrapper.position(offset);
this.wrapper.limit(offset + length);
}
return this.wrapper;
} }
|
public class class_name {
public ByteBuffer wrap(int offset, int length) {
if (offset > this.memory.length || offset > this.memory.length - length) {
throw new IndexOutOfBoundsException();
}
if (this.wrapper == null) {
this.wrapper = ByteBuffer.wrap(this.memory, offset, length); // depends on control dependency: [if], data = [none]
}
else {
this.wrapper.position(offset); // depends on control dependency: [if], data = [none]
this.wrapper.limit(offset + length); // depends on control dependency: [if], data = [none]
}
return this.wrapper;
} }
|
public class class_name {
protected void initIncidentHandlers() {
if (incidentHandlers == null) {
incidentHandlers = new HashMap<String, IncidentHandler>();
DefaultIncidentHandler failedJobIncidentHandler = new DefaultIncidentHandler(Incident.FAILED_JOB_HANDLER_TYPE);
incidentHandlers.put(failedJobIncidentHandler.getIncidentHandlerType(), failedJobIncidentHandler);
DefaultIncidentHandler failedExternalTaskIncidentHandler = new DefaultIncidentHandler(Incident.EXTERNAL_TASK_HANDLER_TYPE);
incidentHandlers.put(failedExternalTaskIncidentHandler.getIncidentHandlerType(), failedExternalTaskIncidentHandler);
}
if (customIncidentHandlers != null) {
for (IncidentHandler incidentHandler : customIncidentHandlers) {
incidentHandlers.put(incidentHandler.getIncidentHandlerType(), incidentHandler);
}
}
} }
|
public class class_name {
protected void initIncidentHandlers() {
if (incidentHandlers == null) {
incidentHandlers = new HashMap<String, IncidentHandler>(); // depends on control dependency: [if], data = [none]
DefaultIncidentHandler failedJobIncidentHandler = new DefaultIncidentHandler(Incident.FAILED_JOB_HANDLER_TYPE);
incidentHandlers.put(failedJobIncidentHandler.getIncidentHandlerType(), failedJobIncidentHandler); // depends on control dependency: [if], data = [none]
DefaultIncidentHandler failedExternalTaskIncidentHandler = new DefaultIncidentHandler(Incident.EXTERNAL_TASK_HANDLER_TYPE);
incidentHandlers.put(failedExternalTaskIncidentHandler.getIncidentHandlerType(), failedExternalTaskIncidentHandler); // depends on control dependency: [if], data = [none]
}
if (customIncidentHandlers != null) {
for (IncidentHandler incidentHandler : customIncidentHandlers) {
incidentHandlers.put(incidentHandler.getIncidentHandlerType(), incidentHandler); // depends on control dependency: [for], data = [incidentHandler]
}
}
} }
|
public class class_name {
@Override
public synchronized Map<Object, Object> getProperties()
{
if (properties == null || this.isAlwaysReload())
{
loadProperties();
}
// return copy
return new HashMap<Object, Object>(properties);
} }
|
public class class_name {
@Override
public synchronized Map<Object, Object> getProperties()
{
if (properties == null || this.isAlwaysReload())
{
loadProperties(); // depends on control dependency: [if], data = [none]
}
// return copy
return new HashMap<Object, Object>(properties);
} }
|
public class class_name {
public MetricDatum withCounts(Double... counts) {
if (this.counts == null) {
setCounts(new com.amazonaws.internal.SdkInternalList<Double>(counts.length));
}
for (Double ele : counts) {
this.counts.add(ele);
}
return this;
} }
|
public class class_name {
public MetricDatum withCounts(Double... counts) {
if (this.counts == null) {
setCounts(new com.amazonaws.internal.SdkInternalList<Double>(counts.length)); // depends on control dependency: [if], data = [none]
}
for (Double ele : counts) {
this.counts.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} }
|
public class class_name {
public static String transform(String template, Map<String,Object> map) {
// validations
if(template == null) {
return null;
}
if(map == null) {
return template;
}
// transform
List<String> nameList = new ArrayList<String>();
nameList.addAll(map.keySet());
for(String name : nameList) {
template = template.replaceAll("\\$\\{" + name + "}", map.get(name).toString());
}
return template;
} }
|
public class class_name {
public static String transform(String template, Map<String,Object> map) {
// validations
if(template == null) {
return null; // depends on control dependency: [if], data = [none]
}
if(map == null) {
return template; // depends on control dependency: [if], data = [none]
}
// transform
List<String> nameList = new ArrayList<String>();
nameList.addAll(map.keySet());
for(String name : nameList) {
template = template.replaceAll("\\$\\{" + name + "}", map.get(name).toString()); // depends on control dependency: [for], data = [name]
}
return template;
} }
|
public class class_name {
private void addJolokiaId(Map<String, String> pFinalCfg) {
if (!pFinalCfg.containsKey(ConfigKey.AGENT_ID.getKeyValue())) {
pFinalCfg.put(ConfigKey.AGENT_ID.getKeyValue(), NetworkUtil.getAgentId(hashCode(),"jvm"));
}
pFinalCfg.put(ConfigKey.AGENT_TYPE.getKeyValue(), "jvm");
} }
|
public class class_name {
private void addJolokiaId(Map<String, String> pFinalCfg) {
if (!pFinalCfg.containsKey(ConfigKey.AGENT_ID.getKeyValue())) {
pFinalCfg.put(ConfigKey.AGENT_ID.getKeyValue(), NetworkUtil.getAgentId(hashCode(),"jvm")); // depends on control dependency: [if], data = [none]
}
pFinalCfg.put(ConfigKey.AGENT_TYPE.getKeyValue(), "jvm");
} }
|
public class class_name {
@SuppressWarnings("unused")
private ApproximationLine approximateUpperHullOld(ConvexHull convexHull, double[] log_k, double sum_log_k, double sum_log_k2, double[] log_kDist, double sum_log_kDist, double sum_log_k_kDist) {
StringBuilder msg = new StringBuilder(10000);
int[] upperHull = convexHull.getUpperHull();
int u = convexHull.getNumberOfPointsInUpperHull();
int k_0 = settings.kmax - upperHull.length + 1;
// linear search on all line segments on the upper convex hull
msg.append("upper hull:").append(u);
double upp_error = Double.MAX_VALUE;
double upp_m = 0.0;
double upp_t = 0.0;
for(int i = 1; i < u; i++) {
double cur_m = (log_kDist[upperHull[i]] - log_kDist[upperHull[i - 1]]) / (log_k[upperHull[i]] - log_k[upperHull[i - 1]]);
double cur_t = log_kDist[upperHull[i]] - cur_m * log_k[upperHull[i]];
double cur_error = ssqerr(k_0, settings.kmax, log_k, log_kDist, cur_m, cur_t);
if(cur_error < upp_error) {
upp_error = cur_error;
upp_m = cur_m;
upp_t = cur_t;
}
}
// linear search on all points of the upper convex hull
boolean is_left = true; // NEEDED FOR PROOF CHECK
for(int i = 0; i < u; i++) {
double cur_m = optimize(k_0, settings.kmax, sum_log_k, sum_log_k2, log_k[upperHull[i]], log_kDist[upperHull[i]], sum_log_k_kDist, sum_log_kDist);
double cur_t = log_kDist[upperHull[i]] - cur_m * log_k[upperHull[i]];
// only valid if both neighboring points are underneath y=mx+t
if((i == 0 || log_kDist[upperHull[i - 1]] <= log_kDist[upperHull[i]] - cur_m * (log_k[upperHull[i]] - log_k[upperHull[i - 1]])) && (i == u - 1 || log_kDist[upperHull[i + 1]] <= log_kDist[upperHull[i]] + cur_m * (log_k[upperHull[i + 1]] - log_k[upperHull[i]]))) {
double cur_error = ssqerr(k_0, settings.kmax, log_k, log_kDist, cur_m, cur_t);
if(cur_error < upp_error) {
upp_error = cur_error;
upp_m = cur_m;
upp_t = cur_t;
}
}
// check proof of bisection search
if(!(i > 0 && log_kDist[upperHull[i - 1]] > log_kDist[upperHull[i]] - cur_m * (log_k[upperHull[i]] - log_k[upperHull[i - 1]])) && !is_left) {
LOG.warning("ERROR upper: The bisection search will not work properly !" + "\n" + FormatUtil.format(log_kDist));
}
if(!(i < u - 1 && log_kDist[upperHull[i + 1]] > log_kDist[upperHull[i]] + cur_m * (log_k[upperHull[i + 1]] - log_k[upperHull[i]]))) {
is_left = false;
}
}
return new ApproximationLine(k_0, upp_m, upp_t);
} }
|
public class class_name {
@SuppressWarnings("unused")
private ApproximationLine approximateUpperHullOld(ConvexHull convexHull, double[] log_k, double sum_log_k, double sum_log_k2, double[] log_kDist, double sum_log_kDist, double sum_log_k_kDist) {
StringBuilder msg = new StringBuilder(10000);
int[] upperHull = convexHull.getUpperHull();
int u = convexHull.getNumberOfPointsInUpperHull();
int k_0 = settings.kmax - upperHull.length + 1;
// linear search on all line segments on the upper convex hull
msg.append("upper hull:").append(u);
double upp_error = Double.MAX_VALUE;
double upp_m = 0.0;
double upp_t = 0.0;
for(int i = 1; i < u; i++) {
double cur_m = (log_kDist[upperHull[i]] - log_kDist[upperHull[i - 1]]) / (log_k[upperHull[i]] - log_k[upperHull[i - 1]]);
double cur_t = log_kDist[upperHull[i]] - cur_m * log_k[upperHull[i]];
double cur_error = ssqerr(k_0, settings.kmax, log_k, log_kDist, cur_m, cur_t);
if(cur_error < upp_error) {
upp_error = cur_error; // depends on control dependency: [if], data = [none]
upp_m = cur_m; // depends on control dependency: [if], data = [none]
upp_t = cur_t; // depends on control dependency: [if], data = [none]
}
}
// linear search on all points of the upper convex hull
boolean is_left = true; // NEEDED FOR PROOF CHECK
for(int i = 0; i < u; i++) {
double cur_m = optimize(k_0, settings.kmax, sum_log_k, sum_log_k2, log_k[upperHull[i]], log_kDist[upperHull[i]], sum_log_k_kDist, sum_log_kDist);
double cur_t = log_kDist[upperHull[i]] - cur_m * log_k[upperHull[i]];
// only valid if both neighboring points are underneath y=mx+t
if((i == 0 || log_kDist[upperHull[i - 1]] <= log_kDist[upperHull[i]] - cur_m * (log_k[upperHull[i]] - log_k[upperHull[i - 1]])) && (i == u - 1 || log_kDist[upperHull[i + 1]] <= log_kDist[upperHull[i]] + cur_m * (log_k[upperHull[i + 1]] - log_k[upperHull[i]]))) {
double cur_error = ssqerr(k_0, settings.kmax, log_k, log_kDist, cur_m, cur_t);
if(cur_error < upp_error) {
upp_error = cur_error; // depends on control dependency: [if], data = [none]
upp_m = cur_m; // depends on control dependency: [if], data = [none]
upp_t = cur_t; // depends on control dependency: [if], data = [none]
}
}
// check proof of bisection search
if(!(i > 0 && log_kDist[upperHull[i - 1]] > log_kDist[upperHull[i]] - cur_m * (log_k[upperHull[i]] - log_k[upperHull[i - 1]])) && !is_left) {
LOG.warning("ERROR upper: The bisection search will not work properly !" + "\n" + FormatUtil.format(log_kDist)); // depends on control dependency: [if], data = [none]
}
if(!(i < u - 1 && log_kDist[upperHull[i + 1]] > log_kDist[upperHull[i]] + cur_m * (log_k[upperHull[i + 1]] - log_k[upperHull[i]]))) {
is_left = false; // depends on control dependency: [if], data = [none]
}
}
return new ApproximationLine(k_0, upp_m, upp_t);
} }
|
public class class_name {
@Nullable
public <R> R reduce(@Nullable R identity,
@NotNull BiFunction<? super R, ? super T, ? extends R> accumulator) {
R result = identity;
while (iterator.hasNext()) {
final T value = iterator.next();
result = accumulator.apply(result, value);
}
return result;
} }
|
public class class_name {
@Nullable
public <R> R reduce(@Nullable R identity,
@NotNull BiFunction<? super R, ? super T, ? extends R> accumulator) {
R result = identity;
while (iterator.hasNext()) {
final T value = iterator.next();
result = accumulator.apply(result, value); // depends on control dependency: [while], data = [none]
}
return result;
} }
|
public class class_name {
private void print(TaskListener listener, int depth) {
indent(listener, depth);
Run<?, ?> upstreamRun = getUpstreamRun();
if (upstreamRun == null) {
listener.getLogger().println("Started by upstream build " + ModelHyperlinkNote.encodeTo('/' + getUpstreamUrl(), getUpstreamProject()) +
"\" #" + ModelHyperlinkNote.encodeTo('/' + getUpstreamUrl() + getUpstreamBuild(), Integer.toString(getUpstreamBuild())) +
" generating Maven artifact: " + getMavenArtifactsDescription());
} else {
listener.getLogger().println("Started by upstream build " +
ModelHyperlinkNote.encodeTo('/' + upstreamRun.getUrl(), upstreamRun.getFullDisplayName()) + " generating Maven artifacts: " + getMavenArtifactsDescription());
}
if (getUpstreamCauses() != null && !getUpstreamCauses().isEmpty()) {
indent(listener, depth);
listener.getLogger().println("originally caused by:");
for (Cause cause : getUpstreamCauses()) {
if (cause instanceof MavenDependencyUpstreamCause) {
((MavenDependencyUpstreamCause) cause).print(listener, depth + 1);
} else {
indent(listener, depth + 1);
cause.print(listener);
}
}
}
} }
|
public class class_name {
private void print(TaskListener listener, int depth) {
indent(listener, depth);
Run<?, ?> upstreamRun = getUpstreamRun();
if (upstreamRun == null) {
listener.getLogger().println("Started by upstream build " + ModelHyperlinkNote.encodeTo('/' + getUpstreamUrl(), getUpstreamProject()) +
"\" #" + ModelHyperlinkNote.encodeTo('/' + getUpstreamUrl() + getUpstreamBuild(), Integer.toString(getUpstreamBuild())) +
" generating Maven artifact: " + getMavenArtifactsDescription()); // depends on control dependency: [if], data = [none]
} else {
listener.getLogger().println("Started by upstream build " +
ModelHyperlinkNote.encodeTo('/' + upstreamRun.getUrl(), upstreamRun.getFullDisplayName()) + " generating Maven artifacts: " + getMavenArtifactsDescription()); // depends on control dependency: [if], data = [none]
}
if (getUpstreamCauses() != null && !getUpstreamCauses().isEmpty()) {
indent(listener, depth); // depends on control dependency: [if], data = [none]
listener.getLogger().println("originally caused by:"); // depends on control dependency: [if], data = [none]
for (Cause cause : getUpstreamCauses()) {
if (cause instanceof MavenDependencyUpstreamCause) {
((MavenDependencyUpstreamCause) cause).print(listener, depth + 1); // depends on control dependency: [if], data = [none]
} else {
indent(listener, depth + 1); // depends on control dependency: [if], data = [none]
cause.print(listener); // depends on control dependency: [if], data = [none]
}
}
}
} }
|
public class class_name {
public boolean printData(PrintWriter out, int iPrintOptions)
{
if ((this.getScreenField().getConverter().getField() instanceof XmlField)
|| (this.getScreenField().getConverter().getField() instanceof HtmlField)
|| (this.getScreenField().getConverter().getField() instanceof XMLPropertiesField))
{
boolean bFieldsFound = false;
String strFieldName = this.getScreenField().getSFieldParam();
// Do NOT encode the data!
String strFieldData = this.getScreenField().getSFieldValue(true, false);
out.println(" <" + strFieldName + '>' + strFieldData + "</" + strFieldName + '>');
return bFieldsFound;
}
else
return super.printData(out, iPrintOptions);
} }
|
public class class_name {
public boolean printData(PrintWriter out, int iPrintOptions)
{
if ((this.getScreenField().getConverter().getField() instanceof XmlField)
|| (this.getScreenField().getConverter().getField() instanceof HtmlField)
|| (this.getScreenField().getConverter().getField() instanceof XMLPropertiesField))
{
boolean bFieldsFound = false;
String strFieldName = this.getScreenField().getSFieldParam();
// Do NOT encode the data!
String strFieldData = this.getScreenField().getSFieldValue(true, false);
out.println(" <" + strFieldName + '>' + strFieldData + "</" + strFieldName + '>'); // depends on control dependency: [if], data = [none]
return bFieldsFound; // depends on control dependency: [if], data = [none]
}
else
return super.printData(out, iPrintOptions);
} }
|
public class class_name {
public static ApplicationContext findApplicationContext(ServletContext servletContext) {
if(servletContext == null) {
return ContextLoader.getCurrentWebApplicationContext();
}
return WebApplicationContextUtils.getWebApplicationContext(servletContext);
} }
|
public class class_name {
public static ApplicationContext findApplicationContext(ServletContext servletContext) {
if(servletContext == null) {
return ContextLoader.getCurrentWebApplicationContext(); // depends on control dependency: [if], data = [none]
}
return WebApplicationContextUtils.getWebApplicationContext(servletContext);
} }
|
public class class_name {
public BatchGetRepositoriesResult withRepositories(RepositoryMetadata... repositories) {
if (this.repositories == null) {
setRepositories(new java.util.ArrayList<RepositoryMetadata>(repositories.length));
}
for (RepositoryMetadata ele : repositories) {
this.repositories.add(ele);
}
return this;
} }
|
public class class_name {
public BatchGetRepositoriesResult withRepositories(RepositoryMetadata... repositories) {
if (this.repositories == null) {
setRepositories(new java.util.ArrayList<RepositoryMetadata>(repositories.length)); // depends on control dependency: [if], data = [none]
}
for (RepositoryMetadata ele : repositories) {
this.repositories.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} }
|
public class class_name {
public Object add(String key, Object value) {
if (!this.containsKey(key)) {
this.put(key, value);
return null;
}
return this.get(key);
} }
|
public class class_name {
public Object add(String key, Object value) {
if (!this.containsKey(key)) {
this.put(key, value); // depends on control dependency: [if], data = [none]
return null; // depends on control dependency: [if], data = [none]
}
return this.get(key);
} }
|
public class class_name {
public void startStep(final String uuid, final StepResult result) {
final Optional<String> current = threadContext.getCurrent();
if (!current.isPresent()) {
LOGGER.error("Could not start step: no test case running");
return;
}
final String parentUuid = current.get();
startStep(parentUuid, uuid, result);
} }
|
public class class_name {
public void startStep(final String uuid, final StepResult result) {
final Optional<String> current = threadContext.getCurrent();
if (!current.isPresent()) {
LOGGER.error("Could not start step: no test case running"); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
final String parentUuid = current.get();
startStep(parentUuid, uuid, result);
} }
|
public class class_name {
public static String formatDuration(final long durationMillis, final String format, final boolean padWithZeros) {
Validate.inclusiveBetween(0, Long.MAX_VALUE, durationMillis, "durationMillis must not be negative");
final Token[] tokens = lexx(format);
long days = 0;
long hours = 0;
long minutes = 0;
long seconds = 0;
long milliseconds = durationMillis;
if (Token.containsTokenWithValue(tokens, d) ) {
days = milliseconds / DateUtils.MILLIS_PER_DAY;
milliseconds = milliseconds - (days * DateUtils.MILLIS_PER_DAY);
}
if (Token.containsTokenWithValue(tokens, H) ) {
hours = milliseconds / DateUtils.MILLIS_PER_HOUR;
milliseconds = milliseconds - (hours * DateUtils.MILLIS_PER_HOUR);
}
if (Token.containsTokenWithValue(tokens, m) ) {
minutes = milliseconds / DateUtils.MILLIS_PER_MINUTE;
milliseconds = milliseconds - (minutes * DateUtils.MILLIS_PER_MINUTE);
}
if (Token.containsTokenWithValue(tokens, s) ) {
seconds = milliseconds / DateUtils.MILLIS_PER_SECOND;
milliseconds = milliseconds - (seconds * DateUtils.MILLIS_PER_SECOND);
}
return format(tokens, 0, 0, days, hours, minutes, seconds, milliseconds, padWithZeros);
} }
|
public class class_name {
public static String formatDuration(final long durationMillis, final String format, final boolean padWithZeros) {
Validate.inclusiveBetween(0, Long.MAX_VALUE, durationMillis, "durationMillis must not be negative");
final Token[] tokens = lexx(format);
long days = 0;
long hours = 0;
long minutes = 0;
long seconds = 0;
long milliseconds = durationMillis;
if (Token.containsTokenWithValue(tokens, d) ) {
days = milliseconds / DateUtils.MILLIS_PER_DAY; // depends on control dependency: [if], data = [none]
milliseconds = milliseconds - (days * DateUtils.MILLIS_PER_DAY); // depends on control dependency: [if], data = [none]
}
if (Token.containsTokenWithValue(tokens, H) ) {
hours = milliseconds / DateUtils.MILLIS_PER_HOUR; // depends on control dependency: [if], data = [none]
milliseconds = milliseconds - (hours * DateUtils.MILLIS_PER_HOUR); // depends on control dependency: [if], data = [none]
}
if (Token.containsTokenWithValue(tokens, m) ) {
minutes = milliseconds / DateUtils.MILLIS_PER_MINUTE; // depends on control dependency: [if], data = [none]
milliseconds = milliseconds - (minutes * DateUtils.MILLIS_PER_MINUTE); // depends on control dependency: [if], data = [none]
}
if (Token.containsTokenWithValue(tokens, s) ) {
seconds = milliseconds / DateUtils.MILLIS_PER_SECOND; // depends on control dependency: [if], data = [none]
milliseconds = milliseconds - (seconds * DateUtils.MILLIS_PER_SECOND); // depends on control dependency: [if], data = [none]
}
return format(tokens, 0, 0, days, hours, minutes, seconds, milliseconds, padWithZeros);
} }
|
public class class_name {
public void setTextAttributes(byte[] fields, Hashtable attrib) {
for (byte field : fields) {
getFieldInfos(field).m_textAttributes = attrib;
}
notifyListeners();
} }
|
public class class_name {
public void setTextAttributes(byte[] fields, Hashtable attrib) {
for (byte field : fields) {
getFieldInfos(field).m_textAttributes = attrib;
// depends on control dependency: [for], data = [field]
}
notifyListeners();
} }
|
public class class_name {
@NonNull
@UiThread
private HashMap<Integer, Boolean> generateExpandedStateMap() {
HashMap<Integer, Boolean> parentHashMap = new HashMap<>();
int childCount = 0;
int listItemCount = mFlatItemList.size();
for (int i = 0; i < listItemCount; i++) {
if (mFlatItemList.get(i) != null) {
ExpandableWrapper<P, C> listItem = mFlatItemList.get(i);
if (listItem.isParent()) {
parentHashMap.put(i - childCount, listItem.isExpanded());
} else {
childCount++;
}
}
}
return parentHashMap;
} }
|
public class class_name {
@NonNull
@UiThread
private HashMap<Integer, Boolean> generateExpandedStateMap() {
HashMap<Integer, Boolean> parentHashMap = new HashMap<>();
int childCount = 0;
int listItemCount = mFlatItemList.size();
for (int i = 0; i < listItemCount; i++) {
if (mFlatItemList.get(i) != null) {
ExpandableWrapper<P, C> listItem = mFlatItemList.get(i);
if (listItem.isParent()) {
parentHashMap.put(i - childCount, listItem.isExpanded()); // depends on control dependency: [if], data = [none]
} else {
childCount++; // depends on control dependency: [if], data = [none]
}
}
}
return parentHashMap;
} }
|
public class class_name {
public static boolean hasAtLeastOneFieldOrMethod(ClassNode node, String... types) {
Set<String> typesSet = new HashSet<>(Arrays.asList(types));
for (FieldNode field : node.getFields()) {
if (typesSet.contains(field.getType().getName())) {
return true;
}
}
for (MethodNode method : node.getMethods()) {
if (typesSet.contains(method.getReturnType().getName())) {
return true;
}
}
return false;
} }
|
public class class_name {
public static boolean hasAtLeastOneFieldOrMethod(ClassNode node, String... types) {
Set<String> typesSet = new HashSet<>(Arrays.asList(types));
for (FieldNode field : node.getFields()) {
if (typesSet.contains(field.getType().getName())) {
return true; // depends on control dependency: [if], data = [none]
}
}
for (MethodNode method : node.getMethods()) {
if (typesSet.contains(method.getReturnType().getName())) {
return true; // depends on control dependency: [if], data = [none]
}
}
return false;
} }
|
public class class_name {
public boolean doesExist(ClassDescriptor cld, Identity oid, Object obj)
{
boolean result = false;
String sql = (String) sqlSelectMap.get(cld);
if(sql == null)
{
sql = new SqlExistStatement(cld, LoggerFactory.getDefaultLogger()).getStatement();
sqlSelectMap.put(cld, sql);
}
ValueContainer[] pkValues;
if(oid == null)
{
pkValues = getKeyValues(cld, obj, true);
}
else
{
pkValues = getKeyValues(cld, oid);
}
StatementManagerIF sm = m_broker.serviceStatementManager();
PreparedStatement stmt = null;
ResultSet rs = null;
try
{
stmt = sm.getPreparedStatement(cld, sql, false, 1, false);
sm.bindValues(stmt, pkValues, 1);
rs = stmt.executeQuery();
result = rs.next();
}
catch(SQLException e)
{
throw ExceptionHelper.generateException("[BrokerHelper#doesExist] Can't check if specified" +
" object is already persisted", e, sql, cld, pkValues, null, obj);
}
finally
{
sm.closeResources(stmt, rs);
}
return result;
} }
|
public class class_name {
public boolean doesExist(ClassDescriptor cld, Identity oid, Object obj)
{
boolean result = false;
String sql = (String) sqlSelectMap.get(cld);
if(sql == null)
{
sql = new SqlExistStatement(cld, LoggerFactory.getDefaultLogger()).getStatement();
// depends on control dependency: [if], data = [none]
sqlSelectMap.put(cld, sql);
// depends on control dependency: [if], data = [none]
}
ValueContainer[] pkValues;
if(oid == null)
{
pkValues = getKeyValues(cld, obj, true);
// depends on control dependency: [if], data = [none]
}
else
{
pkValues = getKeyValues(cld, oid);
// depends on control dependency: [if], data = [none]
}
StatementManagerIF sm = m_broker.serviceStatementManager();
PreparedStatement stmt = null;
ResultSet rs = null;
try
{
stmt = sm.getPreparedStatement(cld, sql, false, 1, false);
// depends on control dependency: [try], data = [none]
sm.bindValues(stmt, pkValues, 1);
// depends on control dependency: [try], data = [none]
rs = stmt.executeQuery();
// depends on control dependency: [try], data = [none]
result = rs.next();
// depends on control dependency: [try], data = [none]
}
catch(SQLException e)
{
throw ExceptionHelper.generateException("[BrokerHelper#doesExist] Can't check if specified" +
" object is already persisted", e, sql, cld, pkValues, null, obj);
}
// depends on control dependency: [catch], data = [none]
finally
{
sm.closeResources(stmt, rs);
}
return result;
} }
|
public class class_name {
public static PathElement parse(String key) {
if ( "*".equals( key ) ) {
return new StarAllPathElement( key );
}
int numOfStars = StringTools.countMatches( key, "*" );
if (numOfStars == 1) {
return new StarSinglePathElement(key);
} else if (numOfStars == 2) {
return new StarDoublePathElement(key);
} else if (numOfStars > 2) {
return new StarRegexPathElement(key);
} else {
return new LiteralPathElement(key);
}
} }
|
public class class_name {
public static PathElement parse(String key) {
if ( "*".equals( key ) ) {
return new StarAllPathElement( key ); // depends on control dependency: [if], data = [none]
}
int numOfStars = StringTools.countMatches( key, "*" );
if (numOfStars == 1) {
return new StarSinglePathElement(key); // depends on control dependency: [if], data = [none]
} else if (numOfStars == 2) {
return new StarDoublePathElement(key); // depends on control dependency: [if], data = [none]
} else if (numOfStars > 2) {
return new StarRegexPathElement(key); // depends on control dependency: [if], data = [none]
} else {
return new LiteralPathElement(key); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
private <E extends WeightedEdge> SparseDoubleVector getVertexWeightVector(
WeightedGraph<E> g, int vertex) {
if (keepWeightVectors) {
SparseDoubleVector weightVec = vertexToWeightVector.get(vertex);
if (weightVec == null) {
synchronized(this) {
weightVec = vertexToWeightVector.get(vertex);
if (weightVec == null) {
weightVec = computeWeightVector(g, vertex);
vertexToWeightVector.put(vertex, weightVec);
}
}
}
return weightVec;
}
else
return computeWeightVector(g, vertex);
} }
|
public class class_name {
private <E extends WeightedEdge> SparseDoubleVector getVertexWeightVector(
WeightedGraph<E> g, int vertex) {
if (keepWeightVectors) {
SparseDoubleVector weightVec = vertexToWeightVector.get(vertex);
if (weightVec == null) {
synchronized(this) { // depends on control dependency: [if], data = [none]
weightVec = vertexToWeightVector.get(vertex);
if (weightVec == null) {
weightVec = computeWeightVector(g, vertex); // depends on control dependency: [if], data = [none]
vertexToWeightVector.put(vertex, weightVec); // depends on control dependency: [if], data = [none]
}
}
}
return weightVec; // depends on control dependency: [if], data = [none]
}
else
return computeWeightVector(g, vertex);
} }
|
public class class_name {
public static String pad(String s, int length)
{
if (s.length() < length)
{
StringBuffer a = new StringBuffer(length);
a.append(s);
for (int i = s.length(); i < length; i++)
{
a = a.append(" ");
}
return a.toString();
}
else if (s.length() > length)
{
return s.substring(0, length);
}
else
{
return s;
}
} }
|
public class class_name {
public static String pad(String s, int length)
{
if (s.length() < length)
{
StringBuffer a = new StringBuffer(length);
a.append(s); // depends on control dependency: [if], data = [none]
for (int i = s.length(); i < length; i++)
{
a = a.append(" "); // depends on control dependency: [for], data = [none]
}
return a.toString(); // depends on control dependency: [if], data = [none]
}
else if (s.length() > length)
{
return s.substring(0, length); // depends on control dependency: [if], data = [length)]
}
else
{
return s; // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
@Override
public T get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException {
this.lock.lock();
try {
if (this.completed) {
return this.result;
}
this.result = getPoolObject(timeout, unit);
this.completed = true;
return result;
} catch (IOException ex) {
this.completed = true;
this.result = null;
throw new ExecutionException(ex);
} finally {
this.lock.unlock();
}
} }
|
public class class_name {
@Override
public T get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException {
this.lock.lock();
try {
if (this.completed) {
return this.result; // depends on control dependency: [if], data = [none]
}
this.result = getPoolObject(timeout, unit);
this.completed = true;
return result;
} catch (IOException ex) {
this.completed = true;
this.result = null;
throw new ExecutionException(ex);
} finally {
this.lock.unlock();
}
} }
|
public class class_name {
@Override
public void onReceive(Context context, Intent intent) {
final String action = intent.getAction();
if (Intent.ACTION_BATTERY_LOW.equals(action)) {
// If we get this action, the battery is discharging => it isn't plugged in so
// there's no work to cancel. We track this variable for the case where it is
// charging, but hasn't been for long enough to be healthy.
ControllerPrefs.getInstance(context).edit().setBatteryLow(true).apply();
} else if (Intent.ACTION_BATTERY_OKAY.equals(action)) {
ControllerPrefs.getInstance(context).edit().setBatteryLow(false).apply();
maybeReportNewChargingState(context, isCharging(context));
} else if (Intent.ACTION_POWER_CONNECTED.equals(action)) {
// Set up an alarm for ACTION_CHARGING_STABLE - we don't want to kick off tasks
// here if the user unplugs the phone immediately.
setStableChargingAlarm(context);
} else if (Intent.ACTION_POWER_DISCONNECTED.equals(action)) {
// If an alarm is set, breathe a sigh of relief and cancel it - crisis averted.
cancelStableChargingAlarm(context);
maybeReportNewChargingState(context, false);
} else if (ACTION_CHARGING_STABLE.equals(action)) {
// Here's where we actually do the notify for a task being ready.
maybeReportNewChargingState(context, !isBatteryLow(context));
}
} }
|
public class class_name {
@Override
public void onReceive(Context context, Intent intent) {
final String action = intent.getAction();
if (Intent.ACTION_BATTERY_LOW.equals(action)) {
// If we get this action, the battery is discharging => it isn't plugged in so
// there's no work to cancel. We track this variable for the case where it is
// charging, but hasn't been for long enough to be healthy.
ControllerPrefs.getInstance(context).edit().setBatteryLow(true).apply(); // depends on control dependency: [if], data = [none]
} else if (Intent.ACTION_BATTERY_OKAY.equals(action)) {
ControllerPrefs.getInstance(context).edit().setBatteryLow(false).apply(); // depends on control dependency: [if], data = [none]
maybeReportNewChargingState(context, isCharging(context)); // depends on control dependency: [if], data = [none]
} else if (Intent.ACTION_POWER_CONNECTED.equals(action)) {
// Set up an alarm for ACTION_CHARGING_STABLE - we don't want to kick off tasks
// here if the user unplugs the phone immediately.
setStableChargingAlarm(context); // depends on control dependency: [if], data = [none]
} else if (Intent.ACTION_POWER_DISCONNECTED.equals(action)) {
// If an alarm is set, breathe a sigh of relief and cancel it - crisis averted.
cancelStableChargingAlarm(context); // depends on control dependency: [if], data = [none]
maybeReportNewChargingState(context, false); // depends on control dependency: [if], data = [none]
} else if (ACTION_CHARGING_STABLE.equals(action)) {
// Here's where we actually do the notify for a task being ready.
maybeReportNewChargingState(context, !isBatteryLow(context)); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
@Override
public void encodeTo(Encoder enc) {
if (!isMutated()) {
if (flDict == null) {
enc.beginDict(0);
enc.endDict();
}
else { enc.writeValue(flDict); }
}
else {
enc.beginDict(valCount);
for (Map.Entry<String, MValue> entry : valueMap.entrySet()) {
final MValue value = entry.getValue();
if (!value.isEmpty()) {
enc.writeKey(entry.getKey());
value.encodeTo(enc);
}
}
if (flDict != null) {
final FLDictIterator itr = new FLDictIterator();
try {
itr.begin(flDict);
String key;
while ((key = itr.getKeyString()) != null) {
if (!valueMap.containsKey(key)) {
enc.writeKey(key);
enc.writeValue(itr.getValue());
}
if (!itr.next()) { break; }
}
}
finally {
itr.free();
}
}
enc.endDict();
}
} }
|
public class class_name {
@Override
public void encodeTo(Encoder enc) {
if (!isMutated()) {
if (flDict == null) {
enc.beginDict(0); // depends on control dependency: [if], data = [none]
enc.endDict(); // depends on control dependency: [if], data = [none]
}
else { enc.writeValue(flDict); } // depends on control dependency: [if], data = [(flDict]
}
else {
enc.beginDict(valCount); // depends on control dependency: [if], data = [none]
for (Map.Entry<String, MValue> entry : valueMap.entrySet()) {
final MValue value = entry.getValue();
if (!value.isEmpty()) {
enc.writeKey(entry.getKey()); // depends on control dependency: [if], data = [none]
value.encodeTo(enc); // depends on control dependency: [if], data = [none]
}
}
if (flDict != null) {
final FLDictIterator itr = new FLDictIterator();
try {
itr.begin(flDict); // depends on control dependency: [try], data = [none]
String key;
while ((key = itr.getKeyString()) != null) {
if (!valueMap.containsKey(key)) {
enc.writeKey(key); // depends on control dependency: [if], data = [none]
enc.writeValue(itr.getValue()); // depends on control dependency: [if], data = [none]
}
if (!itr.next()) { break; }
}
}
finally {
itr.free();
}
}
enc.endDict(); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
public boolean isNexusUsesProxy() {
// If they didn't specify whether Nexus needs to use the proxy, we should
// still honor the property if it's set.
if (line == null || !line.hasOption(ARGUMENT.NEXUS_USES_PROXY)) {
try {
return settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY);
} catch (InvalidSettingException ise) {
return true;
}
} else {
return Boolean.parseBoolean(line.getOptionValue(ARGUMENT.NEXUS_USES_PROXY));
}
} }
|
public class class_name {
public boolean isNexusUsesProxy() {
// If they didn't specify whether Nexus needs to use the proxy, we should
// still honor the property if it's set.
if (line == null || !line.hasOption(ARGUMENT.NEXUS_USES_PROXY)) {
try {
return settings.getBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY); // depends on control dependency: [try], data = [none]
} catch (InvalidSettingException ise) {
return true;
} // depends on control dependency: [catch], data = [none]
} else {
return Boolean.parseBoolean(line.getOptionValue(ARGUMENT.NEXUS_USES_PROXY)); // depends on control dependency: [if], data = [(line]
}
} }
|
public class class_name {
public static int getTotalPhysicalMemorySizeInMB() {
int memorySizeInMB;
try {
long memorySizeInBytes = ((com.sun.management.OperatingSystemMXBean) ManagementFactory
.getOperatingSystemMXBean()).getTotalPhysicalMemorySize();
memorySizeInMB = (int) (memorySizeInBytes / BYTES_IN_MEGABYTE);
} catch (Exception e) {
memorySizeInMB = -1;
}
return memorySizeInMB;
} }
|
public class class_name {
public static int getTotalPhysicalMemorySizeInMB() {
int memorySizeInMB;
try {
long memorySizeInBytes = ((com.sun.management.OperatingSystemMXBean) ManagementFactory
.getOperatingSystemMXBean()).getTotalPhysicalMemorySize();
memorySizeInMB = (int) (memorySizeInBytes / BYTES_IN_MEGABYTE); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
memorySizeInMB = -1;
} // depends on control dependency: [catch], data = [none]
return memorySizeInMB;
} }
|
public class class_name {
public void createSecurityGroup(String groupName,
String description) {
AmazonEC2 amazonEC2 = getEc2Client();
try {
final CreateSecurityGroupRequest securityGroupRequest = new CreateSecurityGroupRequest()
.withGroupName(groupName)
.withDescription(description);
amazonEC2.createSecurityGroup(securityGroupRequest);
LOGGER.info("Created Security Group: " + groupName);
} catch (AmazonServiceException ase) {
// This might mean that security group is already created, hence ignore
LOGGER.warn("Issue in creating security group", ase);
}
} }
|
public class class_name {
public void createSecurityGroup(String groupName,
String description) {
AmazonEC2 amazonEC2 = getEc2Client();
try {
final CreateSecurityGroupRequest securityGroupRequest = new CreateSecurityGroupRequest()
.withGroupName(groupName)
.withDescription(description);
amazonEC2.createSecurityGroup(securityGroupRequest); // depends on control dependency: [try], data = [none]
LOGGER.info("Created Security Group: " + groupName); // depends on control dependency: [try], data = [none]
} catch (AmazonServiceException ase) {
// This might mean that security group is already created, hence ignore
LOGGER.warn("Issue in creating security group", ase);
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
public static Number getPushNumber(InstructionHandle h) {
Instruction prevIns = h.getInstruction();
if (prevIns instanceof BIPUSH) {
BIPUSH ldcCipher = (BIPUSH) prevIns;
return ldcCipher.getValue();
} else if (prevIns instanceof SIPUSH) {
SIPUSH ldcCipher = (SIPUSH) prevIns;
return ldcCipher.getValue();
}
return null;
} }
|
public class class_name {
public static Number getPushNumber(InstructionHandle h) {
Instruction prevIns = h.getInstruction();
if (prevIns instanceof BIPUSH) {
BIPUSH ldcCipher = (BIPUSH) prevIns;
return ldcCipher.getValue(); // depends on control dependency: [if], data = [none]
} else if (prevIns instanceof SIPUSH) {
SIPUSH ldcCipher = (SIPUSH) prevIns;
return ldcCipher.getValue(); // depends on control dependency: [if], data = [none]
}
return null;
} }
|
public class class_name {
private void scheduleScanningIfExternalClass(final String className, final String relationship) {
// Don't scan Object
if (className != null && !className.equals("java.lang.Object")
// Only schedule each external class once for scanning, across all threads
&& classNamesScheduledForScanning.add(className)) {
// Search for the named class' classfile among classpath elements, in classpath order (this is O(N)
// for each class, but there shouldn't be too many cases of extending scanning upwards)
final String classfilePath = JarUtils.classNameToClassfilePath(className);
// First check current classpath element, to avoid iterating through other classpath elements
Resource classResource = classpathElement.getResource(classfilePath);
ClasspathElement foundInClasspathElt = null;
if (classResource != null) {
// Found the classfile in the current classpath element
foundInClasspathElt = classpathElement;
} else {
// Didn't find the classfile in the current classpath element -- iterate through other elements
for (final ClasspathElement classpathOrderElt : classpathOrder) {
if (classpathOrderElt != classpathElement) {
classResource = classpathOrderElt.getResource(classfilePath);
if (classResource != null) {
foundInClasspathElt = classpathOrderElt;
break;
}
}
}
}
if (classResource != null) {
// Found class resource
if (log != null) {
log.log("Scheduling external class for scanning: " + relationship + " " + className
+ (foundInClasspathElt == classpathElement ? ""
: " -- found in classpath element " + foundInClasspathElt));
}
if (additionalWorkUnits == null) {
additionalWorkUnits = new ArrayList<>();
}
// Schedule class resource for scanning
additionalWorkUnits.add(new ClassfileScanWorkUnit(foundInClasspathElt, classResource,
/* isExternalClass = */ true));
} else {
if (log != null) {
log.log("External " + relationship + " " + className + " was not found in "
+ "non-blacklisted packages -- cannot extend scanning to this class");
}
}
}
} }
|
public class class_name {
private void scheduleScanningIfExternalClass(final String className, final String relationship) {
// Don't scan Object
if (className != null && !className.equals("java.lang.Object")
// Only schedule each external class once for scanning, across all threads
&& classNamesScheduledForScanning.add(className)) {
// Search for the named class' classfile among classpath elements, in classpath order (this is O(N)
// for each class, but there shouldn't be too many cases of extending scanning upwards)
final String classfilePath = JarUtils.classNameToClassfilePath(className);
// First check current classpath element, to avoid iterating through other classpath elements
Resource classResource = classpathElement.getResource(classfilePath);
ClasspathElement foundInClasspathElt = null;
if (classResource != null) {
// Found the classfile in the current classpath element
foundInClasspathElt = classpathElement; // depends on control dependency: [if], data = [none]
} else {
// Didn't find the classfile in the current classpath element -- iterate through other elements
for (final ClasspathElement classpathOrderElt : classpathOrder) {
if (classpathOrderElt != classpathElement) {
classResource = classpathOrderElt.getResource(classfilePath); // depends on control dependency: [if], data = [none]
if (classResource != null) {
foundInClasspathElt = classpathOrderElt; // depends on control dependency: [if], data = [none]
break;
}
}
}
}
if (classResource != null) {
// Found class resource
if (log != null) {
log.log("Scheduling external class for scanning: " + relationship + " " + className
+ (foundInClasspathElt == classpathElement ? ""
: " -- found in classpath element " + foundInClasspathElt)); // depends on control dependency: [if], data = [none]
}
if (additionalWorkUnits == null) {
additionalWorkUnits = new ArrayList<>(); // depends on control dependency: [if], data = [none]
}
// Schedule class resource for scanning
additionalWorkUnits.add(new ClassfileScanWorkUnit(foundInClasspathElt, classResource,
/* isExternalClass = */ true)); // depends on control dependency: [if], data = [none]
} else {
if (log != null) {
log.log("External " + relationship + " " + className + " was not found in "
+ "non-blacklisted packages -- cannot extend scanning to this class"); // depends on control dependency: [if], data = [none]
}
}
}
} }
|
public class class_name {
private Object decodeResult(IoBuffer data) {
log.debug("decodeResult - data limit: {}", (data != null ? data.limit() : 0));
processHeaders(data);
Input input = new Input(data);
String target = null;
byte b = data.get();
//look for SOH
if (b == 0) {
log.debug("NUL: {}", b); //0
log.debug("SOH: {}", data.get()); //1
} else if (b == 1) {
log.debug("SOH: {}", b); //1
}
int targetUriLength = data.getShort();
log.debug("targetUri length: {}", targetUriLength);
target = input.readString(targetUriLength);
log.debug("NUL: {}", data.get()); //0
//should be junk bytes ff, ff, ff, ff
int count = data.getInt();
if (count == -1) {
log.debug("DC1: {}", data.get()); //17
count = 1;
} else {
data.position(data.position() - 4);
count = data.getShort();
}
if (count != 1) {
throw new RuntimeException("Expected exactly one result but got " + count);
}
String[] targetParts = target.split("[/]");
if (targetParts.length > 1) {
log.debug("Result sequence number: {}", targetParts[1]);
target = targetParts[2];
} else {
target = target.substring(1);
}
log.debug("Target: {}", target);
if ("onResult".equals(target)) {
//read return value
return input.readObject();
} else if ("onStatus".equals(target)) {
//read return value
return input.readObject();
}
//read return value
return Deserializer.deserialize(input, Object.class);
} }
|
public class class_name {
private Object decodeResult(IoBuffer data) {
log.debug("decodeResult - data limit: {}", (data != null ? data.limit() : 0));
processHeaders(data);
Input input = new Input(data);
String target = null;
byte b = data.get();
//look for SOH
if (b == 0) {
log.debug("NUL: {}", b); //0
// depends on control dependency: [if], data = [none]
log.debug("SOH: {}", data.get()); //1
// depends on control dependency: [if], data = [none]
} else if (b == 1) {
log.debug("SOH: {}", b); //1
// depends on control dependency: [if], data = [none]
}
int targetUriLength = data.getShort();
log.debug("targetUri length: {}", targetUriLength);
target = input.readString(targetUriLength);
log.debug("NUL: {}", data.get()); //0
//should be junk bytes ff, ff, ff, ff
int count = data.getInt();
if (count == -1) {
log.debug("DC1: {}", data.get()); //17
// depends on control dependency: [if], data = [none]
count = 1;
// depends on control dependency: [if], data = [none]
} else {
data.position(data.position() - 4);
// depends on control dependency: [if], data = [none]
count = data.getShort();
// depends on control dependency: [if], data = [none]
}
if (count != 1) {
throw new RuntimeException("Expected exactly one result but got " + count);
}
String[] targetParts = target.split("[/]");
if (targetParts.length > 1) {
log.debug("Result sequence number: {}", targetParts[1]);
// depends on control dependency: [if], data = [none]
target = targetParts[2];
// depends on control dependency: [if], data = [none]
} else {
target = target.substring(1);
// depends on control dependency: [if], data = [1)]
}
log.debug("Target: {}", target);
if ("onResult".equals(target)) {
//read return value
return input.readObject();
// depends on control dependency: [if], data = [none]
} else if ("onStatus".equals(target)) {
//read return value
return input.readObject();
// depends on control dependency: [if], data = [none]
}
//read return value
return Deserializer.deserialize(input, Object.class);
} }
|
public class class_name {
private void loadShardCache(TableDefinition tableDef) {
String appName = tableDef.getAppDef().getAppName();
String tableName = tableDef.getTableName();
m_logger.debug("Loading shard cache for {}.{}", appName, tableName);
Date cacheDate = new Date();
String cacheKey = appName + "/" + tableName;
m_cacheMap.put(cacheKey, cacheDate);
Map<String, Map<Integer, Date>> tableMap = m_appShardMap.get(appName);
if (tableMap == null) {
tableMap = new HashMap<>();
m_appShardMap.put(appName, tableMap);
}
Map<Integer, Date> shardMap = tableMap.get(tableName);
if (shardMap == null) {
shardMap = new HashMap<>();
tableMap.put(tableName, shardMap);
}
Tenant tenant = Tenant.getTenant(tableDef);
String storeName = SpiderService.termsStoreName(tableDef);
for(DColumn col: DBService.instance(tenant).getAllColumns(storeName, SpiderTransaction.SHARDS_ROW_KEY)) {
Integer shardNum = Integer.parseInt(col.getName());
Date shardDate = new Date(Long.parseLong(col.getValue()));
shardMap.put(shardNum, shardDate);
}
} }
|
public class class_name {
private void loadShardCache(TableDefinition tableDef) {
String appName = tableDef.getAppDef().getAppName();
String tableName = tableDef.getTableName();
m_logger.debug("Loading shard cache for {}.{}", appName, tableName);
Date cacheDate = new Date();
String cacheKey = appName + "/" + tableName;
m_cacheMap.put(cacheKey, cacheDate);
Map<String, Map<Integer, Date>> tableMap = m_appShardMap.get(appName);
if (tableMap == null) {
tableMap = new HashMap<>();
// depends on control dependency: [if], data = [none]
m_appShardMap.put(appName, tableMap);
// depends on control dependency: [if], data = [none]
}
Map<Integer, Date> shardMap = tableMap.get(tableName);
if (shardMap == null) {
shardMap = new HashMap<>();
// depends on control dependency: [if], data = [none]
tableMap.put(tableName, shardMap);
// depends on control dependency: [if], data = [none]
}
Tenant tenant = Tenant.getTenant(tableDef);
String storeName = SpiderService.termsStoreName(tableDef);
for(DColumn col: DBService.instance(tenant).getAllColumns(storeName, SpiderTransaction.SHARDS_ROW_KEY)) {
Integer shardNum = Integer.parseInt(col.getName());
Date shardDate = new Date(Long.parseLong(col.getValue()));
shardMap.put(shardNum, shardDate);
// depends on control dependency: [for], data = [none]
}
} }
|
public class class_name {
@Override
public CommerceNotificationQueueEntry fetchByLtS_First(Date sentDate,
OrderByComparator<CommerceNotificationQueueEntry> orderByComparator) {
List<CommerceNotificationQueueEntry> list = findByLtS(sentDate, 0, 1,
orderByComparator);
if (!list.isEmpty()) {
return list.get(0);
}
return null;
} }
|
public class class_name {
@Override
public CommerceNotificationQueueEntry fetchByLtS_First(Date sentDate,
OrderByComparator<CommerceNotificationQueueEntry> orderByComparator) {
List<CommerceNotificationQueueEntry> list = findByLtS(sentDate, 0, 1,
orderByComparator);
if (!list.isEmpty()) {
return list.get(0); // depends on control dependency: [if], data = [none]
}
return null;
} }
|
public class class_name {
public ClassicCounter<K2> setCounter(K1 o, Counter<K2> c) {
ClassicCounter<K2> old = getCounter(o);
total -= old.totalCount();
if (c instanceof ClassicCounter) {
map.put(o, (ClassicCounter<K2>) c);
} else {
map.put(o, new ClassicCounter<K2>(c));
}
total += c.totalCount();
return old;
} }
|
public class class_name {
public ClassicCounter<K2> setCounter(K1 o, Counter<K2> c) {
ClassicCounter<K2> old = getCounter(o);
total -= old.totalCount();
if (c instanceof ClassicCounter) {
map.put(o, (ClassicCounter<K2>) c);
// depends on control dependency: [if], data = [none]
} else {
map.put(o, new ClassicCounter<K2>(c));
// depends on control dependency: [if], data = [none]
}
total += c.totalCount();
return old;
} }
|
public class class_name {
static <K, V> RegularImmutableMap<K, V> fromEntryArray(int n, Entry<K, V>[] entryArray) {
checkPositionIndex(n, entryArray.length);
Entry<K, V>[] entries;
if (n == entryArray.length) {
entries = entryArray;
} else {
entries = createEntryArray(n);
}
int tableSize = Hashing.closedTableSize(n, MAX_LOAD_FACTOR);
ImmutableMapEntry<K, V>[] table = createEntryArray(tableSize);
int mask = tableSize - 1;
for (int entryIndex = 0; entryIndex < n; entryIndex++) {
Entry<K, V> entry = entryArray[entryIndex];
K key = entry.getKey();
V value = entry.getValue();
checkEntryNotNull(key, value);
int tableIndex = Hashing.smear(key.hashCode()) & mask;
@Nullable ImmutableMapEntry<K, V> existing = table[tableIndex];
// prepend, not append, so the entries can be immutable
ImmutableMapEntry<K, V> newEntry;
if (existing == null) {
boolean reusable =
entry instanceof ImmutableMapEntry && ((ImmutableMapEntry<K, V>) entry).isReusable();
newEntry =
reusable ? (ImmutableMapEntry<K, V>) entry : new ImmutableMapEntry<K, V>(key, value);
} else {
newEntry = new NonTerminalImmutableMapEntry<K, V>(key, value, existing);
}
table[tableIndex] = newEntry;
entries[entryIndex] = newEntry;
checkNoConflictInKeyBucket(key, newEntry, existing);
}
return new RegularImmutableMap<K, V>(entries, table, mask);
} }
|
public class class_name {
static <K, V> RegularImmutableMap<K, V> fromEntryArray(int n, Entry<K, V>[] entryArray) {
checkPositionIndex(n, entryArray.length);
Entry<K, V>[] entries;
if (n == entryArray.length) {
entries = entryArray; // depends on control dependency: [if], data = [none]
} else {
entries = createEntryArray(n); // depends on control dependency: [if], data = [(n]
}
int tableSize = Hashing.closedTableSize(n, MAX_LOAD_FACTOR);
ImmutableMapEntry<K, V>[] table = createEntryArray(tableSize);
int mask = tableSize - 1;
for (int entryIndex = 0; entryIndex < n; entryIndex++) {
Entry<K, V> entry = entryArray[entryIndex];
K key = entry.getKey();
V value = entry.getValue();
checkEntryNotNull(key, value); // depends on control dependency: [for], data = [none]
int tableIndex = Hashing.smear(key.hashCode()) & mask;
@Nullable ImmutableMapEntry<K, V> existing = table[tableIndex];
// prepend, not append, so the entries can be immutable
ImmutableMapEntry<K, V> newEntry;
if (existing == null) {
boolean reusable =
entry instanceof ImmutableMapEntry && ((ImmutableMapEntry<K, V>) entry).isReusable();
newEntry =
reusable ? (ImmutableMapEntry<K, V>) entry : new ImmutableMapEntry<K, V>(key, value); // depends on control dependency: [if], data = [none]
} else {
newEntry = new NonTerminalImmutableMapEntry<K, V>(key, value, existing); // depends on control dependency: [if], data = [none]
}
table[tableIndex] = newEntry; // depends on control dependency: [for], data = [none]
entries[entryIndex] = newEntry; // depends on control dependency: [for], data = [entryIndex]
checkNoConflictInKeyBucket(key, newEntry, existing); // depends on control dependency: [for], data = [none]
}
return new RegularImmutableMap<K, V>(entries, table, mask);
} }
|
public class class_name {
public Map<String, String> getInstanceTags() {
Map<String, String> map = new HashMap<>();
Instance instance = this.getInstance();
for (Tag tag : instance.getTags()) {
map.put(tag.getKey(), tag.getValue());
}
return map;
} }
|
public class class_name {
public Map<String, String> getInstanceTags() {
Map<String, String> map = new HashMap<>();
Instance instance = this.getInstance();
for (Tag tag : instance.getTags()) {
map.put(tag.getKey(), tag.getValue()); // depends on control dependency: [for], data = [tag]
}
return map;
} }
|
public class class_name {
public void setHostIds(java.util.Collection<String> hostIds) {
if (hostIds == null) {
this.hostIds = null;
return;
}
this.hostIds = new com.amazonaws.internal.SdkInternalList<String>(hostIds);
} }
|
public class class_name {
public void setHostIds(java.util.Collection<String> hostIds) {
if (hostIds == null) {
this.hostIds = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.hostIds = new com.amazonaws.internal.SdkInternalList<String>(hostIds);
} }
|
public class class_name {
private void handleFile(final InputGroup group) {
// Register a group start handler. The start handler will be called
// with the file name. Then we can open the file.
group.startHandler(new Handler<String>() {
@Override
public void handle(String fileName) {
final File file = new File(tempDir, String.format("temp-%s-%s", UUID.randomUUID().toString(), fileName));
input.vertx().fileSystem().open(file.getAbsolutePath(), new Handler<AsyncResult<AsyncFile>>() {
@Override
public void handle(AsyncResult<AsyncFile> result) {
if (result.succeeded()) {
handleFile(file.getAbsolutePath(), result.result(), group);
} else if (exceptionHandler != null) {
exceptionHandler.handle(result.cause());
}
}
});
}
});
} }
|
public class class_name {
private void handleFile(final InputGroup group) {
// Register a group start handler. The start handler will be called
// with the file name. Then we can open the file.
group.startHandler(new Handler<String>() {
@Override
public void handle(String fileName) {
final File file = new File(tempDir, String.format("temp-%s-%s", UUID.randomUUID().toString(), fileName));
input.vertx().fileSystem().open(file.getAbsolutePath(), new Handler<AsyncResult<AsyncFile>>() {
@Override
public void handle(AsyncResult<AsyncFile> result) {
if (result.succeeded()) {
handleFile(file.getAbsolutePath(), result.result(), group); // depends on control dependency: [if], data = [none]
} else if (exceptionHandler != null) {
exceptionHandler.handle(result.cause()); // depends on control dependency: [if], data = [none]
}
}
});
}
});
} }
|
public class class_name {
@Override
public final int compareTo(final Angle angle)
{
int comparison;
comparison = getField(Field.DEGREES) - angle.getField(Field.DEGREES);
if (comparison == 0)
{
comparison = getField(Field.MINUTES) - angle.getField(Field.MINUTES);
}
if (comparison == 0)
{
comparison = getField(Field.SECONDS) - angle.getField(Field.SECONDS);
}
return comparison;
} }
|
public class class_name {
@Override
public final int compareTo(final Angle angle)
{
int comparison;
comparison = getField(Field.DEGREES) - angle.getField(Field.DEGREES);
if (comparison == 0)
{
comparison = getField(Field.MINUTES) - angle.getField(Field.MINUTES); // depends on control dependency: [if], data = [none]
}
if (comparison == 0)
{
comparison = getField(Field.SECONDS) - angle.getField(Field.SECONDS); // depends on control dependency: [if], data = [none]
}
return comparison;
} }
|
public class class_name {
public void prepare() throws IOException
{
if (locks != null)
{
bckFiles = new File[files.length];
for (int i = 0,length = files.length; i < length; i++)
{
File file = files[i];
if (file.exists())
{
String fileName = file.getName().contains(".") ? file.getName().substring(0, file.getName().indexOf("."))
: file.getName();
bckFiles[i] =
new File(file.getParent(), fileName + "." + System.currentTimeMillis() + "_" + SEQUENCE.incrementAndGet());
move(file, bckFiles[i]);
}
}
}
} }
|
public class class_name {
public void prepare() throws IOException
{
if (locks != null)
{
bckFiles = new File[files.length];
for (int i = 0,length = files.length; i < length; i++)
{
File file = files[i];
if (file.exists())
{
String fileName = file.getName().contains(".") ? file.getName().substring(0, file.getName().indexOf("."))
: file.getName();
bckFiles[i] =
new File(file.getParent(), fileName + "." + System.currentTimeMillis() + "_" + SEQUENCE.incrementAndGet());
// depends on control dependency: [if], data = [none]
move(file, bckFiles[i]);
// depends on control dependency: [if], data = [none]
}
}
}
} }
|
public class class_name {
public static void sleep(final long ms) {
try {
Thread.sleep(ms);
} catch (InterruptedException iex) {
Thread.currentThread().interrupt();
}
} }
|
public class class_name {
public static void sleep(final long ms) {
try {
Thread.sleep(ms); // depends on control dependency: [try], data = [none]
} catch (InterruptedException iex) {
Thread.currentThread().interrupt();
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
@Override
public RepositoryResult<Statement> getStatements(Resource subj, URI pred, Value obj, boolean includeInferred, Resource... contexts) throws RepositoryException {
if (contexts == null) {
contexts = new Resource[] { null };
}
try {
if (isQuadMode()) {
StringBuilder sb = new StringBuilder();
sb.append("SELECT * WHERE { GRAPH ?ctx { ?s ?p ?o } filter (?ctx = (");
boolean first = true;
for (Resource context : contexts) {
if (first) {
first = !first;
}
else {
sb.append(",");
}
if (notNull(context)) {
sb.append("IRI(\"" + context.toString() + "\")");
} else {
sb.append("IRI(\""+DEFAULT_GRAPH_URI+"\")");
}
}
sb.append(") ) }");
TupleQuery tupleQuery = prepareTupleQuery(sb.toString());
tupleQuery.setIncludeInferred(includeInferred);
setBindings(tupleQuery, subj, pred, obj, (Resource) null);
TupleQueryResult qRes = tupleQuery.evaluate();
return new RepositoryResult<Statement>(
new ExceptionConvertingIteration<Statement, RepositoryException>(
toStatementIteration(qRes, subj, pred, obj)) {
@Override
protected RepositoryException convert(Exception e) {
return new RepositoryException(e);
}
});
} else if (subj != null && pred != null && obj != null) {
if (hasStatement(subj, pred, obj, includeInferred, contexts)) {
Statement st = new StatementImpl(subj, pred, obj);
CloseableIteration<Statement, RepositoryException> cursor;
cursor = new SingletonIteration<Statement, RepositoryException>(st);
return new RepositoryResult<Statement>(cursor);
} else {
return new RepositoryResult<Statement>(new EmptyIteration<Statement, RepositoryException>());
}
}
else {
MarkLogicGraphQuery query = prepareGraphQuery(EVERYTHING);
setBindings(query, subj, pred, obj, contexts);
GraphQueryResult result = query.evaluate();
return new RepositoryResult<Statement>(
new ExceptionConvertingIteration<Statement, RepositoryException>(result) {
@Override
protected RepositoryException convert(Exception e) {
return new RepositoryException(e);
}
});
}
} catch (MalformedQueryException e) {
throw new RepositoryException(e);
} catch (QueryEvaluationException e) {
throw new RepositoryException(e);
}
} }
|
public class class_name {
    /**
     * Returns all statements matching the (subj, pred, obj) pattern within the
     * given contexts. A null {@code contexts} array is treated as the single
     * default (null) context.
     *
     * <p>Three strategies are used, in order:
     * <ul>
     *   <li>quad mode: a SPARQL SELECT over GRAPH with a filter on the context
     *       IRIs (a null context maps to DEFAULT_GRAPH_URI);</li>
     *   <li>fully bound pattern: an existence check returning a singleton or
     *       empty result;</li>
     *   <li>otherwise: a graph query over EVERYTHING with the known terms bound.</li>
     * </ul>
     *
     * @throws RepositoryException wrapping malformed-query or evaluation errors
     */
    @Override
    public RepositoryResult<Statement> getStatements(Resource subj, URI pred, Value obj, boolean includeInferred, Resource... contexts) throws RepositoryException {
        if (contexts == null) {
            contexts = new Resource[] { null }; // depends on control dependency: [if], data = [none]
        }
        try {
            if (isQuadMode()) {
                // Build "?ctx = (IRI(...), IRI(...))" filter over the requested contexts.
                StringBuilder sb = new StringBuilder();
                sb.append("SELECT * WHERE { GRAPH ?ctx { ?s ?p ?o } filter (?ctx = (");
                boolean first = true;
                for (Resource context : contexts) {
                    if (first) {
                        first = !first;
                    }
                    else {
                        sb.append(",");
                    }
                    if (notNull(context)) {
                        sb.append("IRI(\"" + context.toString() + "\")");
                    } else {
                        sb.append("IRI(\""+DEFAULT_GRAPH_URI+"\")");
                    }
                }
                sb.append(") ) }");
                TupleQuery tupleQuery = prepareTupleQuery(sb.toString());
                tupleQuery.setIncludeInferred(includeInferred);
                // Context is handled by the filter above, so bind no context here.
                setBindings(tupleQuery, subj, pred, obj, (Resource) null);
                TupleQueryResult qRes = tupleQuery.evaluate();
                return new RepositoryResult<Statement>(
                        new ExceptionConvertingIteration<Statement, RepositoryException>(
                                toStatementIteration(qRes, subj, pred, obj)) {
                            @Override
                            protected RepositoryException convert(Exception e) {
                                return new RepositoryException(e);
                            }
                        });
            } else if (subj != null && pred != null && obj != null) {
                // Fully bound pattern: a cheap existence probe decides between
                // a one-statement result and an empty one.
                if (hasStatement(subj, pred, obj, includeInferred, contexts)) {
                    Statement st = new StatementImpl(subj, pred, obj);
                    CloseableIteration<Statement, RepositoryException> cursor;
                    cursor = new SingletonIteration<Statement, RepositoryException>(st);
                    return new RepositoryResult<Statement>(cursor);
                } else {
                    return new RepositoryResult<Statement>(new EmptyIteration<Statement, RepositoryException>());
                }
            }
            else {
                MarkLogicGraphQuery query = prepareGraphQuery(EVERYTHING);
                setBindings(query, subj, pred, obj, contexts);
                GraphQueryResult result = query.evaluate();
                return new RepositoryResult<Statement>(
                        new ExceptionConvertingIteration<Statement, RepositoryException>(result) {
                            @Override
                            protected RepositoryException convert(Exception e) {
                                return new RepositoryException(e);
                            }
                        });
            }
        } catch (MalformedQueryException e) {
            throw new RepositoryException(e);
        } catch (QueryEvaluationException e) {
            throw new RepositoryException(e);
        }
    } }
|
public class class_name {
    /**
     * Extracts the table's current selection as a set.
     *
     * @param table the table whose value to read
     * @return an empty set when nothing is selected, the non-null elements of a
     *         multi-select collection value, or a singleton set for a
     *         single-select value
     */
    @SuppressWarnings("unchecked")
    public static <T> Set<T> getTableValue(final Table table) {
        final Object raw = table.getValue();
        if (raw == null) {
            return Collections.emptySet();
        }
        if (raw instanceof Collection) {
            // Multi-select: drop null placeholders, keep distinct ids.
            return ((Collection<T>) raw).stream()
                    .filter(Objects::nonNull)
                    .collect(Collectors.toSet());
        }
        return Collections.singleton((T) raw);
    } }
|
public class class_name {
    /**
     * Extracts the table's current selection as a set: an empty set when nothing
     * is selected, the non-null elements for a multi-select collection value,
     * or a singleton set for a single-select value.
     */
    @SuppressWarnings("unchecked")
    public static <T> Set<T> getTableValue(final Table table) {
        final Object value = table.getValue();
        Set<T> idsReturn;
        if (value == null) {
            idsReturn = Collections.emptySet(); // depends on control dependency: [if], data = [none]
        } else if (value instanceof Collection) {
            final Collection<T> ids = (Collection<T>) value;
            idsReturn = ids.stream().filter(Objects::nonNull).collect(Collectors.toSet()); // depends on control dependency: [if], data = [none]
        } else {
            final T id = (T) value;
            idsReturn = Collections.singleton(id); // depends on control dependency: [if], data = [none]
        }
        return idsReturn;
    } }
public class class_name {
    /**
     * Lazily resolves and caches the EClass for IfcLightIntensityDistribution
     * from the registered Ifc2x3tc1 package (classifier index 292).
     * Not synchronized; assumes single-threaded metamodel access — TODO confirm.
     */
    public EClass getIfcLightIntensityDistribution() {
        EClass result = ifcLightIntensityDistributionEClass;
        if (result == null) {
            result = (EClass) EPackage.Registry.INSTANCE
                    .getEPackage(Ifc2x3tc1Package.eNS_URI)
                    .getEClassifiers()
                    .get(292);
            ifcLightIntensityDistributionEClass = result;
        }
        return result;
    } }
|
public class class_name {
    /**
     * Lazily resolves and caches the EClass for IfcLightIntensityDistribution
     * from the registered Ifc2x3tc1 package (classifier index 292).
     * Not synchronized; assumes single-threaded metamodel access — TODO confirm.
     */
    public EClass getIfcLightIntensityDistribution() {
        if (ifcLightIntensityDistributionEClass == null) {
            ifcLightIntensityDistributionEClass = (EClass) EPackage.Registry.INSTANCE
                    .getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(292);
            // depends on control dependency: [if], data = [none]
        }
        return ifcLightIntensityDistributionEClass;
    } }
|
public class class_name {
    /**
     * Bulk-updates the given DB profiles on the appliance. Only the updatable
     * fields (name, interpretquery, stickiness, kcdaccount, conmultiplex) are
     * copied into the request objects.
     *
     * @param client    the nitro service used to issue the request
     * @param resources profiles to update; may be null or empty
     * @return the bulk response, or null when there is nothing to update
     * @throws Exception propagated from the underlying bulk request
     */
    public static base_responses update(nitro_service client, dbdbprofile resources[]) throws Exception {
        if (resources == null || resources.length == 0) {
            return null;
        }
        dbdbprofile[] updateresources = new dbdbprofile[resources.length];
        for (int i = 0; i < resources.length; i++) {
            dbdbprofile copy = new dbdbprofile();
            copy.name = resources[i].name;
            copy.interpretquery = resources[i].interpretquery;
            copy.stickiness = resources[i].stickiness;
            copy.kcdaccount = resources[i].kcdaccount;
            copy.conmultiplex = resources[i].conmultiplex;
            updateresources[i] = copy;
        }
        return update_bulk_request(client, updateresources);
    } }
|
public class class_name {
    /**
     * Bulk-updates the given DB profiles: copies the updatable fields into fresh
     * request objects and issues one bulk request. Returns null when
     * {@code resources} is null or empty.
     */
    public static base_responses update(nitro_service client, dbdbprofile resources[]) throws Exception {
        base_responses result = null;
        if (resources != null && resources.length > 0) {
            dbdbprofile updateresources[] = new dbdbprofile[resources.length];
            for (int i=0;i<resources.length;i++){
                updateresources[i] = new dbdbprofile(); // depends on control dependency: [for], data = [i]
                updateresources[i].name = resources[i].name; // depends on control dependency: [for], data = [i]
                updateresources[i].interpretquery = resources[i].interpretquery; // depends on control dependency: [for], data = [i]
                updateresources[i].stickiness = resources[i].stickiness; // depends on control dependency: [for], data = [i]
                updateresources[i].kcdaccount = resources[i].kcdaccount; // depends on control dependency: [for], data = [i]
                updateresources[i].conmultiplex = resources[i].conmultiplex; // depends on control dependency: [for], data = [i]
            }
            result = update_bulk_request(client, updateresources);
        }
        return result;
    } }
|
public class class_name {
    /**
     * Compares in swapped order: the int component ({@code second}) first,
     * then the double component ({@code first}) as tie-breaker.
     *
     * @param other the pair to compare against
     * @return a negative, zero, or positive value per the Comparable contract
     */
    public int compareSwappedTo(DoubleIntPair other) {
        // Integer.compare avoids the overflow that plain subtraction can
        // produce for operands of opposite sign.
        int fdiff = Integer.compare(this.second, other.second);
        if (fdiff != 0) {
            return fdiff;
        }
        // BUG FIX: the original compared `second` again here, which is always
        // equal at this point (so the method always returned 0 on ties);
        // the tie-breaker must use the `first` component.
        return Double.compare(this.first, other.first);
    } }
|
public class class_name {
    /**
     * Compares in swapped order: the int component ({@code second}) first,
     * then the double component ({@code first}) as tie-breaker.
     */
    public int compareSwappedTo(DoubleIntPair other) {
        // Integer.compare avoids int-subtraction overflow for operands of
        // opposite sign.
        int fdiff = Integer.compare(this.second, other.second);
        if(fdiff != 0) {
            return fdiff; // depends on control dependency: [if], data = [none]
        }
        // BUG FIX: the original compared `second` with itself here (always
        // equal at this point); the tie-breaker must use `first`.
        return Double.compare(this.first, other.first);
    } }
|
public class class_name {
    /**
     * Returns the reflected property pairs for the given entity class, using
     * the shared reflected-entity cache unless caching is disabled.
     *
     * @param clazzType the entity class to reflect over
     * @return a map from property name to its getter/setter pair
     */
    protected static HashMap<String, PropertyPair> generatePropertyPairs(final Class<?> clazzType) {
        if (TableServiceEntity.isReflectedEntityCacheDisabled()) {
            // Caching disabled: always reflect from scratch.
            return PropertyPair.generatePropertyPairsHelper(clazzType);
        }
        // FIX: removed the dead `new HashMap<>()` initialization that the
        // original allocated and immediately overwrote with the cache lookup.
        HashMap<String, PropertyPair> props = TableServiceEntity.getReflectedEntityCache().get(clazzType);
        if (props == null) {
            props = PropertyPair.generatePropertyPairsHelper(clazzType);
            TableServiceEntity.getReflectedEntityCache().put(clazzType, props);
        }
        return props;
    } }
|
public class class_name {
    /**
     * Returns the reflected property pairs for the given entity class, using the
     * shared reflected-entity cache unless caching is disabled.
     */
    protected static HashMap<String, PropertyPair> generatePropertyPairs(final Class<?> clazzType) {
        if (!TableServiceEntity.isReflectedEntityCacheDisabled()) {
            // NOTE(review): this allocation is dead — it is immediately
            // overwritten by the cache lookup on the next line.
            HashMap<String, PropertyPair> props = new HashMap<String, PropertyPair>();
            props = TableServiceEntity.getReflectedEntityCache().get(clazzType); // depends on control dependency: [if], data = [none]
            if (props == null) {
                props = PropertyPair.generatePropertyPairsHelper(clazzType); // depends on control dependency: [if], data = [none]
                TableServiceEntity.getReflectedEntityCache().put(clazzType, props); // depends on control dependency: [if], data = [none]
            }
            return props; // depends on control dependency: [if], data = [none]
        }
        else {
            return PropertyPair.generatePropertyPairsHelper(clazzType); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Obtains a cell for the given item, reusing a pooled cell when one is
     * available and otherwise creating a new one via the cell factory.
     *
     * @param item the item the returned cell should display
     * @return a cell bound to {@code item}
     */
    public C getCell(T item) {
        final C recycled = pool.poll();
        if (recycled == null) {
            // Pool exhausted: build a fresh cell.
            return cellFactory.apply(item);
        }
        recycled.updateItem(item);
        return recycled;
    } }
|
public class class_name {
    /**
     * Obtains a cell for the given item: reuses a pooled cell when one is
     * available, otherwise creates a new one via the cell factory.
     */
    public C getCell(T item) {
        C cell = pool.poll();
        if(cell != null) {
            cell.updateItem(item); // depends on control dependency: [if], data = [none]
        } else {
            cell = cellFactory.apply(item); // depends on control dependency: [if], data = [none]
        }
        return cell;
    } }
|
public class class_name {
    /**
     * Backfills a missing weight-init function on a legacy-format BaseLayer by
     * re-reading the raw JSON configuration. Only acts when the layer is a
     * BaseLayer whose weightInitFn is null; otherwise a no-op.
     *
     * <p>The JSON is navigated as vertices -> [layerName] -> LayerVertex ->
     * layerConf -> layer -> (single wrapper element), from which the legacy
     * "weightInit" enum name and optional "dist" distribution are read and
     * converted into an IWeightInit set on the layer. Any missing node along
     * the path aborts silently; JSON parse failures are logged, not thrown.
     */
    private static void handleLegacyWeightInitFromJson(String json, Layer layer, ObjectMapper mapper, JsonNode vertices) {
        if (layer instanceof BaseLayer && ((BaseLayer) layer).getWeightInitFn() == null) {
            String layerName = layer.getLayerName();
            try {
                if (vertices == null) {
                    // Caller did not pre-parse the graph: parse it here once.
                    JsonNode jsonNode = mapper.readTree(json);
                    vertices = jsonNode.get("vertices");
                }
                JsonNode vertexNode = vertices.get(layerName);
                JsonNode layerVertexNode = vertexNode.get("LayerVertex");
                if (layerVertexNode == null || !layerVertexNode.has("layerConf")
                        || !layerVertexNode.get("layerConf").has("layer")) {
                    return;
                }
                JsonNode layerWrapperNode = layerVertexNode.get("layerConf").get("layer");
                if (layerWrapperNode == null || layerWrapperNode.size() != 1) {
                    return;
                }
                JsonNode layerNode = layerWrapperNode.elements().next();
                JsonNode weightInit = layerNode.get("weightInit"); //Should only have 1 element: "dense", "output", etc
                JsonNode distribution = layerNode.get("dist");
                Distribution dist = null;
                if(distribution != null) {
                    dist = mapper.treeToValue(distribution, Distribution.class);
                }
                if (weightInit != null) {
                    final IWeightInit wi = WeightInit.valueOf(weightInit.asText()).getWeightInitFunction(dist);
                    ((BaseLayer) layer).setWeightInitFn(wi);
                }
            } catch (IOException e) {
                log.warn("Layer with null ActivationFn field or pre-0.7.2 activation function detected: could not parse JSON",
                        e);
            }
        }
    } }
|
public class class_name {
    /**
     * Backfills a missing weight-init function on a legacy-format BaseLayer by
     * re-reading the raw JSON configuration (vertices -> layerName ->
     * LayerVertex -> layerConf -> layer). Missing nodes abort silently; JSON
     * parse failures are logged, not thrown.
     */
    private static void handleLegacyWeightInitFromJson(String json, Layer layer, ObjectMapper mapper, JsonNode vertices) {
        if (layer instanceof BaseLayer && ((BaseLayer) layer).getWeightInitFn() == null) {
            String layerName = layer.getLayerName();
            try {
                if (vertices == null) {
                    JsonNode jsonNode = mapper.readTree(json);
                    vertices = jsonNode.get("vertices"); // depends on control dependency: [if], data = [none]
                }
                JsonNode vertexNode = vertices.get(layerName);
                JsonNode layerVertexNode = vertexNode.get("LayerVertex");
                if (layerVertexNode == null || !layerVertexNode.has("layerConf")
                        || !layerVertexNode.get("layerConf").has("layer")) {
                    return; // depends on control dependency: [if], data = [none]
                }
                JsonNode layerWrapperNode = layerVertexNode.get("layerConf").get("layer");
                if (layerWrapperNode == null || layerWrapperNode.size() != 1) {
                    return; // depends on control dependency: [if], data = [none]
                }
                JsonNode layerNode = layerWrapperNode.elements().next();
                JsonNode weightInit = layerNode.get("weightInit"); //Should only have 1 element: "dense", "output", etc
                JsonNode distribution = layerNode.get("dist");
                Distribution dist = null;
                if(distribution != null) {
                    dist = mapper.treeToValue(distribution, Distribution.class); // depends on control dependency: [if], data = [(distribution]
                }
                if (weightInit != null) {
                    final IWeightInit wi = WeightInit.valueOf(weightInit.asText()).getWeightInitFunction(dist);
                    ((BaseLayer) layer).setWeightInitFn(wi); // depends on control dependency: [if], data = [none]
                }
            } catch (IOException e) {
                log.warn("Layer with null ActivationFn field or pre-0.7.2 activation function detected: could not parse JSON",
                        e);
            } // depends on control dependency: [catch], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Fetches the credential from the request context and returns it typed as
     * the requested class.
     *
     * @param context the current request context
     * @param clazz   the expected credential type
     * @return the credential cast to {@code clazz}, or null when absent
     * @throws ClassCastException when a credential exists but is not of the
     *                            requested type
     */
    public static <T extends Credential> T getCredential(final RequestContext context, final @NonNull Class<T> clazz) {
        val credential = getCredential(context);
        if (credential == null) {
            return null;
        }
        if (clazz.isAssignableFrom(credential.getClass())) {
            return (T) credential;
        }
        throw new ClassCastException("credential [" + credential.getId()
            + " is of type " + credential.getClass()
            + " when we were expecting " + clazz);
    } }
|
public class class_name {
    /**
     * Fetches the credential from the request context and returns it typed as
     * the requested class: null when absent, ClassCastException when present
     * but of an unexpected type.
     */
    public static <T extends Credential> T getCredential(final RequestContext context, final @NonNull Class<T> clazz) {
        val credential = getCredential(context);
        if (credential == null) {
            return null; // depends on control dependency: [if], data = [none]
        }
        if (!clazz.isAssignableFrom(credential.getClass())) {
            throw new ClassCastException("credential [" + credential.getId()
                + " is of type " + credential.getClass()
                + " when we were expecting " + clazz);
        }
        return (T) credential;
    } }
|
public class class_name {
    /**
     * Unlocks every cached resource lock belonging to the given project.
     * Edition locks are always released; system locks only when
     * {@code removeSystemLocks} is set.
     *
     * @param projectId         the project whose locks should be released
     * @param removeSystemLocks whether system locks are released as well
     */
    public void removeResourcesInProject(CmsUUID projectId, boolean removeSystemLocks) {
        for (CmsLock cachedLock : OpenCms.getMemoryMonitor().getAllCachedLocks()) {
            if (removeSystemLocks && cachedLock.getSystemLock().getProjectId().equals(projectId)) {
                unlockResource(cachedLock.getResourceName(), true);
            }
            if (cachedLock.getEditionLock().getProjectId().equals(projectId)) {
                unlockResource(cachedLock.getResourceName(), false);
            }
        }
    } }
|
public class class_name {
    /**
     * Unlocks every cached resource lock belonging to the given project:
     * edition locks always, system locks only when {@code removeSystemLocks}
     * is set.
     */
    public void removeResourcesInProject(CmsUUID projectId, boolean removeSystemLocks) {
        Iterator<CmsLock> itLocks = OpenCms.getMemoryMonitor().getAllCachedLocks().iterator();
        while (itLocks.hasNext()) {
            CmsLock currentLock = itLocks.next();
            if (removeSystemLocks && currentLock.getSystemLock().getProjectId().equals(projectId)) {
                unlockResource(currentLock.getResourceName(), true); // depends on control dependency: [if], data = [none]
            }
            if (currentLock.getEditionLock().getProjectId().equals(projectId)) {
                unlockResource(currentLock.getResourceName(), false); // depends on control dependency: [if], data = [none]
            }
        }
    } }
|
public class class_name {
    /**
     * Zips the contents of a folder into an archive.
     *
     * @param folder   the directory to zip; must be an existing directory
     * @param fileName target archive path, or null to use the folder path
     *                 plus {@code ZIP_EXT}
     * @return true when the folder contents were added successfully,
     *         false on a non-directory input or any I/O failure
     */
    public static boolean zipFolder(File folder, String fileName){
        if (!folder.isDirectory()) {
            return false;
        }
        if (fileName == null) {
            fileName = folder.getAbsolutePath() + ZIP_EXT;
        }
        // FIX: try-with-resources replaces the original's manual close in both
        // the try body and the finally block (double close, with the finally's
        // close exception silently swallowed).
        try (ZipArchiveOutputStream zipOutput = new ZipArchiveOutputStream(new File(fileName))) {
            return addFolderContentToZip(folder, zipOutput, "");
        } catch (IOException e) {
            // Preserved from the original; a logger would be preferable.
            e.printStackTrace();
            return false;
        }
    } }
|
public class class_name {
    /**
     * Zips the contents of a folder into an archive at {@code fileName}
     * (defaulting to the folder path plus {@code ZIP_EXT}). Returns false for
     * a non-directory input or any I/O failure.
     * NOTE(review): the stream is closed both in the try body and the finally
     * block; try-with-resources would avoid the double close.
     */
    public static boolean zipFolder(File folder, String fileName){
        boolean success = false;
        if(!folder.isDirectory()){
            return false; // depends on control dependency: [if], data = [none]
        }
        if(fileName == null){
            fileName = folder.getAbsolutePath()+ZIP_EXT; // depends on control dependency: [if], data = [none]
        }
        ZipArchiveOutputStream zipOutput = null;
        try {
            zipOutput = new ZipArchiveOutputStream(new File(fileName)); // depends on control dependency: [try], data = [none]
            success = addFolderContentToZip(folder,zipOutput,""); // depends on control dependency: [try], data = [none]
            zipOutput.close(); // depends on control dependency: [try], data = [none]
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        } // depends on control dependency: [catch], data = [none]
        finally{
            try {
                if(zipOutput != null){
                    zipOutput.close(); // depends on control dependency: [if], data = [none]
                }
            } catch (IOException e) {} // depends on control dependency: [catch], data = [none]
        }
        return success;
    } }
|
public class class_name {
    /**
     * Marshals each field of a DescribeDimensionKeysRequest to the protocol
     * marshaller using its field binding. The call order defines the wire
     * layout, so it must not be reordered. Any failure is wrapped in an
     * SdkClientException; a null request is rejected up front.
     */
    public void marshall(DescribeDimensionKeysRequest describeDimensionKeysRequest, ProtocolMarshaller protocolMarshaller) {
        if (describeDimensionKeysRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(describeDimensionKeysRequest.getServiceType(), SERVICETYPE_BINDING);
            protocolMarshaller.marshall(describeDimensionKeysRequest.getIdentifier(), IDENTIFIER_BINDING);
            protocolMarshaller.marshall(describeDimensionKeysRequest.getStartTime(), STARTTIME_BINDING);
            protocolMarshaller.marshall(describeDimensionKeysRequest.getEndTime(), ENDTIME_BINDING);
            protocolMarshaller.marshall(describeDimensionKeysRequest.getMetric(), METRIC_BINDING);
            protocolMarshaller.marshall(describeDimensionKeysRequest.getPeriodInSeconds(), PERIODINSECONDS_BINDING);
            protocolMarshaller.marshall(describeDimensionKeysRequest.getGroupBy(), GROUPBY_BINDING);
            protocolMarshaller.marshall(describeDimensionKeysRequest.getPartitionBy(), PARTITIONBY_BINDING);
            protocolMarshaller.marshall(describeDimensionKeysRequest.getFilter(), FILTER_BINDING);
            protocolMarshaller.marshall(describeDimensionKeysRequest.getMaxResults(), MAXRESULTS_BINDING);
            protocolMarshaller.marshall(describeDimensionKeysRequest.getNextToken(), NEXTTOKEN_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    } }
|
public class class_name {
    /**
     * Marshals each field of a DescribeDimensionKeysRequest to the protocol
     * marshaller using its field binding; the call order defines the wire
     * layout. Failures are wrapped in an SdkClientException.
     */
    public void marshall(DescribeDimensionKeysRequest describeDimensionKeysRequest, ProtocolMarshaller protocolMarshaller) {
        if (describeDimensionKeysRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(describeDimensionKeysRequest.getServiceType(), SERVICETYPE_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(describeDimensionKeysRequest.getIdentifier(), IDENTIFIER_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(describeDimensionKeysRequest.getStartTime(), STARTTIME_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(describeDimensionKeysRequest.getEndTime(), ENDTIME_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(describeDimensionKeysRequest.getMetric(), METRIC_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(describeDimensionKeysRequest.getPeriodInSeconds(), PERIODINSECONDS_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(describeDimensionKeysRequest.getGroupBy(), GROUPBY_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(describeDimensionKeysRequest.getPartitionBy(), PARTITIONBY_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(describeDimensionKeysRequest.getFilter(), FILTER_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(describeDimensionKeysRequest.getMaxResults(), MAXRESULTS_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(describeDimensionKeysRequest.getNextToken(), NEXTTOKEN_BINDING); // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Reports whether {@code whileMatchFilter} occurs (by reference identity)
     * within the given filter, recursing into nested FilterLists.
     *
     * @param filter           the filter tree to search
     * @param whileMatchFilter the instance to look for
     * @return true when found anywhere in the tree
     */
    private boolean hasFilter(Filter filter, WhileMatchFilter whileMatchFilter) {
        if (filter == whileMatchFilter) {
            return true;
        }
        if (!(filter instanceof FilterList)) {
            return false;
        }
        for (Filter child : ((FilterList) filter).getFilters()) {
            if (hasFilter(child, whileMatchFilter)) {
                return true;
            }
        }
        return false;
    } }
|
public class class_name {
    /**
     * Reports whether {@code whileMatchFilter} occurs (by reference identity)
     * within the given filter, recursing into nested FilterLists.
     */
    private boolean hasFilter(Filter filter, WhileMatchFilter whileMatchFilter) {
        if (filter == whileMatchFilter) {
            return true; // depends on control dependency: [if], data = [none]
        }
        if (filter instanceof FilterList) {
            FilterList list = (FilterList) filter;
            for (Filter subFilter : list.getFilters()) {
                if (hasFilter(subFilter, whileMatchFilter)) {
                    return true; // depends on control dependency: [if], data = [none]
                }
            }
        }
        return false;
    } }
|
public class class_name {
    /**
     * Marshals each field of a ThingTypeMetadata to the protocol marshaller
     * using its field binding; the call order defines the wire layout.
     * A null input is rejected; any failure is wrapped in an SdkClientException.
     */
    public void marshall(ThingTypeMetadata thingTypeMetadata, ProtocolMarshaller protocolMarshaller) {
        if (thingTypeMetadata == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(thingTypeMetadata.getDeprecated(), DEPRECATED_BINDING);
            protocolMarshaller.marshall(thingTypeMetadata.getDeprecationDate(), DEPRECATIONDATE_BINDING);
            protocolMarshaller.marshall(thingTypeMetadata.getCreationDate(), CREATIONDATE_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    } }
|
public class class_name {
    /**
     * Marshals each field of a ThingTypeMetadata to the protocol marshaller
     * using its field binding; failures are wrapped in an SdkClientException.
     */
    public void marshall(ThingTypeMetadata thingTypeMetadata, ProtocolMarshaller protocolMarshaller) {
        if (thingTypeMetadata == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(thingTypeMetadata.getDeprecated(), DEPRECATED_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(thingTypeMetadata.getDeprecationDate(), DEPRECATIONDATE_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(thingTypeMetadata.getCreationDate(), CREATIONDATE_BINDING); // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Wraps a DML plan fragment with the coordinator-side nodes: a
     * send/receive pair below either a LIMIT 1 (replicated tables, where every
     * partition reports the same modified-tuple count so any one result
     * suffices) or a SUM aggregate (partitioned tables, where per-partition
     * counts must be added), topped by a SendPlanNode.
     *
     * @param dmlRoot      the partition-local DML plan root
     * @param isReplicated whether the target table is replicated
     * @return the new coordinator-side plan root (a SendPlanNode)
     */
    private static AbstractPlanNode addCoordinatorToDMLNode(
            AbstractPlanNode dmlRoot, boolean isReplicated) {
        dmlRoot = SubPlanAssembler.addSendReceivePair(dmlRoot);
        AbstractPlanNode sumOrLimitNode;
        if (isReplicated) {
            // Replicated table DML result doesn't need to be summed. All partitions should
            // modify the same number of tuples in replicated table, so just pick the result from
            // any partition.
            LimitPlanNode limitNode = new LimitPlanNode();
            sumOrLimitNode = limitNode;
            limitNode.setLimit(1);
        }
        else {
            // create the nodes being pushed on top of dmlRoot.
            AggregatePlanNode countNode = new AggregatePlanNode();
            sumOrLimitNode = countNode;
            // configure the count aggregate (sum) node to produce a single
            // output column containing the result of the sum.
            // Create a TVE that should match the tuple count input column
            // This TVE is magic.
            // really really need to make this less hard-wired
            TupleValueExpression count_tve = new TupleValueExpression(
                    AbstractParsedStmt.TEMP_TABLE_NAME,
                    AbstractParsedStmt.TEMP_TABLE_NAME,
                    "modified_tuples",
                    "modified_tuples",
                    0);
            count_tve.setValueType(VoltType.BIGINT);
            count_tve.setValueSize(VoltType.BIGINT.getLengthInBytesForFixedTypes());
            countNode.addAggregate(ExpressionType.AGGREGATE_SUM, false, 0, count_tve);
            // The output column. Not really based on a TVE (it is really the
            // count expression represented by the count configured above). But
            // this is sufficient for now. This looks identical to the above
            // TVE but it's logically different so we'll create a fresh one.
            TupleValueExpression tve = new TupleValueExpression(
                    AbstractParsedStmt.TEMP_TABLE_NAME,
                    AbstractParsedStmt.TEMP_TABLE_NAME,
                    "modified_tuples",
                    "modified_tuples",
                    0);
            tve.setValueType(VoltType.BIGINT);
            tve.setValueSize(VoltType.BIGINT.getLengthInBytesForFixedTypes());
            NodeSchema count_schema = new NodeSchema();
            count_schema.addColumn(
                    AbstractParsedStmt.TEMP_TABLE_NAME,
                    AbstractParsedStmt.TEMP_TABLE_NAME,
                    "modified_tuples",
                    "modified_tuples",
                    tve);
            countNode.setOutputSchema(count_schema);
        }
        // connect the nodes to build the graph
        sumOrLimitNode.addAndLinkChild(dmlRoot);
        SendPlanNode sendNode = new SendPlanNode();
        sendNode.addAndLinkChild(sumOrLimitNode);
        return sendNode;
    } }
|
public class class_name {
    /**
     * Wraps a DML plan fragment with the coordinator-side nodes: a
     * send/receive pair below either a LIMIT 1 (replicated tables) or a SUM
     * aggregate over per-partition modified-tuple counts (partitioned tables),
     * topped by a SendPlanNode that becomes the new plan root.
     */
    private static AbstractPlanNode addCoordinatorToDMLNode(
            AbstractPlanNode dmlRoot, boolean isReplicated) {
        dmlRoot = SubPlanAssembler.addSendReceivePair(dmlRoot);
        AbstractPlanNode sumOrLimitNode;
        if (isReplicated) {
            // Replicated table DML result doesn't need to be summed. All partitions should
            // modify the same number of tuples in replicated table, so just pick the result from
            // any partition.
            LimitPlanNode limitNode = new LimitPlanNode();
            sumOrLimitNode = limitNode; // depends on control dependency: [if], data = [none]
            limitNode.setLimit(1); // depends on control dependency: [if], data = [none]
        }
        else {
            // create the nodes being pushed on top of dmlRoot.
            AggregatePlanNode countNode = new AggregatePlanNode();
            sumOrLimitNode = countNode; // depends on control dependency: [if], data = [none]
            // configure the count aggregate (sum) node to produce a single
            // output column containing the result of the sum.
            // Create a TVE that should match the tuple count input column
            // This TVE is magic.
            // really really need to make this less hard-wired
            TupleValueExpression count_tve = new TupleValueExpression(
                    AbstractParsedStmt.TEMP_TABLE_NAME,
                    AbstractParsedStmt.TEMP_TABLE_NAME,
                    "modified_tuples",
                    "modified_tuples",
                    0);
            count_tve.setValueType(VoltType.BIGINT); // depends on control dependency: [if], data = [none]
            count_tve.setValueSize(VoltType.BIGINT.getLengthInBytesForFixedTypes()); // depends on control dependency: [if], data = [none]
            countNode.addAggregate(ExpressionType.AGGREGATE_SUM, false, 0, count_tve); // depends on control dependency: [if], data = [none]
            // The output column. Not really based on a TVE (it is really the
            // count expression represented by the count configured above). But
            // this is sufficient for now. This looks identical to the above
            // TVE but it's logically different so we'll create a fresh one.
            TupleValueExpression tve = new TupleValueExpression(
                    AbstractParsedStmt.TEMP_TABLE_NAME,
                    AbstractParsedStmt.TEMP_TABLE_NAME,
                    "modified_tuples",
                    "modified_tuples",
                    0);
            tve.setValueType(VoltType.BIGINT); // depends on control dependency: [if], data = [none]
            tve.setValueSize(VoltType.BIGINT.getLengthInBytesForFixedTypes()); // depends on control dependency: [if], data = [none]
            NodeSchema count_schema = new NodeSchema();
            count_schema.addColumn(
                    AbstractParsedStmt.TEMP_TABLE_NAME,
                    AbstractParsedStmt.TEMP_TABLE_NAME,
                    "modified_tuples",
                    "modified_tuples",
                    tve); // depends on control dependency: [if], data = [none]
            countNode.setOutputSchema(count_schema); // depends on control dependency: [if], data = [none]
        }
        // connect the nodes to build the graph
        sumOrLimitNode.addAndLinkChild(dmlRoot);
        SendPlanNode sendNode = new SendPlanNode();
        sendNode.addAndLinkChild(sumOrLimitNode);
        return sendNode;
    } }
|
public class class_name {
    /**
     * Shows the SARL package explorer view in the active page.
     *
     * @return the opened view part, or null when the view fails to initialize
     */
    public static SARLPackageExplorerPart openInActivePerspective() {
        SARLPackageExplorerPart part;
        try {
            part = (SARLPackageExplorerPart) JavaPlugin.getActivePage().showView(ID_PACKAGES);
        } catch (PartInitException exception) {
            // View creation failed; callers handle the null result.
            part = null;
        }
        return part;
    } }
|
public class class_name {
    /**
     * Shows the SARL package explorer view in the active page, returning null
     * when the view fails to initialize.
     */
    public static SARLPackageExplorerPart openInActivePerspective() {
        try {
            return (SARLPackageExplorerPart) JavaPlugin.getActivePage().showView(ID_PACKAGES); // depends on control dependency: [try], data = [none]
        } catch (PartInitException exception) {
            return null;
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Tests whether the code point is an LDH character (letter, digit, or
     * hyphen): one of '-', '0'-'9', 'A'-'Z', 'a'-'z'.
     *
     * @param ch the code point to test
     * @return true when {@code ch} is an LDH character
     */
    private static boolean isLDHChar(int ch){
        // Fast reject: everything above 'z' (the highest LDH code point).
        if (ch > 0x007A) {
            return false;
        }
        // [\u002D \u0030-\u0039 \u0041-\u005A \u0061-\u007A]
        return ch == 0x002D
            || (0x0030 <= ch && ch <= 0x0039)
            || (0x0041 <= ch && ch <= 0x005A)
            || (0x0061 <= ch && ch <= 0x007A);
    } }
|
public class class_name {
    /**
     * Tests whether the code point is an LDH character (letter, digit, or
     * hyphen): one of '-', '0'-'9', 'A'-'Z', 'a'-'z'.
     */
    private static boolean isLDHChar(int ch){
        // high runner case
        if(ch>0x007A){
            return false; // depends on control dependency: [if], data = [none]
        }
        //[\\u002D \\u0030-\\u0039 \\u0041-\\u005A \\u0061-\\u007A]
        if( (ch==0x002D) ||
            (0x0030 <= ch && ch <= 0x0039) ||
            (0x0041 <= ch && ch <= 0x005A) ||
            (0x0061 <= ch && ch <= 0x007A)
          ){
            return true; // depends on control dependency: [if], data = [none]
        }
        return false;
    } }
|
public class class_name {
    /**
     * Validation fix: propagates locus_tag qualifiers across features sharing
     * the same gene qualifier.
     *
     * <p>Two passes over all gene-bearing features:
     * <ol>
     *   <li>build an unambiguous gene -&gt; locus_tag map, skipping features
     *       with multiple gene or locus_tag qualifiers and dropping genes seen
     *       with conflicting locus_tags;</li>
     *   <li>copy the mapped locus_tag onto features of the same gene that lack
     *       one, reporting each addition as a FIX message.</li>
     * </ol>
     *
     * @param entry the entry to check; null yields an empty result
     * @return the accumulated validation result
     */
    public ValidationResult check(Entry entry) {
        result = new ValidationResult();
        if (entry == null) {
            return result;
        }
        //collect all gene features
        Collection<Feature> geneFeatures =
                SequenceEntryUtils.getFeaturesContainingQualifier(Qualifier.GENE_QUALIFIER_NAME, entry);
        if (geneFeatures.isEmpty()) {
            return result;
        }
        /**
         * which gene qualifier is associated with which locus tag qualifier - should be 1 to 1
         */
        HashMap<String, String> gene2locusTag = new HashMap<String, String>();
        /**
         * firstly, build up a list of all genes associated with only 1 locus_tag - ignore those associated with
         * more than 1 locus_tag as we are just looking for clean, 1 to 1 relationships.
         */
        for (Feature geneFeature : geneFeatures) {
            /**
             * we know this contains a gene qualifier cos that's how we built the list
             */
            List<Qualifier> geneQualifiers = geneFeature.getQualifiers(Qualifier.GENE_QUALIFIER_NAME);
            String geneName = geneQualifiers.get(0).getValue();
            if(geneQualifiers.size() > 1){
                continue;//just leave it - other checks for this, should only be 1
            }
            int locusTagCount =
                    SequenceEntryUtils.getFeatureQualifierCount(Qualifier.LOCUS_TAG_QUALIFIER_NAME, geneFeature);
            if (locusTagCount > 1) {
                continue;//just leave it - other checks for this
            } else if (locusTagCount == 1) {
                String currentLocusTag = geneFeature.getQualifiers(Qualifier.LOCUS_TAG_QUALIFIER_NAME).get(0).getValue();
                if (gene2locusTag.containsKey(geneName)) {
                    /**
                     * if the locus tag already associated with this gene has a different value, bail out for this gene.
                     * other checks will point out that there needs to be a 1 to 1 correspondence.
                     */
                    if(!gene2locusTag.get(geneName).equals(currentLocusTag)){
                        gene2locusTag.remove(geneName);//remove existing mapping as this is now not clearly the intended mapping
                        continue;
                    }
                } else {
                    //this locus tag is now reserved by this gene qualifier
                    gene2locusTag.put(geneName, currentLocusTag);
                }
            }
        }
        QualifierFactory qualifierFactory = new QualifierFactory();
        /**
         * then add the locus_tag to all features that do not have any and share the same gene qualifier as another
         * feature that does have a locus_tag associated
         */
        for (Feature geneFeature : geneFeatures) {
            String geneName = geneFeature.getQualifiers(Qualifier.GENE_QUALIFIER_NAME).get(0).getValue();
            int locusTagCount =
                    SequenceEntryUtils.getFeatureQualifierCount(Qualifier.LOCUS_TAG_QUALIFIER_NAME, geneFeature);
            if (locusTagCount == 0) {
                if(gene2locusTag.containsKey(geneName)){
                    String locus_tag = gene2locusTag.get(geneName);
                    Qualifier locusQualifier =
                            qualifierFactory.createQualifier(Qualifier.LOCUS_TAG_QUALIFIER_NAME, locus_tag);
                    geneFeature.addQualifier(locusQualifier);
                    reportMessage(Severity.FIX, geneFeature.getOrigin(), MESSAGE_ID, locus_tag, geneName);
                }
            }
        }
        return result;
    } }
|
public class class_name {
    /**
     * Validation fix: propagates locus_tag qualifiers across features sharing
     * the same gene qualifier. Pass one builds an unambiguous
     * gene -&gt; locus_tag map (conflicting or multi-valued cases are skipped);
     * pass two copies the mapped locus_tag onto same-gene features lacking one
     * and reports each addition as a FIX message.
     */
    public ValidationResult check(Entry entry) {
        result = new ValidationResult();
        if (entry == null) {
            return result; // depends on control dependency: [if], data = [none]
        }
        //collect all gene features
        Collection<Feature> geneFeatures =
                SequenceEntryUtils.getFeaturesContainingQualifier(Qualifier.GENE_QUALIFIER_NAME, entry);
        if (geneFeatures.isEmpty()) {
            return result; // depends on control dependency: [if], data = [none]
        }
        /**
         * which gene qualifier is associated with which locus tag qualifier - should be 1 to 1
         */
        HashMap<String, String> gene2locusTag = new HashMap<String, String>();
        /**
         * firstly, build up a list of all genes associated with only 1 locus_tag - ignore those associated with
         * more than 1 locus_tag as we are just looking for clean, 1 to 1 relationships.
         */
        for (Feature geneFeature : geneFeatures) {
            /**
             * we know this contains a gene qualifier cos that's how we built the list
             */
            List<Qualifier> geneQualifiers = geneFeature.getQualifiers(Qualifier.GENE_QUALIFIER_NAME);
            String geneName = geneQualifiers.get(0).getValue();
            if(geneQualifiers.size() > 1){
                continue;//just leave it - other checks for this, should only be 1
            }
            int locusTagCount =
                    SequenceEntryUtils.getFeatureQualifierCount(Qualifier.LOCUS_TAG_QUALIFIER_NAME, geneFeature);
            if (locusTagCount > 1) {
                continue;//just leave it - other checks for this
            } else if (locusTagCount == 1) {
                String currentLocusTag = geneFeature.getQualifiers(Qualifier.LOCUS_TAG_QUALIFIER_NAME).get(0).getValue();
                if (gene2locusTag.containsKey(geneName)) {
                    /**
                     * if the locus tag already associated with this gene has a different value, bail out for this gene.
                     * other checks will point out that there needs to be a 1 to 1 correspondence.
                     */
                    if(!gene2locusTag.get(geneName).equals(currentLocusTag)){
                        gene2locusTag.remove(geneName);//remove existing mapping as this is now not clearly the intended mapping // depends on control dependency: [if], data = [none]
                        continue;
                    }
                } else {
                    //this locus tag is now reserved by this gene qualifier
                    gene2locusTag.put(geneName, currentLocusTag); // depends on control dependency: [if], data = [none]
                }
            }
        }
        QualifierFactory qualifierFactory = new QualifierFactory();
        /**
         * then add the locus_tag to all features that do not have any and share the same gene qualifier as another
         * feature that does have a locus_tag associated
         */
        for (Feature geneFeature : geneFeatures) {
            String geneName = geneFeature.getQualifiers(Qualifier.GENE_QUALIFIER_NAME).get(0).getValue();
            int locusTagCount =
                    SequenceEntryUtils.getFeatureQualifierCount(Qualifier.LOCUS_TAG_QUALIFIER_NAME, geneFeature);
            if (locusTagCount == 0) {
                if(gene2locusTag.containsKey(geneName)){
                    String locus_tag = gene2locusTag.get(geneName);
                    Qualifier locusQualifier =
                            qualifierFactory.createQualifier(Qualifier.LOCUS_TAG_QUALIFIER_NAME, locus_tag);
                    geneFeature.addQualifier(locusQualifier); // depends on control dependency: [if], data = [none]
                    reportMessage(Severity.FIX, geneFeature.getOrigin(), MESSAGE_ID, locus_tag, geneName); // depends on control dependency: [if], data = [none]
                }
            }
        }
        return result;
    } }
|
public class class_name {
    /**
     * Returns the instance type of the superclass constructor of {@code type},
     * but only when that constructor's JSDoc marks it {@code @final}.
     *
     * @param type the type whose superclass to inspect; may be null
     * @return the final superclass instance type, or null when the chain is
     *         broken at any step or the superclass is not final
     */
    @Nullable
    private static ObjectType getSuperClassInstanceIfFinal(@Nullable JSType type) {
        if (type != null) {
            ObjectType obj = type.toObjectType();
            if (obj != null) {
                FunctionType ctor = obj.getSuperClassConstructor();
                if (ctor != null) {
                    JSDocInfo doc = ctor.getJSDocInfo();
                    if (doc != null && doc.isFinal()) {
                        return ctor.getInstanceType();
                    }
                }
            }
        }
        return null;
    } }
|
public class class_name {
  // Returns the superclass instance type only when the superclass constructor's
  // JSDoc marks it @final; null in every other case.
  // NOTE(review): trailing "// depends on control dependency" comments are
  // machine-generated program-dependence annotations; kept verbatim.
  @Nullable
  private static ObjectType getSuperClassInstanceIfFinal(@Nullable JSType type) {
    if (type == null) {
      return null; // depends on control dependency: [if], data = [none]
    }
    ObjectType obj = type.toObjectType();
    FunctionType ctor = (obj == null) ? null : obj.getSuperClassConstructor();
    JSDocInfo doc = (ctor == null) ? null : ctor.getJSDocInfo();
    if (doc != null && doc.isFinal()) {
      return ctor.getInstanceType(); // depends on control dependency: [if], data = [none]
    }
    return null;
  } }
|
public class class_name {
    /**
     * Executes a ZooKeeper operation, retrying up to {@code maxRetry} times on
     * connection loss or session expiry, backing off between attempts.
     *
     * @throws ArbitrateException if this client was already destroyed
     * @throws KeeperException the first connection-loss error when all retries fail,
     *         or OperationTimeoutException when no attempt ever reached ZooKeeper
     * @throws InterruptedException if interrupted while delaying between retries
     */
    @SuppressWarnings("unchecked")
    public <T> T retryOperation(ZooKeeperOperation<T> operation) throws KeeperException, InterruptedException {
        if (!running.get()) {
            // Fixed typo in the original message ("destory ,").
            throw new ArbitrateException("Zookeeper is destroyed, should never be used ....");
        }
        KeeperException exception = null;
        for (int i = 0; i < maxRetry; i++) {
            int version = cversion.get(); // snapshot the version before this attempt
            int retryCount = i + 1;
            try {
                if (zookeeper.getState().isAlive()) {
                    return (T) operation.execute();
                }
                // Connection not alive: back off, then reset state tied to this version.
                retryDelay(retryCount);
                cleanup(version);
            } catch (KeeperException.SessionExpiredException e) {
                logger.warn("Session expired for: " + this + " so reconnecting " + (i + 1) + " times due to: " + e, e);
                retryDelay(retryCount);
                cleanup(version);
            } catch (KeeperException.ConnectionLossException e) { // special handling for connection loss
                if (exception == null) {
                    exception = e; // remember the first connection-loss failure
                }
                logger.warn("Attempt " + retryCount + " failed with connection loss so " + "attempting to reconnect: "
                            + e, e);
                retryDelay(retryCount);
            }
        }
        if (exception == null) {
            // BUGFIX: the original executed "throw exception" here even when no
            // ConnectionLossException was ever captured (e.g. connection never alive,
            // or only session-expired failures), throwing null -> NullPointerException.
            throw new KeeperException.OperationTimeoutException();
        }
        throw exception;
    } }
|
public class class_name {
    // Retries a ZooKeeper operation up to maxRetry times, backing off on
    // connection loss / session expiry; rethrows the first captured
    // connection-loss error after exhausting retries.
    // NOTE(review): trailing "// depends on control dependency" comments are
    // machine-generated program-dependence annotations; kept verbatim.
    public <T> T retryOperation(ZooKeeperOperation<T> operation) throws KeeperException, InterruptedException {
        if (!running.get()) {
            throw new ArbitrateException("Zookeeper is destory ,should never be used ....");
        }
        KeeperException exception = null;
        for (int i = 0; i < maxRetry; i++) {
            int version = cversion.get(); // fetch the version
            int retryCount = i + 1;
            try {
                if (!zookeeper.getState().isAlive()) {
                    retryDelay(retryCount); // depends on control dependency: [if], data = [none]
                    cleanup(version); // depends on control dependency: [if], data = [none]
                } else {
                    return (T) operation.execute(); // depends on control dependency: [if], data = [none]
                }
            } catch (KeeperException.SessionExpiredException e) {
                logger.warn("Session expired for: " + this + " so reconnecting " + (i + 1) + " times due to: " + e, e);
                retryDelay(retryCount);
                cleanup(version);
            } catch (KeeperException.ConnectionLossException e) { // special handling for Connection Loss
                if (exception == null) {
                    exception = e; // depends on control dependency: [if], data = [none]
                }
                logger.warn("Attempt " + retryCount + " failed with connection loss so " + "attempting to reconnect: "
                            + e, e);
                retryDelay(retryCount);
            }
        }
        // NOTE(review): if no ConnectionLossException was ever captured this throws
        // null (NPE) — confirm intended behavior with callers.
        throw exception;
    } }
|
public class class_name {
    /**
     * Verifies that a numeric-only monitor annotation is attached to a numeric
     * field or method.
     *
     * @throws IllegalArgumentException when {@code type} is not numeric
     */
    private static void checkType(
        com.netflix.servo.annotations.Monitor anno, Class<?> type, Class<?> container) {
      if (isNumericType(type)) {
        return; // numeric targets are always valid
      }
      throw new IllegalArgumentException(
          "annotation of type " + anno.type().name() + " can only be used"
          + " with numeric values, " + anno.name() + " in class " + container.getName()
          + " is applied to a field or method of type " + type.getName());
    } }
|
public class class_name {
    // Rejects non-numeric targets for monitor annotations that require numbers.
    // NOTE(review): trailing "// depends on control dependency" comments are
    // machine-generated program-dependence annotations; kept verbatim.
    private static void checkType(
        com.netflix.servo.annotations.Monitor anno, Class<?> type, Class<?> container) {
      if (!isNumericType(type)) {
        final String msg = "annotation of type " + anno.type().name() + " can only be used" // depends on control dependency: [if], data = [none]
            + " with numeric values, " + anno.name() + " in class " + container.getName()
            + " is applied to a field or method of type " + type.getName();
        throw new IllegalArgumentException(msg);
      }
    } }
|
public class class_name {
    /**
     * Stores {@code entry} under {@code oid} in the session cache. When
     * {@code onlyIfNew} is set, an existing mapping is left untouched.
     */
    private void putToSessionCache(Identity oid, CacheEntry entry, boolean onlyIfNew)
    {
        // No synchronization needed: the session cache is per broker instance.
        if (!onlyIfNew || !sessionCache.containsKey(oid))
        {
            sessionCache.put(oid, entry);
        }
    } }
|
public class class_name {
    // Puts an entry into the per-broker session cache, optionally only when
    // the key is not yet present.
    // NOTE(review): trailing "// depends on control dependency" comments are
    // machine-generated program-dependence annotations; kept verbatim.
    private void putToSessionCache(Identity oid, CacheEntry entry, boolean onlyIfNew)
    {
        if(onlyIfNew)
        {
            // no synchronization needed, because session cache was used per broker instance
            if(!sessionCache.containsKey(oid)) sessionCache.put(oid, entry);
        }
        else
        {
            sessionCache.put(oid, entry);
            // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Builds one Variable per public getter of {@code solutionClass};
     * non-getter methods are ignored.
     */
    public <Solution> Collection<Variable<Solution, ?>> createFromGetters(
            Class<Solution> solutionClass) {
        Collection<Variable<Solution, ?>> variables = new LinkedList<>();
        for (Method candidate : solutionClass.getMethods()) {
            if (!isGetter(candidate)) {
                continue; // not a getter, ignore it
            }
            String propertyName = candidate.getName().substring(3); // strip "get"
            variables.add(createVariableOn(
                    solutionClass, candidate, propertyName, candidate.getReturnType()));
        }
        return variables;
    } }
|
public class class_name {
    // Creates a Variable for each getter method found on the solution class.
    // NOTE(review): trailing "// depends on control dependency" comments are
    // machine-generated program-dependence annotations; kept verbatim.
    public <Solution> Collection<Variable<Solution, ?>> createFromGetters(
            Class<Solution> solutionClass) {
        Collection<Variable<Solution, ?>> variables = new LinkedList<>();
        for (Method method : solutionClass.getMethods()) {
            if (isGetter(method)) {
                String name = method.getName().substring(3);
                variables.add(createVariableOn(solutionClass, method, name,
                        method.getReturnType())); // depends on control dependency: [if], data = [none]
            } else {
                // not a getter, ignore it
            }
        }
        return variables;
    } }
|
public class class_name {
    /**
     * Selects the executor for {@code work}: the long-running pool when one is
     * configured and the work qualifies, otherwise the short-running pool.
     * Fires hints-complete before returning.
     */
    private BlockingExecutor getExecutor(Work work)
    {
        final boolean longRunning =
            longRunningExecutor != null && WorkManagerUtil.isLongRunning(work);
        BlockingExecutor chosen = longRunning ? longRunningExecutor : shortRunningExecutor;
        fireHintsComplete(work);
        return chosen;
    } }
|
public class class_name {
    // Chooses long- vs short-running executor for the given work item.
    // NOTE(review): trailing "// depends on control dependency" comments are
    // machine-generated program-dependence annotations; kept verbatim.
    private BlockingExecutor getExecutor(Work work)
    {
        BlockingExecutor executor = shortRunningExecutor;
        if (longRunningExecutor != null && WorkManagerUtil.isLongRunning(work))
        {
            executor = longRunningExecutor; // depends on control dependency: [if], data = [none]
        }
        fireHintsComplete(work);
        return executor;
    } }
|
public class class_name {
    /**
     * Replaces the stored previews with a defensive copy of the argument;
     * {@code null} clears the field.
     */
    public void setExclusionPreviews(java.util.Collection<ExclusionPreview> exclusionPreviews) {
        this.exclusionPreviews = (exclusionPreviews == null)
                ? null
                : new java.util.ArrayList<ExclusionPreview>(exclusionPreviews);
    } }
|
public class class_name {
    // Setter that defensively copies the incoming collection; null clears it.
    // NOTE(review): trailing "// depends on control dependency" comments are
    // machine-generated program-dependence annotations; kept verbatim.
    public void setExclusionPreviews(java.util.Collection<ExclusionPreview> exclusionPreviews) {
        if (exclusionPreviews == null) {
            this.exclusionPreviews = null; // depends on control dependency: [if], data = [none]
            return; // depends on control dependency: [if], data = [none]
        }
        this.exclusionPreviews = new java.util.ArrayList<ExclusionPreview>(exclusionPreviews);
    } }
|
public class class_name {
    // One-shot initialization of the data-collector registry: parses optional
    // period overrides, builds the provided metrics, wires them as observers of
    // a new DCAgent, and registers a DCDescriptor with the agent.
    // Side-effect order matters (metrics must exist before observers are added),
    // so the body is left unchanged.
    public void init(String dc_sync_period, String resources_keep_alive_period, String manager_ip, String manager_port) {
        if (registryInitialized)
            throw new RuntimeException("Registry was already initialized");
        // Optional overrides; fields keep their previous values when args are null.
        if (dc_sync_period != null) {
            CONFIG_SYNC_PERIOD = Integer.parseInt(dc_sync_period);
        }
        if (resources_keep_alive_period != null) {
            KEEP_ALIVE = Integer.parseInt(resources_keep_alive_period);
        }
        // Build metrics
        providedMetrics = buildProvidedMetrics();
        // Build the DCAgent
        dcAgent = new DCAgent(new ManagerAPI(manager_ip, Integer.parseInt(manager_port)));
        // Add observers of metrics to the DCAgent
        for (Metric metric : providedMetrics) {
            logger.debug("Added metric {} as observer of dcagent",
                    metric.getMonitoredMetric());
            dcAgent.addObserver(metric);
        }
        // Build the DCDescriptor
        DCDescriptor dcDescriptor = new DCDescriptor();
        dcDescriptor.addMonitoredResources(getProvidedMetrics(), getResources());
        dcDescriptor.addResources(getResources());
        // Fall back to defaults when no period was configured; keep-alive default
        // is the sync default plus a 15-unit grace period.
        dcDescriptor
                .setConfigSyncPeriod(CONFIG_SYNC_PERIOD != null ? CONFIG_SYNC_PERIOD
                        : DEFAULT_CONFIG_SYNC_PERIOD);
        dcDescriptor.setKeepAlive(KEEP_ALIVE != null ? KEEP_ALIVE
                : (DEFAULT_CONFIG_SYNC_PERIOD + 15));
        dcAgent.setDCDescriptor(dcDescriptor);
        registryInitialized = true;
    } }
|
public class class_name {
    // One-shot registry initialization (see sibling version above for details).
    // NOTE(review): trailing "// depends on control dependency" comments are
    // machine-generated program-dependence annotations; kept verbatim.
    public void init(String dc_sync_period, String resources_keep_alive_period, String manager_ip, String manager_port) {
        if (registryInitialized)
            throw new RuntimeException("Registry was already initialized");
        if (dc_sync_period != null) {
            CONFIG_SYNC_PERIOD = Integer.parseInt(dc_sync_period); // depends on control dependency: [if], data = [(dc_sync_period]
        }
        if (resources_keep_alive_period != null) {
            KEEP_ALIVE = Integer.parseInt(resources_keep_alive_period); // depends on control dependency: [if], data = [(resources_keep_alive_period]
        }
        // Build metrics
        providedMetrics = buildProvidedMetrics();
        // Build the DCAgent
        dcAgent = new DCAgent(new ManagerAPI(manager_ip, Integer.parseInt(manager_port)));
        // Add observers of metrics to the DCAgent
        for (Metric metric : providedMetrics) {
            logger.debug("Added metric {} as observer of dcagent",
                    metric.getMonitoredMetric()); // depends on control dependency: [for], data = [metric]
            dcAgent.addObserver(metric); // depends on control dependency: [for], data = [metric]
        }
        // Build the DCDescriptor
        DCDescriptor dcDescriptor = new DCDescriptor();
        dcDescriptor.addMonitoredResources(getProvidedMetrics(), getResources());
        dcDescriptor.addResources(getResources());
        dcDescriptor
                .setConfigSyncPeriod(CONFIG_SYNC_PERIOD != null ? CONFIG_SYNC_PERIOD
                        : DEFAULT_CONFIG_SYNC_PERIOD);
        dcDescriptor.setKeepAlive(KEEP_ALIVE != null ? KEEP_ALIVE
                : (DEFAULT_CONFIG_SYNC_PERIOD + 15));
        dcAgent.setDCDescriptor(dcDescriptor);
        registryInitialized = true;
    } }
|
public class class_name {
    /**
     * Registers a filter aggregation named {@code name}, letting the optional
     * lambdas customize the query, the builder options, and sub-aggregations.
     */
    public void filter(String name, EsAbstractConditionQuery.OperatorCall<BsDataConfigCQ> queryLambda,
            ConditionOptionCall<FilterAggregationBuilder> opLambda, OperatorCall<BsDataConfigCA> aggsLambda) {
        final DataConfigCQ query = new DataConfigCQ();
        if (queryLambda != null) {
            queryLambda.callback(query);
        }
        final FilterAggregationBuilder filterBuilder = regFilterA(name, query.getQuery());
        if (opLambda != null) {
            opLambda.callback(filterBuilder);
        }
        if (aggsLambda == null) {
            return; // no sub-aggregations requested
        }
        final DataConfigCA subAggs = new DataConfigCA();
        aggsLambda.callback(subAggs);
        subAggs.getAggregationBuilderList().forEach(filterBuilder::subAggregation);
    } }
|
public class class_name {
    // Registers a filter aggregation with optional query / option / sub-agg callbacks.
    // NOTE(review): trailing "// depends on control dependency" comments are
    // machine-generated program-dependence annotations; kept verbatim.
    public void filter(String name, EsAbstractConditionQuery.OperatorCall<BsDataConfigCQ> queryLambda,
            ConditionOptionCall<FilterAggregationBuilder> opLambda, OperatorCall<BsDataConfigCA> aggsLambda) {
        DataConfigCQ cq = new DataConfigCQ();
        if (queryLambda != null) {
            queryLambda.callback(cq); // depends on control dependency: [if], data = [none]
        }
        FilterAggregationBuilder builder = regFilterA(name, cq.getQuery());
        if (opLambda != null) {
            opLambda.callback(builder); // depends on control dependency: [if], data = [none]
        }
        if (aggsLambda != null) {
            DataConfigCA ca = new DataConfigCA();
            aggsLambda.callback(ca); // depends on control dependency: [if], data = [none]
            ca.getAggregationBuilderList().forEach(builder::subAggregation); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    // Replaces the store contents with `list`: queues a Sync delta for every new
    // object, then queues Deleted deltas (as DeletedFinalStateUnknown markers) for
    // previously known objects that are absent from `list`. All work happens under
    // the write lock; delta ordering is significant, so the body is unchanged.
    @Override
    public void replace(List list, String resourceVersion) {
        lock.writeLock().lock();
        try {
            Set<String> keys = new HashSet<>();
            for (Object obj : list) {
                String key = this.keyOf(obj);
                keys.add(key);
                this.queueActionLocked(DeltaType.Sync, obj);
            }
            if (this.knownObjects == null) {
                // No external known-object store: detect deletions from our own
                // pending delta queues instead.
                for (Map.Entry<String, Deque<MutablePair<DeltaType, Object>>> entry :
                        this.items.entrySet()) {
                    if (keys.contains(entry.getKey())) {
                        continue;
                    }
                    Object deletedObj = null;
                    MutablePair<DeltaType, Object> delta = entry.getValue().peekLast(); // get newest
                    if (delta != null) {
                        deletedObj = delta.getRight();
                    }
                    this.queueActionLocked(
                            DeltaType.Deleted, new DeletedFinalStateUnknown(entry.getKey(), deletedObj));
                }
                if (!this.populated) {
                    this.populated = true;
                    this.initialPopulationCount = list.size();
                }
                return;
            }
            // Detect deletions not already in the queue.
            List<String> knownKeys = this.knownObjects.listKeys();
            int queueDeletion = 0;
            for (String knownKey : knownKeys) {
                if (keys.contains(knownKey)) {
                    continue;
                }
                Object deletedObj = this.knownObjects.getByKey(knownKey);
                if (deletedObj == null) {
                    log.warn(
                            "Key {} does not exist in known objects store, placing DeleteFinalStateUnknown marker without object",
                            knownKey);
                }
                queueDeletion++;
                this.queueActionLocked(
                        DeltaType.Deleted, new DeletedFinalStateUnknown<>(knownKey, deletedObj));
            }
            if (!this.populated) {
                this.populated = true;
                // Initial population includes the queued deletions.
                this.initialPopulationCount = list.size() + queueDeletion;
            }
        } finally {
            lock.writeLock().unlock();
        }
    } }
|
public class class_name {
    // Replaces store contents with `list` under the write lock (see sibling
    // version above for details).
    // NOTE(review): trailing "// depends on control dependency" comments are
    // machine-generated program-dependence annotations; kept verbatim.
    @Override
    public void replace(List list, String resourceVersion) {
        lock.writeLock().lock();
        try {
            Set<String> keys = new HashSet<>();
            for (Object obj : list) {
                String key = this.keyOf(obj);
                keys.add(key); // depends on control dependency: [for], data = [none]
                this.queueActionLocked(DeltaType.Sync, obj); // depends on control dependency: [for], data = [obj]
            }
            if (this.knownObjects == null) {
                for (Map.Entry<String, Deque<MutablePair<DeltaType, Object>>> entry :
                        this.items.entrySet()) {
                    if (keys.contains(entry.getKey())) {
                        continue;
                    }
                    Object deletedObj = null;
                    MutablePair<DeltaType, Object> delta = entry.getValue().peekLast(); // get newest
                    if (delta != null) {
                        deletedObj = delta.getRight(); // depends on control dependency: [if], data = [none]
                    }
                    this.queueActionLocked(
                            DeltaType.Deleted, new DeletedFinalStateUnknown(entry.getKey(), deletedObj)); // depends on control dependency: [for], data = [none]
                }
                if (!this.populated) {
                    this.populated = true; // depends on control dependency: [if], data = [none]
                    this.initialPopulationCount = list.size(); // depends on control dependency: [if], data = [none]
                }
                return; // depends on control dependency: [if], data = [none]
            }
            // Detect deletions not already in the queue.
            List<String> knownKeys = this.knownObjects.listKeys();
            int queueDeletion = 0;
            for (String knownKey : knownKeys) {
                if (keys.contains(knownKey)) {
                    continue;
                }
                Object deletedObj = this.knownObjects.getByKey(knownKey);
                if (deletedObj == null) {
                    log.warn(
                            "Key {} does not exist in known objects store, placing DeleteFinalStateUnknown marker without object",
                            knownKey); // depends on control dependency: [if], data = [none]
                }
                queueDeletion++; // depends on control dependency: [for], data = [none]
                this.queueActionLocked(
                        DeltaType.Deleted, new DeletedFinalStateUnknown<>(knownKey, deletedObj)); // depends on control dependency: [for], data = [none]
            }
            if (!this.populated) {
                this.populated = true; // depends on control dependency: [if], data = [none]
                this.initialPopulationCount = list.size() + queueDeletion; // depends on control dependency: [if], data = [none]
            }
        } finally {
            lock.writeLock().unlock();
        }
    } }
|
public class class_name {
    /**
     * Serializes {@code series} and POSTs it to the series endpoint.
     * A serialization failure is returned as an error Result; a malformed URI
     * (which should never happen) is thrown as IllegalArgumentException.
     */
    public Result<Series> createSeries(Series series) {
        checkNotNull(series);
        final URI uri;
        try {
            uri = new URIBuilder(String.format("/%s/series/", API_VERSION)).build();
        } catch (URISyntaxException e) {
            throw new IllegalArgumentException("Could not build URI", e);
        }
        final String body;
        try {
            body = Json.dumps(series);
        } catch (JsonProcessingException e) {
            // Report serialization problems as a Result rather than throwing.
            String message = "Error serializing the body of the request. More detail: " + e.getMessage();
            return new Result<Series>(null, GENERIC_ERROR_CODE, message);
        }
        HttpRequest request = buildRequest(uri.toString(), HttpMethod.POST, body);
        return execute(request, Series.class);
    } }
|
public class class_name {
    // Serializes a Series and POSTs it; serialization errors become an error Result.
    // NOTE(review): trailing "// depends on control dependency" comments are
    // machine-generated program-dependence annotations; kept verbatim.
    public Result<Series> createSeries(Series series) {
        checkNotNull(series);
        URI uri = null;
        try {
            URIBuilder builder = new URIBuilder(String.format("/%s/series/", API_VERSION));
            uri = builder.build(); // depends on control dependency: [try], data = [none]
        } catch (URISyntaxException e) {
            String message = "Could not build URI";
            throw new IllegalArgumentException(message, e);
        } // depends on control dependency: [catch], data = [none]
        Result<Series> result = null;
        String body = null;
        try {
            body = Json.dumps(series); // depends on control dependency: [try], data = [none]
        } catch (JsonProcessingException e) {
            String message = "Error serializing the body of the request. More detail: " + e.getMessage();
            result = new Result<Series>(null, GENERIC_ERROR_CODE, message);
            return result;
        } // depends on control dependency: [catch], data = [none]
        HttpRequest request = buildRequest(uri.toString(), HttpMethod.POST, body);
        result = execute(request, Series.class);
        return result;
    } }
|
public class class_name {
    // Reflectively scans the class for get/set pairs, pairing them into
    // PropertyPair objects keyed by property name, honoring @StoreAs renames and
    // skipping the table-interface properties. The getter/setter pairing and the
    // post-scan remove/rename passes are order-sensitive, so code is unchanged.
    private static HashMap<String, PropertyPair> generatePropertyPairsHelper(final Class<?> clazzType) {
        final Method[] methods = clazzType.getMethods();
        final HashMap<String, PropertyPair> propMap = new HashMap<String, PropertyPair>();
        String propName = null;
        PropertyPair currProperty = null;
        for (final Method m : methods) {
            // Only getFoo/setFoo shapes (name longer than the 3-char prefix) qualify.
            if (m.getName().length() < 4 || (!m.getName().startsWith("get") && !m.getName().startsWith("set"))) {
                continue;
            }
            propName = m.getName().substring(3);
            // Skip interface methods, these will be called explicitly
            if (propName.equals(TableConstants.PARTITION_KEY) || propName.equals(TableConstants.ROW_KEY)
                    || propName.equals(TableConstants.TIMESTAMP) || propName.equals("Etag")
                    || propName.equals("LastModified")) {
                continue;
            }
            if (propMap.containsKey(propName)) {
                currProperty = propMap.get(propName);
            }
            else {
                currProperty = new PropertyPair();
                currProperty.name = propName;
                propMap.put(propName, currProperty);
            }
            // Getter: zero args; setter: one arg returning void.
            if (m.getName().startsWith("get") && m.getParameterTypes().length == 0) {
                currProperty.type = m.getReturnType();
                currProperty.getter = m;
            }
            else if (m.getName().startsWith("set") && m.getParameterTypes().length == 1
                    && void.class.equals(m.getReturnType())) {
                currProperty.setter = m;
            }
            // Check for StoreAs Annotation
            final StoreAs storeAsInstance = m.getAnnotation(StoreAs.class);
            if (storeAsInstance != null) {
                if (Utility.isNullOrEmpty(storeAsInstance.name())) {
                    throw new IllegalArgumentException(String.format(SR.STOREAS_USED_ON_EMPTY_PROPERTY,
                            currProperty.name));
                }
                // Getter and setter must agree on the @StoreAs name.
                if (currProperty.effectiveName != null && !currProperty.effectiveName.equals(currProperty.name)
                        && !currProperty.effectiveName.equals(storeAsInstance.name())) {
                    throw new IllegalArgumentException(String.format(SR.STOREAS_DIFFERENT_FOR_GETTER_AND_SETTER,
                            currProperty.name));
                }
                if (!currProperty.name.equals(storeAsInstance.name())) {
                    currProperty.effectiveName = storeAsInstance.name();
                }
            }
        }
        // Return only processable pairs
        final ArrayList<String> keysToRemove = new ArrayList<String>();
        final ArrayList<String> keysToAlter = new ArrayList<String>();
        for (final Entry<String, PropertyPair> e : propMap.entrySet()) {
            if (!e.getValue().shouldProcess()) {
                keysToRemove.add(e.getKey());
                continue;
            }
            if (!Utility.isNullOrEmpty(e.getValue().effectiveName)) {
                keysToAlter.add(e.getKey());
            }
            else {
                e.getValue().effectiveName = e.getValue().name;
            }
        }
        // remove all entries for keys that should not process
        for (final String key : keysToRemove) {
            propMap.remove(key);
        }
        // Any store as properties should be re-stored into the hash under the efective name.
        for (final String key : keysToAlter) {
            final PropertyPair p = propMap.get(key);
            propMap.remove(key);
            propMap.put(p.effectiveName, p);
        }
        return propMap;
    } }
|
public class class_name {
    // Reflective get/set pairing with @StoreAs handling (see sibling version
    // above for details).
    // NOTE(review): trailing "// depends on control dependency" comments are
    // machine-generated program-dependence annotations; kept verbatim.
    private static HashMap<String, PropertyPair> generatePropertyPairsHelper(final Class<?> clazzType) {
        final Method[] methods = clazzType.getMethods();
        final HashMap<String, PropertyPair> propMap = new HashMap<String, PropertyPair>();
        String propName = null;
        PropertyPair currProperty = null;
        for (final Method m : methods) {
            if (m.getName().length() < 4 || (!m.getName().startsWith("get") && !m.getName().startsWith("set"))) {
                continue;
            }
            propName = m.getName().substring(3); // depends on control dependency: [for], data = [m]
            // Skip interface methods, these will be called explicitly
            if (propName.equals(TableConstants.PARTITION_KEY) || propName.equals(TableConstants.ROW_KEY)
                    || propName.equals(TableConstants.TIMESTAMP) || propName.equals("Etag")
                    || propName.equals("LastModified")) {
                continue;
            }
            if (propMap.containsKey(propName)) {
                currProperty = propMap.get(propName); // depends on control dependency: [if], data = [none]
            }
            else {
                currProperty = new PropertyPair(); // depends on control dependency: [if], data = [none]
                currProperty.name = propName; // depends on control dependency: [if], data = [none]
                propMap.put(propName, currProperty); // depends on control dependency: [if], data = [none]
            }
            if (m.getName().startsWith("get") && m.getParameterTypes().length == 0) {
                currProperty.type = m.getReturnType(); // depends on control dependency: [if], data = [none]
                currProperty.getter = m; // depends on control dependency: [if], data = [none]
            }
            else if (m.getName().startsWith("set") && m.getParameterTypes().length == 1
                    && void.class.equals(m.getReturnType())) {
                currProperty.setter = m; // depends on control dependency: [if], data = [none]
            }
            // Check for StoreAs Annotation
            final StoreAs storeAsInstance = m.getAnnotation(StoreAs.class);
            if (storeAsInstance != null) {
                if (Utility.isNullOrEmpty(storeAsInstance.name())) {
                    throw new IllegalArgumentException(String.format(SR.STOREAS_USED_ON_EMPTY_PROPERTY,
                            currProperty.name));
                }
                if (currProperty.effectiveName != null && !currProperty.effectiveName.equals(currProperty.name)
                        && !currProperty.effectiveName.equals(storeAsInstance.name())) {
                    throw new IllegalArgumentException(String.format(SR.STOREAS_DIFFERENT_FOR_GETTER_AND_SETTER,
                            currProperty.name));
                }
                if (!currProperty.name.equals(storeAsInstance.name())) {
                    currProperty.effectiveName = storeAsInstance.name(); // depends on control dependency: [if], data = [none]
                }
            }
        }
        // Return only processable pairs
        final ArrayList<String> keysToRemove = new ArrayList<String>();
        final ArrayList<String> keysToAlter = new ArrayList<String>();
        for (final Entry<String, PropertyPair> e : propMap.entrySet()) {
            if (!e.getValue().shouldProcess()) {
                keysToRemove.add(e.getKey()); // depends on control dependency: [if], data = [none]
                continue;
            }
            if (!Utility.isNullOrEmpty(e.getValue().effectiveName)) {
                keysToAlter.add(e.getKey()); // depends on control dependency: [if], data = [none]
            }
            else {
                e.getValue().effectiveName = e.getValue().name; // depends on control dependency: [if], data = [none]
            }
        }
        // remove all entries for keys that should not process
        for (final String key : keysToRemove) {
            propMap.remove(key); // depends on control dependency: [for], data = [key]
        }
        // Any store as properties should be re-stored into the hash under the efective name.
        for (final String key : keysToAlter) {
            final PropertyPair p = propMap.get(key);
            propMap.remove(key); // depends on control dependency: [for], data = [key]
            propMap.put(p.effectiveName, p); // depends on control dependency: [for], data = [none]
        }
        return propMap;
    } }
|
public class class_name {
    /**
     * Replaces every occurrence of {@code oldVal} (null-safe) in {@code list}
     * with {@code newVal}; returns whether anything was replaced. Mirrors
     * java.util.Collections.replaceAll: small or random-access lists use
     * positional set, others a ListIterator.
     */
    public static <T> boolean replaceAll(List<T> list, T oldVal, T newVal) {
        boolean replaced = false;
        final int size = list.size();
        final boolean positional = size < REPLACEALL_THRESHOLD || list instanceof RandomAccess;
        if (positional) {
            // Random-access path: get/set by index is cheap.
            for (int i = 0; i < size; i++) {
                T element = list.get(i);
                boolean matches = (oldVal == null) ? element == null : oldVal.equals(element);
                if (matches) {
                    list.set(i, newVal);
                    replaced = true;
                }
            }
        } else {
            // Sequential-access path: walk once with a ListIterator.
            ListIterator<T> it = list.listIterator();
            for (int i = 0; i < size; i++) {
                T element = it.next();
                boolean matches = (oldVal == null) ? element == null : oldVal.equals(element);
                if (matches) {
                    it.set(newVal);
                    replaced = true;
                }
            }
        }
        return replaced;
    } }
|
public class class_name {
    // Null-safe in-place replaceAll over a List, mirroring
    // java.util.Collections.replaceAll's random-/sequential-access split.
    // NOTE(review): trailing "// depends on control dependency" comments are
    // machine-generated program-dependence annotations; kept verbatim.
    public static <T> boolean replaceAll(List<T> list, T oldVal, T newVal) {
        boolean result = false;
        int size = list.size();
        if (size < REPLACEALL_THRESHOLD || list instanceof RandomAccess) {
            if (oldVal==null) {
                for (int i=0; i<size; i++) {
                    if (list.get(i)==null) {
                        list.set(i, newVal); // depends on control dependency: [if], data = [none]
                        result = true; // depends on control dependency: [if], data = [none]
                    }
                }
            } else {
                for (int i=0; i<size; i++) {
                    if (oldVal.equals(list.get(i))) {
                        list.set(i, newVal); // depends on control dependency: [if], data = [none]
                        result = true; // depends on control dependency: [if], data = [none]
                    }
                }
            }
        } else {
            ListIterator<T> itr=list.listIterator();
            if (oldVal==null) {
                for (int i=0; i<size; i++) {
                    if (itr.next()==null) {
                        itr.set(newVal); // depends on control dependency: [if], data = [none]
                        result = true; // depends on control dependency: [if], data = [none]
                    }
                }
            } else {
                for (int i=0; i<size; i++) {
                    if (oldVal.equals(itr.next())) {
                        itr.set(newVal); // depends on control dependency: [if], data = [none]
                        result = true; // depends on control dependency: [if], data = [none]
                    }
                }
            }
        }
        return result;
    } }
|
public class class_name {
    /**
     * Converts {@code value} to a {@code long[]}: collections and iterables are
     * converted element-wise, character sequences are split on the standard
     * number delimiters, and anything else becomes a single-element array.
     */
    protected long[] convertValueToArray(final Object value) {
        if (value instanceof Collection) {
            final Collection collection = (Collection) value;
            final long[] result = new long[collection.size()];
            int index = 0;
            for (final Object element : collection) {
                result[index++] = convertType(element);
            }
            return result;
        }
        if (value instanceof Iterable) {
            // Size is unknown up front, so buffer boxed values first.
            final ArrayList<Long> buffer = new ArrayList<>();
            for (final Object element : (Iterable) value) {
                buffer.add(Long.valueOf(convertType(element)));
            }
            final long[] result = new long[buffer.size()];
            for (int i = 0; i < result.length; i++) {
                result[i] = buffer.get(i).longValue();
            }
            return result;
        }
        if (value instanceof CharSequence) {
            // Delimited string of numbers.
            return convertArrayToArray(StringUtil.splitc(value.toString(), ArrayConverter.NUMBER_DELIMITERS));
        }
        // everything else:
        return convertToSingleElementArray(value);
    } }
|
public class class_name {
    // Converts a value (Collection / Iterable / CharSequence / scalar) into long[].
    // NOTE(review): trailing "// depends on control dependency" comments are
    // machine-generated program-dependence annotations; kept verbatim.
    protected long[] convertValueToArray(final Object value) {
        if (value instanceof Collection) {
            final Collection collection = (Collection) value;
            final long[] target = new long[collection.size()];
            int i = 0;
            for (final Object element : collection) {
                target[i] = convertType(element); // depends on control dependency: [for], data = [element]
                i++; // depends on control dependency: [for], data = [none]
            }
            return target; // depends on control dependency: [if], data = [none]
        }
        if (value instanceof Iterable) {
            final Iterable iterable = (Iterable) value;
            final ArrayList<Long> longArrayList = new ArrayList<>();
            for (final Object element : iterable) {
                final long convertedValue = convertType(element);
                longArrayList.add(Long.valueOf(convertedValue)); // depends on control dependency: [for], data = [none]
            }
            final long[] array = new long[longArrayList.size()];
            for (int i = 0; i < longArrayList.size(); i++) {
                final Long l = longArrayList.get(i);
                array[i] = l.longValue(); // depends on control dependency: [for], data = [i]
            }
            return array; // depends on control dependency: [if], data = [none]
        }
        if (value instanceof CharSequence) {
            final String[] strings = StringUtil.splitc(value.toString(), ArrayConverter.NUMBER_DELIMITERS);
            return convertArrayToArray(strings); // depends on control dependency: [if], data = [none]
        }
        // everything else:
        return convertToSingleElementArray(value);
    } }
|
public class class_name {
    /**
     * Computes the tile grid covering {@code boundingBox} within the matrix
     * defined by {@code totalBox}, clamping the column/row ranges to the matrix
     * only when the ranges actually overlap it.
     */
    public static TileGrid getTileGrid(BoundingBox totalBox, long matrixWidth,
            long matrixHeight, BoundingBox boundingBox) {
        long minColumn = getTileColumn(totalBox, matrixWidth,
                boundingBox.getMinLongitude());
        long maxColumn = getTileColumn(totalBox, matrixWidth,
                boundingBox.getMaxLongitude());
        if (minColumn < matrixWidth && maxColumn >= 0) {
            minColumn = Math.max(minColumn, 0);
            maxColumn = Math.min(maxColumn, matrixWidth - 1);
        }
        // Latitude axis is inverted: min latitude maps to the max row.
        long maxRow = getTileRow(totalBox, matrixHeight,
                boundingBox.getMinLatitude());
        long minRow = getTileRow(totalBox, matrixHeight,
                boundingBox.getMaxLatitude());
        if (minRow < matrixHeight && maxRow >= 0) {
            minRow = Math.max(minRow, 0);
            maxRow = Math.min(maxRow, matrixHeight - 1);
        }
        return new TileGrid(minColumn, minRow, maxColumn, maxRow);
    } }
|
public class class_name {
    // Computes a clamped tile grid for a bounding box within a tile matrix.
    // NOTE(review): trailing "// depends on control dependency" comments are
    // machine-generated program-dependence annotations; kept verbatim.
    public static TileGrid getTileGrid(BoundingBox totalBox, long matrixWidth,
            long matrixHeight, BoundingBox boundingBox) {
        long minColumn = getTileColumn(totalBox, matrixWidth,
                boundingBox.getMinLongitude());
        long maxColumn = getTileColumn(totalBox, matrixWidth,
                boundingBox.getMaxLongitude());
        if (minColumn < matrixWidth && maxColumn >= 0) {
            if (minColumn < 0) {
                minColumn = 0; // depends on control dependency: [if], data = [none]
            }
            if (maxColumn >= matrixWidth) {
                maxColumn = matrixWidth - 1; // depends on control dependency: [if], data = [none]
            }
        }
        long maxRow = getTileRow(totalBox, matrixHeight,
                boundingBox.getMinLatitude());
        long minRow = getTileRow(totalBox, matrixHeight,
                boundingBox.getMaxLatitude());
        if (minRow < matrixHeight && maxRow >= 0) {
            if (minRow < 0) {
                minRow = 0; // depends on control dependency: [if], data = [none]
            }
            if (maxRow >= matrixHeight) {
                maxRow = matrixHeight - 1; // depends on control dependency: [if], data = [none]
            }
        }
        TileGrid tileGrid = new TileGrid(minColumn, minRow, maxColumn, maxRow);
        return tileGrid;
    } }
|
public class class_name {
    /**
     * Parses a multipart entity stream into its parts, rethrowing parse
     * failures unchecked with the cause preserved.
     */
    private List<DataSource> parseParts(final InputStream entityInputStream,
            final String contentType) {
        try {
            return parsePartsCore(entityInputStream, contentType);
        } catch (IOException | MessagingException e) {
            // Both catch bodies were identical; merged into a Java 7 multi-catch.
            throw new RuntimeException(e);
        }
    } }
|
public class class_name {
    // Parses a multipart entity stream, wrapping checked failures unchecked.
    // NOTE(review): trailing "// depends on control dependency" comments are
    // machine-generated program-dependence annotations; kept verbatim.
    private List<DataSource> parseParts(final InputStream entityInputStream,
            final String contentType) {
        try {
            return parsePartsCore(entityInputStream, contentType); // depends on control dependency: [try], data = [none]
        } catch (IOException e) {
            throw new RuntimeException(e);
        } catch (MessagingException e) { // depends on control dependency: [catch], data = [none]
            throw new RuntimeException(e);
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Returns the internal thread cache (the live list, not a copy), emitting
     * entry/exit trace when tracing is enabled.
     */
    public ArrayList getThreads()
    {
        final boolean traceEnabled = tc.isEntryEnabled();
        if (traceEnabled)
        {
            SibTr.entry(tc, "getThreads");
            SibTr.exit(tc, "getThreads", _threadCache);
        }
        return _threadCache;
    } }
|
public class class_name {
    // Returns the live thread cache; traces entry/exit when enabled.
    // NOTE(review): trailing "// depends on control dependency" comments are
    // machine-generated program-dependence annotations; kept verbatim.
    public ArrayList getThreads()
    {
        if (tc.isEntryEnabled())
        {
            SibTr.entry(tc, "getThreads"); // depends on control dependency: [if], data = [none]
            SibTr.exit(tc, "getThreads", _threadCache); // depends on control dependency: [if], data = [none]
        }
        return _threadCache;
    } }
|
public class class_name {
    /**
     * Iterates the field entries, wrapping in a LazyIterator when any lazy
     * field is present so lazy values resolve on access.
     */
    public Iterator<Map.Entry<FieldDescriptorType, Object>> iterator() {
        Iterator<Map.Entry<FieldDescriptorType, Object>> raw = fields.entrySet().iterator();
        return hasLazyField ? new LazyIterator<FieldDescriptorType>(raw) : raw;
    } }
|
public class class_name {
    // Iterates field entries; lazy fields are resolved through a LazyIterator.
    // NOTE(review): trailing "// depends on control dependency" comments are
    // machine-generated program-dependence annotations; kept verbatim.
    public Iterator<Map.Entry<FieldDescriptorType, Object>> iterator() {
        if (hasLazyField) {
            return new LazyIterator<FieldDescriptorType>(
                    fields.entrySet().iterator()); // depends on control dependency: [if], data = [none]
        }
        return fields.entrySet().iterator();
    } }
|
public class class_name {
    /** (Re)installs the BouncyCastle JCE provider and logs system info. */
    private static void addProvider(){
        if (Security.getProvider("BC") != null) {
            // Works around an unexplained BC failure when Tomcat auto-reloads
            // during Eclipse debugging: drop the stale provider before re-adding.
            Security.removeProvider("BC");
            Security.addProvider(new org.bouncycastle.jce.provider.BouncyCastleProvider());
            LogUtil.writeLog("re-add BC provider");
        } else {
            LogUtil.writeLog("add BC provider");
            Security.addProvider(new org.bouncycastle.jce.provider.BouncyCastleProvider());
        }
        printSysInfo();
    } }
|
public class class_name {
    // Installs the BouncyCastle provider, replacing a stale instance if present.
    // NOTE(review): trailing "// depends on control dependency" comments are
    // machine-generated program-dependence annotations; kept verbatim.
    private static void addProvider(){
        if (Security.getProvider("BC") == null) {
            LogUtil.writeLog("add BC provider"); // depends on control dependency: [if], data = [none]
            Security.addProvider(new org.bouncycastle.jce.provider.BouncyCastleProvider()); // depends on control dependency: [if], data = [none]
        } else {
            Security.removeProvider("BC"); // works around an unexplained BC exception when Tomcat auto-reloads during Eclipse debugging // depends on control dependency: [if], data = [none]
            Security.addProvider(new org.bouncycastle.jce.provider.BouncyCastleProvider()); // depends on control dependency: [if], data = [none]
            LogUtil.writeLog("re-add BC provider"); // depends on control dependency: [if], data = [none]
        }
        printSysInfo();
    } }
|
public class class_name {
@Override
protected void onScrollChanged(int l, int t, int oldl, int oldt) {
super.onScrollChanged(l, t, oldl, oldt);
if (mTrackedChild == null) {
if (getChildCount() > 0) {
mTrackedChild = getChildInTheMiddle();
mTrackedChildPrevTop = mTrackedChild.getTop();
mTrackedChildPrevPosition = getPositionForView(mTrackedChild);
}
} else {
boolean childIsSafeToTrack = mTrackedChild.getParent() == this && getPositionForView(mTrackedChild) == mTrackedChildPrevPosition;
if (childIsSafeToTrack) {
int top = mTrackedChild.getTop();
float deltaY = top - mTrackedChildPrevTop;
if (deltaY == 0) {
//When we scroll so fast the list this value becomes 0 all the time
// so we don't want the other list stop, and we give it the last
//no 0 value we have
deltaY = OldDeltaY;
} else {
OldDeltaY = deltaY;
}
updateChildrenControlPoints(deltaY);
mTrackedChildPrevTop = top;
} else {
mTrackedChild = null;
}
}
} }
|
public class class_name {
@Override
protected void onScrollChanged(int l, int t, int oldl, int oldt) {
super.onScrollChanged(l, t, oldl, oldt);
if (mTrackedChild == null) {
if (getChildCount() > 0) {
mTrackedChild = getChildInTheMiddle(); // depends on control dependency: [if], data = [none]
mTrackedChildPrevTop = mTrackedChild.getTop(); // depends on control dependency: [if], data = [none]
mTrackedChildPrevPosition = getPositionForView(mTrackedChild); // depends on control dependency: [if], data = [none]
}
} else {
boolean childIsSafeToTrack = mTrackedChild.getParent() == this && getPositionForView(mTrackedChild) == mTrackedChildPrevPosition;
if (childIsSafeToTrack) {
int top = mTrackedChild.getTop();
float deltaY = top - mTrackedChildPrevTop;
if (deltaY == 0) {
//When we scroll so fast the list this value becomes 0 all the time
// so we don't want the other list stop, and we give it the last
//no 0 value we have
deltaY = OldDeltaY; // depends on control dependency: [if], data = [none]
} else {
OldDeltaY = deltaY; // depends on control dependency: [if], data = [none]
}
updateChildrenControlPoints(deltaY); // depends on control dependency: [if], data = [none]
mTrackedChildPrevTop = top; // depends on control dependency: [if], data = [none]
} else {
mTrackedChild = null; // depends on control dependency: [if], data = [none]
}
}
} }
|
public class class_name {
  /**
   * Links the given source files into {@code outputFile} by invoking the
   * configured linker command. If the assembled command line exceeds the
   * platform maximum, the arguments are spilled into a response file first.
   *
   * @throws BuildException if the response file cannot be written or the
   *         linker exits with a non-zero status
   */
  public void link(final CCTask task, final File outputFile, final String[] sourceFiles,
      final CommandLineLinkerConfiguration config) throws BuildException {
    final File parentDir = new File(outputFile.getParent());
    String parentPath;
    try {
      parentPath = parentDir.getCanonicalPath();
    } catch (final IOException ex) {
      // Fall back to the absolute path when canonicalisation fails.
      parentPath = parentDir.getAbsolutePath();
    }
    String[] execArgs = prepareArguments(task, parentPath, outputFile.getName(), sourceFiles, config);
    // Measure the command line, counting one separator per argument.
    int commandLength = 0;
    for (final String arg : execArgs) {
      commandLength += arg.length() + 1;
    }
    if (commandLength >= this.getMaximumCommandLength()) {
      // Too long for direct invocation: move everything but the command
      // name into a temporary response file.
      try {
        execArgs = prepareResponseFile(outputFile, execArgs);
      } catch (final IOException ex) {
        throw new BuildException(ex);
      }
    }
    final int retval = runCommand(task, parentDir, execArgs);
    if (retval != 0) {
      // The linker reported failure; surface it as a build error.
      throw new BuildException(getCommandWithPath(config) + " failed with return code " + retval,
          task.getLocation());
    }
  } }
|
public class class_name {
  // Annotated variant of link(): trailing comments are tool-generated
  // control-dependency labels; code mirrors the clean version above.
  public void link(final CCTask task, final File outputFile, final String[] sourceFiles,
      final CommandLineLinkerConfiguration config) throws BuildException {
    final File parentDir = new File(outputFile.getParent());
    String parentPath;
    try {
      parentPath = parentDir.getCanonicalPath();
    } catch (final IOException ex) {
      parentPath = parentDir.getAbsolutePath();
    }
    String[] execArgs = prepareArguments(task, parentPath, outputFile.getName(), sourceFiles, config);
    int commandLength = 0;
    for (final String execArg : execArgs) {
      commandLength += execArg.length() + 1;
    }
    //
    // if command length exceeds maximum
    // then create a temporary
    // file containing everything but the command name
    if (commandLength >= this.getMaximumCommandLength()) {
      try {
        execArgs = prepareResponseFile(outputFile, execArgs); // depends on control dependency: [try], data = [none]
      } catch (final IOException ex) {
        throw new BuildException(ex);
      } // depends on control dependency: [catch], data = [none]
    }
    final int retval = runCommand(task, parentDir, execArgs);
    //
    // if the process returned a failure code then
    // throw an BuildException
    //
    if (retval != 0) {
      //
      // construct the exception
      //
      throw new BuildException(getCommandWithPath(config) + " failed with return code " + retval, task.getLocation());
    }
  } }
|
public class class_name {
    /**
     * Parses the CSS {@code box-shadow} shorthand: either a single identifier
     * (handled by {@code genericOneIdent}) or one or more comma-separated
     * shadows of the form {@code inset? <length>{2,4} <color>?}.
     *
     * @param d the declaration to parse
     * @param properties receives the recognised property on success
     * @param values receives the collected term list on success
     * @return {@code true} when the declaration was recognised and stored,
     *         {@code false} otherwise
     */
    @SuppressWarnings("unused")
    private boolean processBoxShadow(Declaration d,
            Map<String, CSSProperty> properties, Map<String, Term<?>> values) {
        if (d.size() == 1 && genericOneIdent(BoxShadow.class, d, properties)) {
            return true;
        }
        // inset? && <length>{2,4} && <color>?
        TermList list = tf.createList();
        int lengthCount = 0;      // lengths seen in the current shadow (2..4 allowed)
        int lastLengthIndex = -1; // index of the most recent length term
        int insetIndex = -1;      // index of the optional "inset" keyword
        int colorIndex = -1;      // index of the optional color term
        for (int i = 0; i < d.size(); i++) {
            Term t = d.get(i);
            if (t.getOperator() == Operator.COMMA) {
                // A comma starts the next shadow; the previous one must have
                // accumulated at least two lengths.
                if (lengthCount < 2) {
                    return false;
                }
                lengthCount = 0;
                lastLengthIndex = -1;
                insetIndex = -1;
                colorIndex = -1;
            }
            if (t instanceof TermColor && colorIndex < 0) {
                colorIndex = i;
            } else if (t instanceof TermIdent
                    && ((TermIdent) t).getValue().equalsIgnoreCase("inset")
                    && insetIndex < 0) {
                insetIndex = i;
            } else if (t instanceof TermLength
                    && (lastLengthIndex < 0
                            || (lastLengthIndex > insetIndex && lastLengthIndex > colorIndex))) {
                // BUG FIX: the original condition was
                //   (t instanceof TermLength && lastLengthIndex < 0) || (contiguity)
                // because && binds tighter than ||, so any term (e.g. a second
                // color) was counted as a length whenever the contiguity test
                // held. The instanceof check must guard both alternatives.
                if (lengthCount >= 4) {
                    return false;
                }
                lastLengthIndex = i;
                lengthCount++;
            } else {
                // Unrecognised term, or a duplicate color/inset/length run.
                return false;
            }
            list.add(t);
        }
        if (lengthCount < 2) {
            return false;
        }
        properties.put(d.getProperty(), BoxShadow.component_values);
        values.put(d.getProperty(), list);
        return true;
    } }
|
public class class_name {
    // Annotated variant of processBoxShadow(): trailing comments are
    // tool-generated control-dependency labels; code mirrors the clean
    // version above.
    // NOTE(review): the third else-if parses as
    // (instanceof && lastLengthIndex < 0) || (contiguity) because && binds
    // tighter than || — likely missing parentheses; confirm against intent.
    @SuppressWarnings("unused")
    private boolean processBoxShadow(Declaration d,
            Map<String, CSSProperty> properties, Map<String, Term<?>> values) {
        if (d.size() == 1 && genericOneIdent(BoxShadow.class, d, properties)) {
            return true;
        }
        // inset? && <length>{2,4} && <color>?
        TermList list = tf.createList();
        int lengthCount = 0;
        int lastLengthIndex = -1;
        int insetIndex = -1;
        int colorIndex = -1;
        for (int i = 0; i < d.size(); i++) {
            Term t = d.get(i);
            if (t.getOperator() == Operator.COMMA) {
                if (lengthCount < 2) {
                    return false; // depends on control dependency: [if], data = [none]
                }
                lengthCount = 0;
                lastLengthIndex = -1;
                insetIndex = -1;
                colorIndex = -1;
            }
            if (t instanceof TermColor && colorIndex < 0) {
                colorIndex = i;
            } else if (t instanceof TermIdent
                    && ((TermIdent) t).getValue().equalsIgnoreCase("inset")
                    && insetIndex < 0) {
                insetIndex = i;
            } else if (t instanceof TermLength
                    && lastLengthIndex < 0
                    || (lastLengthIndex > insetIndex && lastLengthIndex > colorIndex)) {
                if (lengthCount >= 4) {
                    return false; // depends on control dependency: [if], data = [none]
                }
                lastLengthIndex = i;
                lengthCount++;
            } else {
                return false;
            }
            list.add(t);
        }
        if (lengthCount < 2) {
            return false;
        }
        properties.put(d.getProperty(), BoxShadow.component_values);
        values.put(d.getProperty(), list);
        return true;
    } }
|
public class class_name {
    /**
     * Marshals every field of the given {@code JobFlowInstancesConfig} into
     * the request via the supplied {@code ProtocolMarshaller}.
     *
     * @param jobFlowInstancesConfig the config to marshall; must not be null
     * @param protocolMarshaller the marshaller to write bindings to
     * @throws SdkClientException if the config is {@code null} or any field
     *         fails to marshall (the original exception is wrapped as cause)
     */
    public void marshall(JobFlowInstancesConfig jobFlowInstancesConfig, ProtocolMarshaller protocolMarshaller) {
        if (jobFlowInstancesConfig == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(jobFlowInstancesConfig.getMasterInstanceType(), MASTERINSTANCETYPE_BINDING);
            protocolMarshaller.marshall(jobFlowInstancesConfig.getSlaveInstanceType(), SLAVEINSTANCETYPE_BINDING);
            protocolMarshaller.marshall(jobFlowInstancesConfig.getInstanceCount(), INSTANCECOUNT_BINDING);
            protocolMarshaller.marshall(jobFlowInstancesConfig.getInstanceGroups(), INSTANCEGROUPS_BINDING);
            protocolMarshaller.marshall(jobFlowInstancesConfig.getInstanceFleets(), INSTANCEFLEETS_BINDING);
            protocolMarshaller.marshall(jobFlowInstancesConfig.getEc2KeyName(), EC2KEYNAME_BINDING);
            protocolMarshaller.marshall(jobFlowInstancesConfig.getPlacement(), PLACEMENT_BINDING);
            protocolMarshaller.marshall(jobFlowInstancesConfig.getKeepJobFlowAliveWhenNoSteps(), KEEPJOBFLOWALIVEWHENNOSTEPS_BINDING);
            protocolMarshaller.marshall(jobFlowInstancesConfig.getTerminationProtected(), TERMINATIONPROTECTED_BINDING);
            protocolMarshaller.marshall(jobFlowInstancesConfig.getHadoopVersion(), HADOOPVERSION_BINDING);
            protocolMarshaller.marshall(jobFlowInstancesConfig.getEc2SubnetId(), EC2SUBNETID_BINDING);
            protocolMarshaller.marshall(jobFlowInstancesConfig.getEc2SubnetIds(), EC2SUBNETIDS_BINDING);
            protocolMarshaller.marshall(jobFlowInstancesConfig.getEmrManagedMasterSecurityGroup(), EMRMANAGEDMASTERSECURITYGROUP_BINDING);
            protocolMarshaller.marshall(jobFlowInstancesConfig.getEmrManagedSlaveSecurityGroup(), EMRMANAGEDSLAVESECURITYGROUP_BINDING);
            protocolMarshaller.marshall(jobFlowInstancesConfig.getServiceAccessSecurityGroup(), SERVICEACCESSSECURITYGROUP_BINDING);
            protocolMarshaller.marshall(jobFlowInstancesConfig.getAdditionalMasterSecurityGroups(), ADDITIONALMASTERSECURITYGROUPS_BINDING);
            protocolMarshaller.marshall(jobFlowInstancesConfig.getAdditionalSlaveSecurityGroups(), ADDITIONALSLAVESECURITYGROUPS_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    } }
|
public class class_name {
    // Annotated variant of marshall(): trailing comments are tool-generated
    // control-dependency labels; code mirrors the clean version above.
    public void marshall(JobFlowInstancesConfig jobFlowInstancesConfig, ProtocolMarshaller protocolMarshaller) {
        if (jobFlowInstancesConfig == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(jobFlowInstancesConfig.getMasterInstanceType(), MASTERINSTANCETYPE_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(jobFlowInstancesConfig.getSlaveInstanceType(), SLAVEINSTANCETYPE_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(jobFlowInstancesConfig.getInstanceCount(), INSTANCECOUNT_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(jobFlowInstancesConfig.getInstanceGroups(), INSTANCEGROUPS_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(jobFlowInstancesConfig.getInstanceFleets(), INSTANCEFLEETS_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(jobFlowInstancesConfig.getEc2KeyName(), EC2KEYNAME_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(jobFlowInstancesConfig.getPlacement(), PLACEMENT_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(jobFlowInstancesConfig.getKeepJobFlowAliveWhenNoSteps(), KEEPJOBFLOWALIVEWHENNOSTEPS_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(jobFlowInstancesConfig.getTerminationProtected(), TERMINATIONPROTECTED_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(jobFlowInstancesConfig.getHadoopVersion(), HADOOPVERSION_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(jobFlowInstancesConfig.getEc2SubnetId(), EC2SUBNETID_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(jobFlowInstancesConfig.getEc2SubnetIds(), EC2SUBNETIDS_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(jobFlowInstancesConfig.getEmrManagedMasterSecurityGroup(), EMRMANAGEDMASTERSECURITYGROUP_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(jobFlowInstancesConfig.getEmrManagedSlaveSecurityGroup(), EMRMANAGEDSLAVESECURITYGROUP_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(jobFlowInstancesConfig.getServiceAccessSecurityGroup(), SERVICEACCESSSECURITYGROUP_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(jobFlowInstancesConfig.getAdditionalMasterSecurityGroups(), ADDITIONALMASTERSECURITYGROUPS_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(jobFlowInstancesConfig.getAdditionalSlaveSecurityGroups(), ADDITIONALSLAVESECURITYGROUPS_BINDING); // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Builds a filter matching documents whose transaction field is missing.
     * Applies only when the criteria carries an explicitly blank (non-null,
     * empty after trimming) transaction; otherwise no filter is produced.
     *
     * @return a missing-field filter, or {@code null} when not applicable
     */
    public static FilterBuilder buildFilter(Criteria criteria) {
        final String transaction = criteria.getTransaction();
        if (transaction == null || !transaction.trim().isEmpty()) {
            return null;
        }
        return FilterBuilders.missingFilter(TRANSACTION_FIELD);
    } }
|
public class class_name {
    // Annotated variant of buildFilter(): the trailing comment is a
    // tool-generated control-dependency label; code mirrors the clean version.
    public static FilterBuilder buildFilter(Criteria criteria) {
        if (criteria.getTransaction() != null && criteria.getTransaction().trim().isEmpty()) {
            return FilterBuilders.missingFilter(TRANSACTION_FIELD); // depends on control dependency: [if], data = [none]
        }
        return null;
    } }
|
public class class_name {
    /**
     * Recursively collects the container structure beneath a model instance,
     * rewriting container names and element settings so they reference the
     * replacement instance id instead of the model's.
     *
     * @param modelInstanceId the instance id of the model element whose nested
     *        containers should be collected
     * @param replaceModelId the instance id that replaces the model id in
     *        container names and element settings
     * @param containerByParent nested containers, keyed by parent instance id
     * @param isCopyGroup flag forwarded to the settings adjustment
     * @return the adjusted containers of the given instance and all descendants
     */
    private List<CmsContainerBean> collectModelStructure(
        String modelInstanceId,
        String replaceModelId,
        Map<String, List<CmsContainerBean>> containerByParent,
        boolean isCopyGroup) {
        List<CmsContainerBean> result = new ArrayList<CmsContainerBean>();
        if (containerByParent.containsKey(modelInstanceId)) {
            for (CmsContainerBean container : containerByParent.get(modelInstanceId)) {
                // Container names are "<instanceId><suffix>"; swap the prefix.
                String adjustedContainerName = replaceModelId + container.getName().substring(modelInstanceId.length());
                List<CmsContainerElementBean> elements = new ArrayList<CmsContainerElementBean>();
                for (CmsContainerElementBean element : container.getElements()) {
                    CmsContainerElementBean copyElement = adjustSettings(
                        element,
                        container.getName(),
                        adjustedContainerName,
                        isCopyGroup);
                    if (m_sessionCache != null) {
                        // Register the adjusted element under its editor hash.
                        m_sessionCache.setCacheContainerElement(copyElement.editorHash(), copyElement);
                    }
                    elements.add(copyElement);
                    // Recurse into containers nested below this element.
                    result.addAll(
                        collectModelStructure(
                            element.getInstanceId(),
                            copyElement.getInstanceId(),
                            containerByParent,
                            isCopyGroup));
                }
                result.add(
                    new CmsContainerBean(
                        adjustedContainerName,
                        container.getType(),
                        replaceModelId,
                        container.isRootContainer(),
                        container.getMaxElements(),
                        elements));
            }
        }
        return result;
    } }
|
public class class_name {
    // Annotated variant of collectModelStructure(): trailing comments are
    // tool-generated control-dependency labels; code mirrors the clean
    // version above.
    private List<CmsContainerBean> collectModelStructure(
        String modelInstanceId,
        String replaceModelId,
        Map<String, List<CmsContainerBean>> containerByParent,
        boolean isCopyGroup) {
        List<CmsContainerBean> result = new ArrayList<CmsContainerBean>();
        if (containerByParent.containsKey(modelInstanceId)) {
            for (CmsContainerBean container : containerByParent.get(modelInstanceId)) {
                String adjustedContainerName = replaceModelId + container.getName().substring(modelInstanceId.length());
                List<CmsContainerElementBean> elements = new ArrayList<CmsContainerElementBean>();
                for (CmsContainerElementBean element : container.getElements()) {
                    CmsContainerElementBean copyElement = adjustSettings(
                        element,
                        container.getName(),
                        adjustedContainerName,
                        isCopyGroup);
                    if (m_sessionCache != null) {
                        m_sessionCache.setCacheContainerElement(copyElement.editorHash(), copyElement); // depends on control dependency: [if], data = [none]
                    }
                    elements.add(copyElement); // depends on control dependency: [for], data = [element]
                    result.addAll(
                        collectModelStructure(
                            element.getInstanceId(),
                            copyElement.getInstanceId(),
                            containerByParent,
                            isCopyGroup)); // depends on control dependency: [for], data = [none]
                }
                result.add(
                    new CmsContainerBean(
                        adjustedContainerName,
                        container.getType(),
                        replaceModelId,
                        container.isRootContainer(),
                        container.getMaxElements(),
                        elements)); // depends on control dependency: [for], data = [none]
            }
        }
        return result;
    } }
|
public class class_name {
        /**
         * Selects the first entry of the properties combo (if any) and
         * applies its associated interpolator to the panel.
         */
        public void setFirstProperty() {
            if (properties.getItemCount() == 0) {
                return;
            }
            properties.setSelectedIndex(0);
            final LinearInterpolator interpolator =
                    (LinearInterpolator) values.get(properties.getSelectedItem());
            panel.setInterpolator(interpolator);
        } }
|
public class class_name {
        // Annotated variant of setFirstProperty(): the standalone comments are
        // tool-generated control-dependency labels; code mirrors the clean
        // version above.
        public void setFirstProperty() {
            if (properties.getItemCount() > 0) {
                properties.setSelectedIndex(0);
                // depends on control dependency: [if], data = [0)]
                LinearInterpolator currentValue = (LinearInterpolator) values
                        .get(properties.getSelectedItem());
                panel.setInterpolator(currentValue);
                // depends on control dependency: [if], data = [none]
            }
        } }
|
public class class_name {
   /**
    * Replaces this node's access control list with the given permissions,
    * expressed as a map from identity to an array of permission names.
    *
    * @param permissions identity -> String[] permission map; must be non-empty
    *        and contain no null keys or values
    * @throws AccessControlException if the node is not exo:privilegeable
    * @throws RepositoryException if the map is empty or contains nulls
    */
   public void setPermissions(Map permissions) throws RepositoryException, AccessDeniedException,
      AccessControlException
   {
      checkValid();
      if (!isNodeType(Constants.EXO_PRIVILEGEABLE))
      {
         throw new AccessControlException("Node is not exo:privilegeable " + getPath());
      }
      if (permissions.size() == 0)
      {
         throw new RepositoryException("Permission map size cannot be 0");
      }
      checkPermission(PermissionType.CHANGE_PERMISSION);
      // Flatten the map into one ACL entry per (identity, permission) pair.
      List<AccessControlEntry> aces = new ArrayList<AccessControlEntry>();
      for (Object key : permissions.keySet())
      {
         String identity = (String)key;
         if (identity == null)
         {
            throw new RepositoryException("Identity cannot be null");
         }
         String[] actions = (String[])permissions.get(identity);
         if (actions == null)
         {
            throw new RepositoryException("Permissions cannot be null");
         }
         for (String action : actions)
         {
            aces.add(new AccessControlEntry(identity, action));
         }
      }
      AccessControlList acl = new AccessControlList(getACL().getOwner(), aces);
      setACL(acl);
      updatePermissions(acl);
   } }
|
public class class_name {
   // Annotated variant of setPermissions(): the trailing comment is a
   // tool-generated control-dependency label; code mirrors the clean version.
   public void setPermissions(Map permissions) throws RepositoryException, AccessDeniedException,
      AccessControlException
   {
      checkValid();
      if (!isNodeType(Constants.EXO_PRIVILEGEABLE))
      {
         throw new AccessControlException("Node is not exo:privilegeable " + getPath());
      }
      if (permissions.size() == 0)
      {
         throw new RepositoryException("Permission map size cannot be 0");
      }
      checkPermission(PermissionType.CHANGE_PERMISSION);
      List<AccessControlEntry> aces = new ArrayList<AccessControlEntry>();
      for (Iterator<String> i = permissions.keySet().iterator(); i.hasNext();)
      {
         String identity = i.next();
         if (identity == null)
         {
            throw new RepositoryException("Identity cannot be null");
         }
         String[] perm = (String[])permissions.get(identity);
         if (perm == null)
         {
            throw new RepositoryException("Permissions cannot be null");
         }
         for (int j = 0; j < perm.length; j++)
         {
            AccessControlEntry ace = new AccessControlEntry(identity, perm[j]);
            aces.add(ace); // depends on control dependency: [for], data = [none]
         }
      }
      AccessControlList acl = new AccessControlList(getACL().getOwner(), aces);
      setACL(acl);
      updatePermissions(acl);
   } }
|
public class class_name {
  /**
   * Tests whether the {@code length} characters starting at {@code start}
   * are all XML whitespace, walking chunk by chunk through the backing
   * storage (delegating to the inner FSB for chunk 0 when present).
   *
   * @return {@code true} if every character in the range is whitespace
   */
  public boolean isWhitespace(int start, int length)
  {
    int chunk = start >>> m_chunkBits;
    int column = start & m_chunkMask;
    int available = m_chunkSize - column;
    int remaining = length;
    while (remaining > 0)
    {
      final int run = Math.min(remaining, available);
      final boolean chunkOK = (chunk == 0 && m_innerFSB != null)
          ? m_innerFSB.isWhitespace(column, run)
          : org.apache.xml.utils.XMLCharacterRecognizer.isWhiteSpace(
              m_array[chunk], column, run);
      if (!chunkOK)
        return false;
      remaining -= run;
      // Subsequent chunks are scanned from their first column.
      ++chunk;
      column = 0;
      available = m_chunkSize;
    }
    return true;
  } }
|
public class class_name {
  // Annotated variant of isWhitespace(): trailing comments are tool-generated
  // control-dependency labels; code mirrors the clean version above.
  public boolean isWhitespace(int start, int length)
  {
    int sourcechunk = start >>> m_chunkBits;
    int sourcecolumn = start & m_chunkMask;
    int available = m_chunkSize - sourcecolumn;
    boolean chunkOK;
    while (length > 0)
    {
      int runlength = (length <= available) ? length : available;
      if (sourcechunk == 0 && m_innerFSB != null)
        chunkOK = m_innerFSB.isWhitespace(sourcecolumn, runlength);
      else
        chunkOK = org.apache.xml.utils.XMLCharacterRecognizer.isWhiteSpace(
            m_array[sourcechunk], sourcecolumn, runlength);
      if (!chunkOK)
        return false;
      length -= runlength; // depends on control dependency: [while], data = [none]
      ++sourcechunk; // depends on control dependency: [while], data = [none]
      sourcecolumn = 0; // depends on control dependency: [while], data = [none]
      available = m_chunkSize; // depends on control dependency: [while], data = [none]
    }
    return true;
  } }
|
public class class_name {
    /**
     * Resolves the entity metadata for {@code entityClass}.
     *
     * <p>Metadata may legitimately be absent for scalar native queries; for
     * non-native queries missing metadata is an error.
     *
     * @return the metadata, or {@code null} for a scalar native query
     * @throws KunderaException if the query is not native and no metadata
     *         could be loaded
     */
    public final EntityMetadata getEntityMetadata() {
        EntityMetadata metadata;
        try {
            metadata = KunderaMetadataManager.getEntityMetadata(kunderaMetadata, entityClass);
        } catch (KunderaException e) {
            logger.info("No Entity class provided, Proceeding as Scalar Query");
            metadata = null;
        }
        if (metadata == null && !this.isNativeQuery) {
            throw new KunderaException("Unable to load entity metadata for : " + entityClass);
        }
        return metadata;
    } }
|
public class class_name {
    // Annotated variant of getEntityMetadata(): trailing comments are
    // tool-generated control-dependency labels; code mirrors the clean version.
    public final EntityMetadata getEntityMetadata() {
        EntityMetadata metadata = null;
        try {
            metadata = KunderaMetadataManager.getEntityMetadata(kunderaMetadata, entityClass); // depends on control dependency: [try], data = [none]
        } catch (KunderaException e) {
            logger.info("No Entity class provided, Proceeding as Scalar Query");
        } // depends on control dependency: [catch], data = [none]
        if (!this.isNativeQuery && metadata == null) {
            throw new KunderaException("Unable to load entity metadata for : " + entityClass);
        }
        return metadata;
    } }
|
public class class_name {
    /**
     * Converts HTML input to bytes without ever throwing: if conversion
     * fails, the raw bytes of the input are returned instead (using the
     * configured encoding, or the platform default as a last resort).
     */
    public byte[] convertToByteSilent(String htmlInput) {
        try {
            return convertToByte(htmlInput.getBytes(getEncoding()));
        } catch (Exception e) {
            if (LOG.isWarnEnabled()) {
                LOG.warn(Messages.get().getBundle().key(Messages.LOG_CONVERSION_BYTE_FAILED_0), e);
            }
        }
        // Conversion failed: fall back to the input's own bytes.
        try {
            return htmlInput.getBytes(getEncoding());
        } catch (UnsupportedEncodingException e) {
            if (LOG.isWarnEnabled()) {
                LOG.warn(Messages.get().getBundle().key(Messages.LOG_CONVERSION_BYTE_FAILED_0), e);
            }
            return htmlInput.getBytes();
        }
    } }
|
public class class_name {
    // Annotated variant of convertToByteSilent(): trailing comments are
    // tool-generated control-dependency labels; code mirrors the clean version.
    public byte[] convertToByteSilent(String htmlInput) {
        try {
            return convertToByte(htmlInput.getBytes(getEncoding())); // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            if (LOG.isWarnEnabled()) {
                LOG.warn(Messages.get().getBundle().key(Messages.LOG_CONVERSION_BYTE_FAILED_0), e); // depends on control dependency: [if], data = [none]
            }
            try {
                return htmlInput.getBytes(getEncoding()); // depends on control dependency: [try], data = [none]
            } catch (UnsupportedEncodingException e1) {
                if (LOG.isWarnEnabled()) {
                    LOG.warn(Messages.get().getBundle().key(Messages.LOG_CONVERSION_BYTE_FAILED_0), e1); // depends on control dependency: [if], data = [none]
                }
                return htmlInput.getBytes();
            } // depends on control dependency: [catch], data = [none]
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
	/**
	 * Extracts the application label from TLV data, preferring the EMV
	 * "application preferred name" tag over the plain "application label".
	 *
	 * @return the decoded label, or {@code null} when neither tag is present
	 */
	protected String extractApplicationLabel(final byte[] pData) {
		if (LOGGER.isDebugEnabled()) {
			LOGGER.debug("Extract Application label");
		}
		// Preferred name takes precedence over the plain label.
		byte[] raw = TlvUtil.getValue(pData, EmvTags.APPLICATION_PREFERRED_NAME);
		if (raw == null) {
			raw = TlvUtil.getValue(pData, EmvTags.APPLICATION_LABEL);
		}
		// NOTE(review): new String(byte[]) decodes with the platform default
		// charset — confirm that matches the card's label encoding everywhere.
		return raw == null ? null : new String(raw);
	} }
|
public class class_name {
	// Annotated variant of extractApplicationLabel(): trailing comments are
	// tool-generated control-dependency labels; code mirrors the clean version.
	protected String extractApplicationLabel(final byte[] pData) {
		if (LOGGER.isDebugEnabled()) {
			LOGGER.debug("Extract Application label"); // depends on control dependency: [if], data = [none]
		}
		String label = null;
		// Get Preferred name first
		byte[] labelByte = TlvUtil.getValue(pData, EmvTags.APPLICATION_PREFERRED_NAME);
		// Get Application label
		if (labelByte == null) {
			labelByte = TlvUtil.getValue(pData, EmvTags.APPLICATION_LABEL); // depends on control dependency: [if], data = [none]
		}
		// Convert to String
		if (labelByte != null) {
			label = new String(labelByte); // depends on control dependency: [if], data = [(labelByte]
		}
		return label;
	} }
|
public class class_name {
@Override
public Iterator<Location> iterator() {
List<Location> list;
if(isComplex()) {
list = getSubLocations();
}
else {
list = new ArrayList<Location>();
list.add(this);
}
return list.iterator();
} }
|
public class class_name {
@Override
public Iterator<Location> iterator() {
List<Location> list;
if(isComplex()) {
list = getSubLocations(); // depends on control dependency: [if], data = [none]
}
else {
list = new ArrayList<Location>(); // depends on control dependency: [if], data = [none]
list.add(this); // depends on control dependency: [if], data = [none]
}
return list.iterator();
} }
|
public class class_name {
    /**
     * Decodes the query/body string into bytes using the given encoding (or
     * the platform default when {@code encoding} is null) and delegates to
     * the byte-array overload. Null or empty input is ignored.
     */
    public static void parseParameters( Map<String, String[]> map,
                                        String data,
                                        String encoding ) throws UnsupportedEncodingException {
        if (data == null || data.length() == 0) {
            return;
        }
        byte[] bytes = null;
        try {
            bytes = (encoding == null) ? data.getBytes() : data.getBytes(encoding);
        } catch (UnsupportedEncodingException ignored) {
            // Deliberately swallowed: fall through with null bytes, matching
            // the historical behaviour of this helper.
        }
        parseParameters(map, bytes, encoding);
    } }
|
public class class_name {
    // Annotated variant of parseParameters(): trailing comments are
    // tool-generated control-dependency labels; code mirrors the clean version.
    public static void parseParameters( Map<String, String[]> map,
                                        String data,
                                        String encoding ) throws UnsupportedEncodingException {
        if ((data != null) && (data.length() > 0)) {
            // use the specified encoding to extract bytes out of the
            // given string so that the encoding is not lost. If an
            // encoding is not specified, let it use platform default
            byte[] bytes = null;
            try {
                if (encoding == null) {
                    bytes = data.getBytes(); // depends on control dependency: [if], data = [none]
                } else {
                    bytes = data.getBytes(encoding); // depends on control dependency: [if], data = [(encoding]
                }
            } catch (UnsupportedEncodingException uee) {
            } // depends on control dependency: [catch], data = [none]
            parseParameters(map, bytes, encoding);
        }
    } }
|
public class class_name {
@Api
public void setUseCache(boolean useCache) {
if (null == cacheManagerService && useCache) {
log.warn("The caching plugin needs to be available to cache WMS requests. Not setting useCache.");
} else {
this.useCache = useCache;
}
} }
|
public class class_name {
@Api
public void setUseCache(boolean useCache) {
if (null == cacheManagerService && useCache) {
log.warn("The caching plugin needs to be available to cache WMS requests. Not setting useCache."); // depends on control dependency: [if], data = [none]
} else {
this.useCache = useCache; // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
    /**
     * Returns a pseudo-random float in {@code [startInclusive, endInclusive]}.
     * Both bounds must be non-negative and {@code endInclusive} must not be
     * smaller than {@code startInclusive}.
     */
    public static float nextFloat(final float startInclusive, final float endInclusive) {
        Validate.isTrue(endInclusive >= startInclusive,
            "Start value must be smaller or equal to end value.");
        Validate.isTrue(startInclusive >= 0, "Both range values must be non-negative.");
        if (startInclusive == endInclusive) {
            // Degenerate range: only one possible value.
            return startInclusive;
        }
        final float range = endInclusive - startInclusive;
        return startInclusive + range * RANDOM.nextFloat();
    } }
|
public class class_name {
    // Annotated variant of nextFloat(): the trailing comment is a
    // tool-generated control-dependency label; code mirrors the clean version.
    public static float nextFloat(final float startInclusive, final float endInclusive) {
        Validate.isTrue(endInclusive >= startInclusive,
            "Start value must be smaller or equal to end value.");
        Validate.isTrue(startInclusive >= 0, "Both range values must be non-negative.");
        if (startInclusive == endInclusive) {
            return startInclusive; // depends on control dependency: [if], data = [none]
        }
        return startInclusive + ((endInclusive - startInclusive) * RANDOM.nextFloat());
    } }
|
public class class_name {
    /**
     * Returns the MIME-type to application-id associations parsed from the
     * configured mimeapps list files, lazily loading them on first call.
     *
     * <p>NOTE(review): the lazy initialisation of {@code mimeApps} is not
     * synchronised — confirm callers are single-threaded. Also,
     * {@code FileReader} decodes with the platform default charset — confirm
     * that matches the mimeapps.list file encoding.
     *
     * @return an unmodifiable view of the MIME-type -> application-ids map
     * @throws OSException if reading one of the list files fails
     */
    public Map<String, Set<String>> getMimeApps() throws OSException {
        // lazy initialization
        if (mimeApps == null) {
            mimeApps = new HashMap<>();
            for (String path : MIME_APPS_LISTS) {
                File mimeAppListFile = new File(path);
                if (mimeAppListFile.exists() && mimeAppListFile.isFile() && mimeAppListFile.canRead()) {
                    try (BufferedReader br = new BufferedReader(new FileReader(mimeAppListFile))) {
                        for (String line; (line = br.readLine()) != null;) {
                            String mimeType = null;
                            Set<String> apps = new HashSet<>();
                            // Group 1 captures the MIME type, group 2 each app id.
                            Matcher extMatcher = MIME_TYPE_APPS_PATTERN.matcher(line);
                            while (extMatcher.find()) {
                                if (extMatcher.group(1) != null) {
                                    mimeType = extMatcher.group(1);
                                }
                                if (extMatcher.group(2) != null) {
                                    apps.add(extMatcher.group(2));
                                }
                            }
                            // Only record lines that yielded a type and apps.
                            if (mimeType != null && apps.size() > 0) {
                                mimeApps.put(mimeType, apps);
                            }
                        }
                    } catch (IOException e) {
                        throw new OSException(e);
                    }
                }
            }
        }
        return Collections.unmodifiableMap(mimeApps);
    } }
|
public class class_name {
    // Annotated variant of getMimeApps(): trailing comments are tool-generated
    // control-dependency labels; code mirrors the clean version above.
    public Map<String, Set<String>> getMimeApps() throws OSException {
        // lazy initialization
        if (mimeApps == null) {
            mimeApps = new HashMap<>();
            for (String path : MIME_APPS_LISTS) {
                File mimeAppListFile = new File(path);
                if (mimeAppListFile.exists() && mimeAppListFile.isFile() && mimeAppListFile.canRead()) {
                    try (BufferedReader br = new BufferedReader(new FileReader(mimeAppListFile))) {
                        for (String line; (line = br.readLine()) != null;) {
                            String mimeType = null;
                            Set<String> apps = new HashSet<>();
                            Matcher extMatcher = MIME_TYPE_APPS_PATTERN.matcher(line);
                            while (extMatcher.find()) {
                                if (extMatcher.group(1) != null) {
                                    mimeType = extMatcher.group(1); // depends on control dependency: [if], data = [none]
                                }
                                if (extMatcher.group(2) != null) {
                                    apps.add(extMatcher.group(2)); // depends on control dependency: [if], data = [(extMatcher.group(2)]
                                }
                            }
                            if (mimeType != null && apps.size() > 0) {
                                mimeApps.put(mimeType, apps); // depends on control dependency: [if], data = [(mimeType]
                            }
                        }
                    } catch (IOException e) {
                        throw new OSException(e);
                    }
                }
            }
        }
        return Collections.unmodifiableMap(mimeApps);
    } }
|
public class class_name {
@Nonnull
public static FileIOError copyDirRecursive (@Nonnull final Path aSourceDir, @Nonnull final Path aTargetDir)
{
ValueEnforcer.notNull (aSourceDir, "SourceDirectory");
ValueEnforcer.notNull (aTargetDir, "TargetDirectory");
final Path aRealSourceDir = _getUnifiedPath (aSourceDir);
final Path aRealTargetDir = _getUnifiedPath (aTargetDir);
// Does the source directory exist?
if (!aRealSourceDir.toFile ().isDirectory ())
return EFileIOErrorCode.SOURCE_DOES_NOT_EXIST.getAsIOError (EFileIOOperation.COPY_DIR_RECURSIVE, aRealSourceDir);
// Are source and target different?
if (EqualsHelper.equals (aRealSourceDir, aRealTargetDir))
return EFileIOErrorCode.SOURCE_EQUALS_TARGET.getAsIOError (EFileIOOperation.COPY_DIR_RECURSIVE, aRealSourceDir);
// Is the source a parent of target?
if (PathHelper.isParentDirectory (aRealSourceDir, aRealTargetDir))
return EFileIOErrorCode.TARGET_IS_CHILD_OF_SOURCE.getAsIOError (EFileIOOperation.COPY_DIR_RECURSIVE,
aRealSourceDir,
aRealTargetDir);
// Does the target directory already exist?
if (aRealTargetDir.toFile ().exists ())
return EFileIOErrorCode.TARGET_ALREADY_EXISTS.getAsIOError (EFileIOOperation.COPY_DIR_RECURSIVE, aRealTargetDir);
// Is the source directory readable?
if (!Files.isReadable (aRealSourceDir))
return EFileIOErrorCode.SOURCE_NOT_READABLE.getAsIOError (EFileIOOperation.COPY_DIR_RECURSIVE, aRealSourceDir);
// Is the target parent directory writable?
final Path aTargetParentDir = aRealTargetDir.getParent ();
if (aTargetParentDir != null && aTargetParentDir.toFile ().exists () && !Files.isWritable (aTargetParentDir))
return EFileIOErrorCode.TARGET_PARENT_NOT_WRITABLE.getAsIOError (EFileIOOperation.COPY_DIR_RECURSIVE,
aRealTargetDir);
FileIOError eCode;
// Ensure the targets parent directory is present
eCode = createDirRecursive (aRealTargetDir);
if (eCode.isFailure ())
return eCode;
for (final Path aChild : PathHelper.getDirectoryContent (aRealSourceDir))
{
final File aChildFile = aChild.toFile ();
if (aChildFile.isDirectory ())
{
// Skip "." and ".."
if (FilenameHelper.isSystemInternalDirectory (aChild))
continue;
// Copy directory
eCode = copyDirRecursive (aChild, aRealTargetDir.resolve (aChild.getFileName ()));
if (eCode.isFailure ())
return eCode;
}
else
if (aChildFile.isFile ())
{
// Copy a file
eCode = copyFile (aChild, aRealTargetDir.resolve (aChild.getFileName ()));
if (eCode.isFailure ())
return eCode;
}
else
{
// Neither directory not file - don't know how to handle
return EFileIOErrorCode.OBJECT_CANNOT_BE_HANDLED.getAsIOError (EFileIOOperation.COPY_DIR_RECURSIVE, aChild);
}
}
// Done
return EFileIOErrorCode.NO_ERROR.getAsIOError (EFileIOOperation.COPY_DIR_RECURSIVE, aRealSourceDir, aRealTargetDir);
} }
|
public class class_name {
/**
 * Recursively copies the directory tree rooted at {@code aSourceDir} into
 * {@code aTargetDir}. A long chain of precondition checks runs first; the
 * first failing check short-circuits with the matching {@link FileIOError}.
 * On success every contained file is copied via {@code copyFile} and every
 * sub-directory via a recursive self-call.
 *
 * NOTE(review): the inline "depends on control dependency" comments are
 * dataset annotations tied to the exact statements they follow - do not
 * reflow or reorder these lines.
 *
 * @param aSourceDir directory to copy from; must not be null
 * @param aTargetDir directory to copy to; must not be null and must not
 *        yet exist (createDirRecursive creates it below)
 * @return a FileIOError carrying NO_ERROR on success, or the code of the
 *         first failed check/copy step
 */
@Nonnull
public static FileIOError copyDirRecursive (@Nonnull final Path aSourceDir, @Nonnull final Path aTargetDir)
{
ValueEnforcer.notNull (aSourceDir, "SourceDirectory");
ValueEnforcer.notNull (aTargetDir, "TargetDirectory");
// Normalize both paths so the equality/parent checks below compare
// canonical forms rather than raw user input.
final Path aRealSourceDir = _getUnifiedPath (aSourceDir);
final Path aRealTargetDir = _getUnifiedPath (aTargetDir);
// Does the source directory exist?
if (!aRealSourceDir.toFile ().isDirectory ())
return EFileIOErrorCode.SOURCE_DOES_NOT_EXIST.getAsIOError (EFileIOOperation.COPY_DIR_RECURSIVE, aRealSourceDir);
// Are source and target different?
if (EqualsHelper.equals (aRealSourceDir, aRealTargetDir))
return EFileIOErrorCode.SOURCE_EQUALS_TARGET.getAsIOError (EFileIOOperation.COPY_DIR_RECURSIVE, aRealSourceDir);
// Is the source a parent of target? Copying into a descendant would
// recurse forever, so it is rejected up front.
if (PathHelper.isParentDirectory (aRealSourceDir, aRealTargetDir))
return EFileIOErrorCode.TARGET_IS_CHILD_OF_SOURCE.getAsIOError (EFileIOOperation.COPY_DIR_RECURSIVE,
aRealSourceDir,
aRealTargetDir);
// Does the target directory already exist?
if (aRealTargetDir.toFile ().exists ())
return EFileIOErrorCode.TARGET_ALREADY_EXISTS.getAsIOError (EFileIOOperation.COPY_DIR_RECURSIVE, aRealTargetDir);
// Is the source directory readable?
if (!Files.isReadable (aRealSourceDir))
return EFileIOErrorCode.SOURCE_NOT_READABLE.getAsIOError (EFileIOOperation.COPY_DIR_RECURSIVE, aRealSourceDir);
// Is the target parent directory writable? (Only checked when that
// parent exists - a missing parent is created by createDirRecursive.)
final Path aTargetParentDir = aRealTargetDir.getParent ();
if (aTargetParentDir != null && aTargetParentDir.toFile ().exists () && !Files.isWritable (aTargetParentDir))
return EFileIOErrorCode.TARGET_PARENT_NOT_WRITABLE.getAsIOError (EFileIOOperation.COPY_DIR_RECURSIVE,
aRealTargetDir);
FileIOError eCode;
// Ensure the targets parent directory is present
eCode = createDirRecursive (aRealTargetDir);
if (eCode.isFailure ())
return eCode;
// Walk the immediate children; recursion handles deeper levels.
for (final Path aChild : PathHelper.getDirectoryContent (aRealSourceDir))
{
final File aChildFile = aChild.toFile ();
if (aChildFile.isDirectory ())
{
// Skip "." and ".."
if (FilenameHelper.isSystemInternalDirectory (aChild))
continue;
// Copy directory
eCode = copyDirRecursive (aChild, aRealTargetDir.resolve (aChild.getFileName ())); // depends on control dependency: [if], data = [none]
if (eCode.isFailure ())
return eCode;
}
else
if (aChildFile.isFile ())
{
// Copy a file
eCode = copyFile (aChild, aRealTargetDir.resolve (aChild.getFileName ())); // depends on control dependency: [if], data = [none]
if (eCode.isFailure ())
return eCode;
}
else
{
// Neither directory nor file - don't know how to handle
return EFileIOErrorCode.OBJECT_CANNOT_BE_HANDLED.getAsIOError (EFileIOOperation.COPY_DIR_RECURSIVE, aChild); // depends on control dependency: [if], data = [none]
}
}
// Done
return EFileIOErrorCode.NO_ERROR.getAsIOError (EFileIOOperation.COPY_DIR_RECURSIVE, aRealSourceDir, aRealTargetDir);
} }
|
public class class_name {
/**
 * Custom deserialization hook. Restores the non-transient state via
 * {@code defaultReadObject()}, then rebuilds the transient
 * {@code ivPuIds} array, which was serialized as an explicit element
 * count followed by that many {@code JPAPuId} objects.
 *
 * @param in stream positioned at this object's serialized form
 * @throws IOException            if the underlying stream fails
 * @throws ClassNotFoundException if a serialized class cannot be resolved
 */
private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException
{
// Non-transient fields first.
in.defaultReadObject();

// Transient persistence-unit ids: <count> followed by <count> entries.
final int count = in.readInt();
if (count == 0)
{
// Reuse the shared empty-array constant rather than allocating.
ivPuIds = NoPuIds;
return;
}

ivPuIds = new JPAPuId[count];
for (int idx = 0; idx < count; ++idx)
{
ivPuIds[idx] = (JPAPuId) in.readObject();
}
} }
|
public class class_name {
/**
 * Custom deserialization hook: restores non-transient state with
 * {@code defaultReadObject()}, then rebuilds the transient
 * {@code ivPuIds} array from an explicit count followed by that many
 * serialized {@code JPAPuId} objects.
 *
 * NOTE(review): the inline "depends on control dependency" comment is a
 * dataset annotation tied to that exact statement - leave it in place.
 *
 * @param in stream positioned at this object's serialized form
 * @throws IOException            if the underlying stream fails
 * @throws ClassNotFoundException if a serialized class cannot be resolved
 */
private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException
{
// Read non transient fields.
in.defaultReadObject();
// Read transient fields.
int n = in.readInt();
if (n == 0)
{
// Empty array: reuse the shared NoPuIds constant instead of allocating.
ivPuIds = NoPuIds;
}
else
{
ivPuIds = new JPAPuId[n];
for (int i = 0; i < n; ++i)
{
JPAPuId id = (JPAPuId) in.readObject();
ivPuIds[i] = id; // depends on control dependency: [for], data = [i]
}
}
} }
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.