code stringlengths 130 281k | code_dependency stringlengths 182 306k |
|---|---|
public class class_name {
public static int lastIndexOfAny(final CharSequence str, final CharSequence... searchStrs) {
if (str == null || searchStrs == null) {
return INDEX_NOT_FOUND;
}
int ret = INDEX_NOT_FOUND;
int tmp = 0;
for (final CharSequence search : searchStrs) {
if (search == null) {
continue;
}
tmp = CharSequenceUtils.lastIndexOf(str, search, str.length());
if (tmp > ret) {
ret = tmp;
}
}
return ret;
} } | public class class_name {
public static int lastIndexOfAny(final CharSequence str, final CharSequence... searchStrs) {
if (str == null || searchStrs == null) {
return INDEX_NOT_FOUND; // depends on control dependency: [if], data = [none]
}
int ret = INDEX_NOT_FOUND;
int tmp = 0;
for (final CharSequence search : searchStrs) {
if (search == null) {
continue;
}
tmp = CharSequenceUtils.lastIndexOf(str, search, str.length()); // depends on control dependency: [for], data = [search]
if (tmp > ret) {
ret = tmp; // depends on control dependency: [if], data = [none]
}
}
return ret;
} } |
public class class_name {
protected List<ExecutableScript> initEnvForLanguage(String language) {
List<ExecutableScript> scripts = new ArrayList<ExecutableScript>();
for (ScriptEnvResolver resolver : envResolvers) {
String[] resolvedScripts = resolver.resolve(language);
if(resolvedScripts != null) {
for (String resolvedScript : resolvedScripts) {
scripts.add(scriptFactory.createScriptFromSource(language, resolvedScript));
}
}
}
return scripts;
} } | public class class_name {
protected List<ExecutableScript> initEnvForLanguage(String language) {
List<ExecutableScript> scripts = new ArrayList<ExecutableScript>();
for (ScriptEnvResolver resolver : envResolvers) {
String[] resolvedScripts = resolver.resolve(language);
if(resolvedScripts != null) {
for (String resolvedScript : resolvedScripts) {
scripts.add(scriptFactory.createScriptFromSource(language, resolvedScript)); // depends on control dependency: [for], data = [resolvedScript]
}
}
}
return scripts;
} } |
public class class_name {
private boolean match(String keyword, int fromIndex) {
int match = 0;
// 查找非空白字符。
for (int index = fromIndex; index < length; index++) {
char ch = pattern.charAt(index);
if (!Character.isWhitespace(ch)) {
match = index;
break;
}
}
// 匹配关键字内容。
for (int index = 0; index < keyword.length(); index++) {
char ch = pattern.charAt(match);
if (ch != keyword.charAt(index)) {
// 与关键字不匹配
return false;
}
match++;
}
// 修改当前位置
position = match;
return true;
} } | public class class_name {
private boolean match(String keyword, int fromIndex) {
int match = 0;
// 查找非空白字符。
for (int index = fromIndex; index < length; index++) {
char ch = pattern.charAt(index);
if (!Character.isWhitespace(ch)) {
match = index; // depends on control dependency: [if], data = [none]
break;
}
}
// 匹配关键字内容。
for (int index = 0; index < keyword.length(); index++) {
char ch = pattern.charAt(match);
if (ch != keyword.charAt(index)) {
// 与关键字不匹配
return false; // depends on control dependency: [if], data = [none]
}
match++; // depends on control dependency: [for], data = [none]
}
// 修改当前位置
position = match;
return true;
} } |
public class class_name {
static <E> ImmutableMap<E, Integer> indexMap(Collection<E> list) {
ImmutableMap.Builder<E, Integer> builder = new ImmutableMap.Builder<>(list.size());
int i = 0;
for (E e : list) {
builder.put(e, i++);
}
return builder.build();
} } | public class class_name {
static <E> ImmutableMap<E, Integer> indexMap(Collection<E> list) {
ImmutableMap.Builder<E, Integer> builder = new ImmutableMap.Builder<>(list.size());
int i = 0;
for (E e : list) {
builder.put(e, i++); // depends on control dependency: [for], data = [e]
}
return builder.build();
} } |
public class class_name {
WebSocketPacket decode(final Logger logger, final WebSocketRunner runner, final WebSocket webSocket, final int wsmaxbody,
final AbstractMap.SimpleEntry<String, byte[]> halfBytes, final ByteBuffer buffer) {
//开始
final boolean debug = false; //调试开关
if (debug) logger.log(Level.FINEST, "read websocket message's length = " + buffer.remaining());
if (!buffer.hasRemaining()) return NONE;
if (buffer.remaining() < 2) {
byte[] bs = new byte[buffer.remaining()];
buffer.get(bs);
halfBytes.setValue(bs);
return NONE;
}
final byte opcode = buffer.get(); //第一个字节
this.last = (opcode & 0b1000_0000) != 0;
this.type = FrameType.valueOf(opcode & 0xF);
if (type == FrameType.CLOSE) {
if (debug) logger.log(Level.FINEST, " receive close command from websocket client");
}
if (type == null) {
logger.log(Level.SEVERE, " receive unknown frametype(opcode=" + (opcode & 0xF) + ") from websocket client");
}
final boolean checkrsv = false;//暂时不校验
if (checkrsv && (opcode & 0b0111_0000) != 0) {
if (debug) logger.log(Level.FINE, "rsv1 rsv2 rsv3 must be 0, but not (" + opcode + ")");
return null; //rsv1 rsv2 rsv3 must be 0
}
//0x00 表示一个后续帧
//0x01 表示一个文本帧
//0x02 表示一个二进制帧
//0x03-07 为以后的非控制帧保留
//0x8 表示一个连接关闭
//0x9 表示一个ping
//0xA 表示一个pong
//0x0B-0F 为以后的控制帧保留
final boolean control = (opcode & 0b0000_1000) != 0; //是否控制帧
final byte crcode = buffer.get(); //第二个字节
byte lengthCode = crcode;
final boolean masked = (lengthCode & 0x80) == 0x80;
if (masked) lengthCode ^= 0x80; //mask
//判断Buffer剩余内容够不够基本信息的创建
int minBufferLength = ((lengthCode <= 0x7D) ? 0 : (lengthCode == 0x7E ? 2 : 4)) + (masked ? 4 : 0);
if (buffer.remaining() < minBufferLength) {
byte[] bs = new byte[2 + buffer.remaining()];
bs[0] = opcode;
bs[1] = crcode;
buffer.get(bs, 2, buffer.remaining());
halfBytes.setValue(bs);
return NONE;
}
int length;
if (lengthCode <= 0x7D) { //125
length = lengthCode;
} else {
if (control) {
if (debug) logger.log(Level.FINE, " receive control command from websocket client");
return null;
}
if (lengthCode == 0x7E) {//0x7E=126
length = (int) buffer.getChar();
} else if (lengthCode == 0x7F) {//0x7E=127
length = (int) buffer.getLong();
} else {
length = buffer.getInt();
}
}
if (length > wsmaxbody && wsmaxbody > 0) {
logger.log(Level.WARNING, "message length (" + length + ") too big, must less " + wsmaxbody + "");
return null;
}
this.receiveLength = length;
if (debug) logger.finest("this.receiveLength: " + length + ", code=" + lengthCode + ", last=" + last);
if (masked) {
final byte[] masks = new byte[4];
buffer.get(masks);
this.receiveMasker = new ConvertMask() {
private int index = 0;
@Override
public byte unmask(byte value) {
return (byte) (value ^ masks[index++ % 4]);
}
};
}
if (buffer.remaining() >= this.receiveLength) { //内容足够, 可以解析
this.parseReceiveMessage(logger, runner, webSocket, buffer);
this.receiveCount = this.receiveLength;
} else {
this.receiveCount = buffer.remaining();
this.receiveBuffers = buffer.hasRemaining() ? new ByteBuffer[]{buffer} : null;
}
return this;
} } | public class class_name {
WebSocketPacket decode(final Logger logger, final WebSocketRunner runner, final WebSocket webSocket, final int wsmaxbody,
final AbstractMap.SimpleEntry<String, byte[]> halfBytes, final ByteBuffer buffer) {
//开始
final boolean debug = false; //调试开关
if (debug) logger.log(Level.FINEST, "read websocket message's length = " + buffer.remaining());
if (!buffer.hasRemaining()) return NONE;
if (buffer.remaining() < 2) {
byte[] bs = new byte[buffer.remaining()];
buffer.get(bs);
// depends on control dependency: [if], data = [none]
halfBytes.setValue(bs);
// depends on control dependency: [if], data = [none]
return NONE;
// depends on control dependency: [if], data = [none]
}
final byte opcode = buffer.get(); //第一个字节
this.last = (opcode & 0b1000_0000) != 0;
this.type = FrameType.valueOf(opcode & 0xF);
if (type == FrameType.CLOSE) {
if (debug) logger.log(Level.FINEST, " receive close command from websocket client");
}
if (type == null) {
logger.log(Level.SEVERE, " receive unknown frametype(opcode=" + (opcode & 0xF) + ") from websocket client");
// depends on control dependency: [if], data = [none]
}
final boolean checkrsv = false;//暂时不校验
if (checkrsv && (opcode & 0b0111_0000) != 0) {
if (debug) logger.log(Level.FINE, "rsv1 rsv2 rsv3 must be 0, but not (" + opcode + ")");
return null; //rsv1 rsv2 rsv3 must be 0
// depends on control dependency: [if], data = [none]
}
//0x00 表示一个后续帧
//0x01 表示一个文本帧
//0x02 表示一个二进制帧
//0x03-07 为以后的非控制帧保留
//0x8 表示一个连接关闭
//0x9 表示一个ping
//0xA 表示一个pong
//0x0B-0F 为以后的控制帧保留
final boolean control = (opcode & 0b0000_1000) != 0; //是否控制帧
final byte crcode = buffer.get(); //第二个字节
byte lengthCode = crcode;
final boolean masked = (lengthCode & 0x80) == 0x80;
if (masked) lengthCode ^= 0x80; //mask
//判断Buffer剩余内容够不够基本信息的创建
int minBufferLength = ((lengthCode <= 0x7D) ? 0 : (lengthCode == 0x7E ? 2 : 4)) + (masked ? 4 : 0);
if (buffer.remaining() < minBufferLength) {
byte[] bs = new byte[2 + buffer.remaining()];
bs[0] = opcode;
// depends on control dependency: [if], data = [none]
bs[1] = crcode;
// depends on control dependency: [if], data = [none]
buffer.get(bs, 2, buffer.remaining());
// depends on control dependency: [if], data = [none]
halfBytes.setValue(bs);
// depends on control dependency: [if], data = [none]
return NONE;
// depends on control dependency: [if], data = [none]
}
int length;
if (lengthCode <= 0x7D) { //125
length = lengthCode;
// depends on control dependency: [if], data = [none]
} else {
if (control) {
if (debug) logger.log(Level.FINE, " receive control command from websocket client");
return null;
// depends on control dependency: [if], data = [none]
}
if (lengthCode == 0x7E) {//0x7E=126
length = (int) buffer.getChar();
// depends on control dependency: [if], data = [none]
} else if (lengthCode == 0x7F) {//0x7E=127
length = (int) buffer.getLong();
// depends on control dependency: [if], data = [none]
} else {
length = buffer.getInt();
// depends on control dependency: [if], data = [none]
}
}
if (length > wsmaxbody && wsmaxbody > 0) {
logger.log(Level.WARNING, "message length (" + length + ") too big, must less " + wsmaxbody + "");
return null;
}
this.receiveLength = length;
if (debug) logger.finest("this.receiveLength: " + length + ", code=" + lengthCode + ", last=" + last);
if (masked) {
final byte[] masks = new byte[4];
buffer.get(masks);
this.receiveMasker = new ConvertMask() {
private int index = 0;
@Override
public byte unmask(byte value) {
return (byte) (value ^ masks[index++ % 4]);
}
};
}
if (buffer.remaining() >= this.receiveLength) { //内容足够, 可以解析
this.parseReceiveMessage(logger, runner, webSocket, buffer);
// depends on control dependency: [if], data = [none]
this.receiveCount = this.receiveLength;
// depends on control dependency: [if], data = [none]
} else {
this.receiveCount = buffer.remaining();
// depends on control dependency: [if], data = [none]
this.receiveBuffers = buffer.hasRemaining() ? new ByteBuffer[]{buffer} : null;
// depends on control dependency: [if], data = [none]
}
return this;
} } |
public class class_name {
public static <T> T selectValue(T defaultValue, T... overrides) {
for (T override : overrides) {
if (override != null) {
return override;
}
}
return defaultValue;
} } | public class class_name {
public static <T> T selectValue(T defaultValue, T... overrides) {
for (T override : overrides) {
if (override != null) {
return override; // depends on control dependency: [if], data = [none]
}
}
return defaultValue;
} } |
public class class_name {
static String awaitComplete(CloudDNS api, Job job) {
RetryableException retryableException = new RetryableException(
format("Job %s did not complete. Check your logs.", job.id), null);
Retryer retryer = new Retryer.Default(500, 1000, 30);
while (true) {
job = api.getStatus(job.id);
if ("COMPLETED".equals(job.status)) {
return job.resultId;
} else if ("ERROR".equals(job.status)) {
throw new IllegalStateException(
format("Job %s failed with error: %s", job.id, job.errorDetails));
}
retryer.continueOrPropagate(retryableException);
}
} } | public class class_name {
static String awaitComplete(CloudDNS api, Job job) {
RetryableException retryableException = new RetryableException(
format("Job %s did not complete. Check your logs.", job.id), null);
Retryer retryer = new Retryer.Default(500, 1000, 30);
while (true) {
job = api.getStatus(job.id); // depends on control dependency: [while], data = [none]
if ("COMPLETED".equals(job.status)) {
return job.resultId; // depends on control dependency: [if], data = [none]
} else if ("ERROR".equals(job.status)) {
throw new IllegalStateException(
format("Job %s failed with error: %s", job.id, job.errorDetails));
}
retryer.continueOrPropagate(retryableException); // depends on control dependency: [while], data = [none]
}
} } |
public class class_name {
@Override
public synchronized boolean shutdown()
{
if (!started)
return false;
try
{
log.info("Server shutting down ...");
// Undeploy bridges
undeployBridges();
// Stop the tcp listener
if (tcpListener != null)
{
tcpListener.stop();
tcpListener = null;
}
// Terminate the admin thread
if (adminThread != null)
{
adminThread.pleaseStop();
try
{
adminThread.join(ADMIN_THREAD_STOP_TIMEOUT*1000);
adminThread = null;
}
catch (InterruptedException e)
{
log.warn("Shutdown was interrupted while waiting for the admin thread to stop");
}
}
// Undeploy engine
engine.undeploy();
try
{
if (jmxAgent != null)
jmxAgent.unregister(new ObjectName(JMXAgent.JMX_DOMAIN+":type=Engines,engine="+engine.getName()));
}
catch (Exception e)
{
log.error("Cannot unregister local engine from JMX agent",e);
}
// Undeploy JMX
if (jmxAgent != null)
jmxAgent.stop();
started = false;
log.info("Shutdown complete.");
return true;
}
catch (Exception e)
{
log.error("Server shutdown failed",e);
return false;
}
} } | public class class_name {
@Override
public synchronized boolean shutdown()
{
if (!started)
return false;
try
{
log.info("Server shutting down ..."); // depends on control dependency: [try], data = [none]
// Undeploy bridges
undeployBridges(); // depends on control dependency: [try], data = [none]
// Stop the tcp listener
if (tcpListener != null)
{
tcpListener.stop(); // depends on control dependency: [if], data = [none]
tcpListener = null; // depends on control dependency: [if], data = [none]
}
// Terminate the admin thread
if (adminThread != null)
{
adminThread.pleaseStop(); // depends on control dependency: [if], data = [none]
try
{
adminThread.join(ADMIN_THREAD_STOP_TIMEOUT*1000); // depends on control dependency: [try], data = [none]
adminThread = null; // depends on control dependency: [try], data = [none]
}
catch (InterruptedException e)
{
log.warn("Shutdown was interrupted while waiting for the admin thread to stop");
} // depends on control dependency: [catch], data = [none]
}
// Undeploy engine
engine.undeploy(); // depends on control dependency: [try], data = [none]
try
{
if (jmxAgent != null)
jmxAgent.unregister(new ObjectName(JMXAgent.JMX_DOMAIN+":type=Engines,engine="+engine.getName()));
}
catch (Exception e)
{
log.error("Cannot unregister local engine from JMX agent",e);
} // depends on control dependency: [catch], data = [none]
// Undeploy JMX
if (jmxAgent != null)
jmxAgent.stop();
started = false; // depends on control dependency: [try], data = [none]
log.info("Shutdown complete."); // depends on control dependency: [try], data = [none]
return true; // depends on control dependency: [try], data = [none]
}
catch (Exception e)
{
log.error("Server shutdown failed",e);
return false;
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
private Constraint putToCache(final String id, final Constraint object) {
Constraint c = this.map.put(id, object);
if (c instanceof DummyConstraint) {
((DummyConstraint) c).constraint = object;
return null;
}
return c;
} } | public class class_name {
private Constraint putToCache(final String id, final Constraint object) {
Constraint c = this.map.put(id, object);
if (c instanceof DummyConstraint) {
((DummyConstraint) c).constraint = object;
// depends on control dependency: [if], data = [none]
return null;
// depends on control dependency: [if], data = [none]
}
return c;
} } |
public class class_name {
@SuppressWarnings("unchecked")
public static <T> List<T> getAny(Collection<T> collection, int... indexes) {
final int size = collection.size();
final ArrayList<T> result = new ArrayList<>();
if (collection instanceof List) {
final List<T> list = ((List<T>) collection);
for (int index : indexes) {
if (index < 0) {
index += size;
}
result.add(list.get(index));
}
} else {
Object[] array = ((Collection<T>) collection).toArray();
for (int index : indexes) {
if (index < 0) {
index += size;
}
result.add((T) array[index]);
}
}
return result;
} } | public class class_name {
@SuppressWarnings("unchecked")
public static <T> List<T> getAny(Collection<T> collection, int... indexes) {
final int size = collection.size();
final ArrayList<T> result = new ArrayList<>();
if (collection instanceof List) {
final List<T> list = ((List<T>) collection);
for (int index : indexes) {
if (index < 0) {
index += size;
// depends on control dependency: [if], data = [none]
}
result.add(list.get(index));
// depends on control dependency: [for], data = [index]
}
} else {
Object[] array = ((Collection<T>) collection).toArray();
for (int index : indexes) {
if (index < 0) {
index += size;
// depends on control dependency: [if], data = [none]
}
result.add((T) array[index]);
// depends on control dependency: [for], data = [index]
}
}
return result;
} } |
public class class_name {
@Nonnull
public Number toJavaLangNumber() {
int flags = getFlags();
if ((flags & F_DOUBLE) != 0)
return doubleValue();
else if ((flags & F_FLOAT) != 0)
return floatValue();
else if ((flags & (F_LONG | F_LONGLONG)) != 0)
return longValue();
else if ((flags & F_INT) != 0)
return intValue();
else if (getFractionalPart() != null)
return doubleValue(); // .1 is a double in Java.
else if (getExponent() != null)
return doubleValue();
else {
// This is an attempt to avoid overflowing on over-long integers.
// However, now we just overflow on over-long longs.
// We should really use BigInteger.
long value = longValue();
if (value <= Integer.MAX_VALUE && value >= Integer.MIN_VALUE)
return (int) value;
return value;
}
} } | public class class_name {
@Nonnull
public Number toJavaLangNumber() {
int flags = getFlags();
if ((flags & F_DOUBLE) != 0)
return doubleValue();
else if ((flags & F_FLOAT) != 0)
return floatValue();
else if ((flags & (F_LONG | F_LONGLONG)) != 0)
return longValue();
else if ((flags & F_INT) != 0)
return intValue();
else if (getFractionalPart() != null)
return doubleValue(); // .1 is a double in Java.
else if (getExponent() != null)
return doubleValue();
else {
// This is an attempt to avoid overflowing on over-long integers.
// However, now we just overflow on over-long longs.
// We should really use BigInteger.
long value = longValue();
if (value <= Integer.MAX_VALUE && value >= Integer.MIN_VALUE)
return (int) value;
return value; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
private String removeAsterisk(String str)
{
if (str.startsWith("*"))
{
str = str.substring(1);
}
if (str.endsWith("*"))
{
str = str.substring(0, str.length() - 1);
}
return str;
} } | public class class_name {
private String removeAsterisk(String str)
{
if (str.startsWith("*"))
{
str = str.substring(1); // depends on control dependency: [if], data = [none]
}
if (str.endsWith("*"))
{
str = str.substring(0, str.length() - 1); // depends on control dependency: [if], data = [none]
}
return str;
} } |
public class class_name {
public final String complexOp() throws RecognitionException {
String opr = null;
Token t=null;
Token e=null;
try {
// src/main/resources/org/drools/compiler/lang/DRL5Expressions.g:126:5: (t= TILDE e= EQUALS_ASSIGN )
// src/main/resources/org/drools/compiler/lang/DRL5Expressions.g:126:7: t= TILDE e= EQUALS_ASSIGN
{
t=(Token)match(input,TILDE,FOLLOW_TILDE_in_complexOp477); if (state.failed) return opr;
e=(Token)match(input,EQUALS_ASSIGN,FOLLOW_EQUALS_ASSIGN_in_complexOp481); if (state.failed) return opr;
if ( state.backtracking==0 ) { opr =(t!=null?t.getText():null)+(e!=null?e.getText():null); }
}
}
catch (RecognitionException re) {
throw re;
}
finally {
// do for sure before leaving
}
return opr;
} } | public class class_name {
public final String complexOp() throws RecognitionException {
String opr = null;
Token t=null;
Token e=null;
try {
// src/main/resources/org/drools/compiler/lang/DRL5Expressions.g:126:5: (t= TILDE e= EQUALS_ASSIGN )
// src/main/resources/org/drools/compiler/lang/DRL5Expressions.g:126:7: t= TILDE e= EQUALS_ASSIGN
{
t=(Token)match(input,TILDE,FOLLOW_TILDE_in_complexOp477); if (state.failed) return opr;
e=(Token)match(input,EQUALS_ASSIGN,FOLLOW_EQUALS_ASSIGN_in_complexOp481); if (state.failed) return opr;
if ( state.backtracking==0 ) { opr =(t!=null?t.getText():null)+(e!=null?e.getText():null); } // depends on control dependency: [if], data = [none]
}
}
catch (RecognitionException re) {
throw re;
}
finally {
// do for sure before leaving
}
return opr;
} } |
public class class_name {
public static Throwable get(Throwable serThrowable, ClassLoader loader) {
if (serThrowable instanceof SerializedThrowable) {
return ((SerializedThrowable) serThrowable).deserializeError(loader);
} else {
return serThrowable;
}
} } | public class class_name {
public static Throwable get(Throwable serThrowable, ClassLoader loader) {
if (serThrowable instanceof SerializedThrowable) {
return ((SerializedThrowable) serThrowable).deserializeError(loader); // depends on control dependency: [if], data = [none]
} else {
return serThrowable; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static String getLabel(ComponentJob job, boolean includeDescriptorName, boolean includeInputColumnNames,
boolean includeRequirements) {
final String jobName = job.getName();
final StringBuilder label = new StringBuilder();
if (Strings.isNullOrEmpty(jobName)) {
if (job instanceof ConfigurableBeanJob) {
BeanDescriptor<?> descriptor = ((ConfigurableBeanJob<?>) job).getDescriptor();
label.append(descriptor.getDisplayName());
} else {
label.append(job.toString());
}
} else {
label.append(jobName);
}
if (job instanceof AnalyzerJob) {
AnalyzerJob analyzerJob = (AnalyzerJob) job;
if (includeDescriptorName && !Strings.isNullOrEmpty(jobName)) {
label.append(" (");
label.append(analyzerJob.getDescriptor().getDisplayName());
label.append(')');
}
final InputColumn<?>[] input = analyzerJob.getInput();
if (input.length == 1) {
if (input[0].getName().equals(jobName)) {
// special case where jobName is the same as the single
// input column - in that case we'll leave out the column
// name
includeInputColumnNames = false;
}
}
if (includeInputColumnNames && input.length > 0) {
label.append(" (");
if (input.length < 5) {
for (int i = 0; i < input.length; i++) {
if (i != 0) {
label.append(',');
}
label.append(input[i].getName());
}
} else {
label.append(input.length);
label.append(" columns");
}
label.append(")");
}
final ComponentRequirement requirement = analyzerJob.getComponentRequirement();
if (includeRequirements && requirement != null) {
label.append(" (");
label.append(requirement.toString());
label.append(")");
}
}
return label.toString();
} } | public class class_name {
public static String getLabel(ComponentJob job, boolean includeDescriptorName, boolean includeInputColumnNames,
boolean includeRequirements) {
final String jobName = job.getName();
final StringBuilder label = new StringBuilder();
if (Strings.isNullOrEmpty(jobName)) {
if (job instanceof ConfigurableBeanJob) {
BeanDescriptor<?> descriptor = ((ConfigurableBeanJob<?>) job).getDescriptor();
label.append(descriptor.getDisplayName()); // depends on control dependency: [if], data = [none]
} else {
label.append(job.toString()); // depends on control dependency: [if], data = [none]
}
} else {
label.append(jobName); // depends on control dependency: [if], data = [none]
}
if (job instanceof AnalyzerJob) {
AnalyzerJob analyzerJob = (AnalyzerJob) job;
if (includeDescriptorName && !Strings.isNullOrEmpty(jobName)) {
label.append(" ("); // depends on control dependency: [if], data = [none]
label.append(analyzerJob.getDescriptor().getDisplayName()); // depends on control dependency: [if], data = [none]
label.append(')'); // depends on control dependency: [if], data = [none]
}
final InputColumn<?>[] input = analyzerJob.getInput();
if (input.length == 1) {
if (input[0].getName().equals(jobName)) {
// special case where jobName is the same as the single
// input column - in that case we'll leave out the column
// name
includeInputColumnNames = false; // depends on control dependency: [if], data = [none]
}
}
if (includeInputColumnNames && input.length > 0) {
label.append(" ("); // depends on control dependency: [if], data = [none]
if (input.length < 5) {
for (int i = 0; i < input.length; i++) {
if (i != 0) {
label.append(','); // depends on control dependency: [if], data = [none]
}
label.append(input[i].getName()); // depends on control dependency: [for], data = [i]
}
} else {
label.append(input.length); // depends on control dependency: [if], data = [(input.length]
label.append(" columns"); // depends on control dependency: [if], data = [none]
}
label.append(")"); // depends on control dependency: [if], data = [none]
}
final ComponentRequirement requirement = analyzerJob.getComponentRequirement();
if (includeRequirements && requirement != null) {
label.append(" ("); // depends on control dependency: [if], data = [none]
label.append(requirement.toString()); // depends on control dependency: [if], data = [none]
label.append(")"); // depends on control dependency: [if], data = [none]
}
}
return label.toString();
} } |
public class class_name {
public void marshall(InputSerialization inputSerialization, ProtocolMarshaller protocolMarshaller) {
if (inputSerialization == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(inputSerialization.getCsv(), CSV_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(InputSerialization inputSerialization, ProtocolMarshaller protocolMarshaller) {
if (inputSerialization == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(inputSerialization.getCsv(), CSV_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public void marshall(CreateRealtimeEndpointRequest createRealtimeEndpointRequest, ProtocolMarshaller protocolMarshaller) {
if (createRealtimeEndpointRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(createRealtimeEndpointRequest.getMLModelId(), MLMODELID_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(CreateRealtimeEndpointRequest createRealtimeEndpointRequest, ProtocolMarshaller protocolMarshaller) {
if (createRealtimeEndpointRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(createRealtimeEndpointRequest.getMLModelId(), MLMODELID_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public UserAccessToken generateAccessToken(User user, String device) {
if (user == null)
return null;
Criteria criteria = this.dao.newCriteria(UserAccessToken.class)
.add(Restrictions.eq("user", user))
.add(Restrictions.eqOrIsNull("device", device))
.add(Restrictions.gt("expiration", new Date()))
.addOrder(Order.asc("expiration"));
List<UserAccessToken> tokens = this.dao.findByCriteria(criteria, UserAccessToken.class);
if (GeneralUtils.isEmpty(tokens)) {
UserAccessToken token = new UserAccessToken();
token.setUser(user);
token.setDevice(device);
token.setCreation(new Date());
token.setCreationIp(this.request.getRemoteAddr());
token.setExpiration(new Date(token.getCreation().getTime() + token.getTtl()));
token.setToken(CryptManager.digest(user.getEmail() + "#" + String.valueOf(token.getCreation().getTime())
+ "@" + String.valueOf(Math.random())));
this.dao.persist(token);
return token;
}
return tokens.get(0);
} } | public class class_name {
public UserAccessToken generateAccessToken(User user, String device) {
if (user == null)
return null;
Criteria criteria = this.dao.newCriteria(UserAccessToken.class)
.add(Restrictions.eq("user", user))
.add(Restrictions.eqOrIsNull("device", device))
.add(Restrictions.gt("expiration", new Date()))
.addOrder(Order.asc("expiration"));
List<UserAccessToken> tokens = this.dao.findByCriteria(criteria, UserAccessToken.class);
if (GeneralUtils.isEmpty(tokens)) {
UserAccessToken token = new UserAccessToken();
token.setUser(user); // depends on control dependency: [if], data = [none]
token.setDevice(device); // depends on control dependency: [if], data = [none]
token.setCreation(new Date()); // depends on control dependency: [if], data = [none]
token.setCreationIp(this.request.getRemoteAddr()); // depends on control dependency: [if], data = [none]
token.setExpiration(new Date(token.getCreation().getTime() + token.getTtl())); // depends on control dependency: [if], data = [none]
token.setToken(CryptManager.digest(user.getEmail() + "#" + String.valueOf(token.getCreation().getTime())
+ "@" + String.valueOf(Math.random()))); // depends on control dependency: [if], data = [none]
this.dao.persist(token); // depends on control dependency: [if], data = [none]
return token; // depends on control dependency: [if], data = [none]
}
return tokens.get(0);
} } |
public class class_name {
public void run()
{
Record record = this.getMainRecord();
try {
Writer out = new StringWriter();
MessageDetailTarget messageDetailTarget = (MessageDetailTarget)this.getMainRecord();
String strSite = messageDetailTarget.getProperty(TrxMessageHeader.DESTINATION_PARAM);
String strWSDLPath = messageDetailTarget.getProperty(TrxMessageHeader.WSDL_PATH);
strWSDLPath = this.getFullPath(strSite, strWSDLPath);
//x strWSDLPath = "http://www.tourloco.com:8181/tour/apphtml?datatype=wsdl&version=b2007";
Utility.transferURLStream(strWSDLPath, null, null, out);
out.flush();
out.close();
record.edit();
this.processWSDLXML(out.toString());
record.set();
if (this.getProperty(TrxMessageHeader.REGISTRY_ID) != null) // The return Queue ID
{
Application app = (Application)this.getTask().getApplication();
Integer intFilterID = new Integer(this.getProperty(TrxMessageHeader.REGISTRY_ID));
TrxMessageHeader messageHeader = new TrxMessageHeader(MessageConstants.TRX_RECEIVE_QUEUE, MessageConstants.INTERNET_QUEUE, null);
messageHeader.setRegistryIDMatch(intFilterID);
Map<String,Object> map = new Hashtable<String,Object>();
map.put(DBConstants.OBJECT_ID, this.getProperty(DBConstants.OBJECT_ID));
BaseMessage message = new MapMessage(messageHeader, map);
// Tell the sender that I've finished (not required)
app.getMessageManager().sendMessage(message);
}
} catch (IOException e) {
e.printStackTrace();
} catch (DBException e) {
e.printStackTrace();
return;
}
} } | public class class_name {
public void run()
{
Record record = this.getMainRecord();
try {
Writer out = new StringWriter();
MessageDetailTarget messageDetailTarget = (MessageDetailTarget)this.getMainRecord();
String strSite = messageDetailTarget.getProperty(TrxMessageHeader.DESTINATION_PARAM);
String strWSDLPath = messageDetailTarget.getProperty(TrxMessageHeader.WSDL_PATH);
strWSDLPath = this.getFullPath(strSite, strWSDLPath); // depends on control dependency: [try], data = [none]
//x strWSDLPath = "http://www.tourloco.com:8181/tour/apphtml?datatype=wsdl&version=b2007";
Utility.transferURLStream(strWSDLPath, null, null, out); // depends on control dependency: [try], data = [none]
out.flush(); // depends on control dependency: [try], data = [none]
out.close(); // depends on control dependency: [try], data = [none]
record.edit(); // depends on control dependency: [try], data = [none]
this.processWSDLXML(out.toString()); // depends on control dependency: [try], data = [none]
record.set(); // depends on control dependency: [try], data = [none]
if (this.getProperty(TrxMessageHeader.REGISTRY_ID) != null) // The return Queue ID
{
Application app = (Application)this.getTask().getApplication();
Integer intFilterID = new Integer(this.getProperty(TrxMessageHeader.REGISTRY_ID));
TrxMessageHeader messageHeader = new TrxMessageHeader(MessageConstants.TRX_RECEIVE_QUEUE, MessageConstants.INTERNET_QUEUE, null);
messageHeader.setRegistryIDMatch(intFilterID); // depends on control dependency: [if], data = [none]
Map<String,Object> map = new Hashtable<String,Object>();
map.put(DBConstants.OBJECT_ID, this.getProperty(DBConstants.OBJECT_ID)); // depends on control dependency: [if], data = [none]
BaseMessage message = new MapMessage(messageHeader, map);
// Tell the sender that I've finished (not required)
app.getMessageManager().sendMessage(message); // depends on control dependency: [if], data = [none]
}
} catch (IOException e) {
e.printStackTrace();
} catch (DBException e) { // depends on control dependency: [catch], data = [none]
e.printStackTrace();
return;
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
private static void setExtendedParentPointers(final int[] array) {
final int length = array.length;
array[0] += array[1];
for (int headNode = 0, tailNode = 1, topNode = 2; tailNode < length - 1; tailNode++) {
int temp;
if (topNode >= length || array[headNode] < array[topNode]) {
temp = array[headNode];
array[headNode++] = tailNode;
} else {
temp = array[topNode++];
}
if (topNode >= length || (headNode < tailNode && array[headNode] < array[topNode])) {
temp += array[headNode];
array[headNode++] = tailNode + length;
} else {
temp += array[topNode++];
}
array[tailNode] = temp;
}
} } | public class class_name {
private static void setExtendedParentPointers(final int[] array) {
final int length = array.length;
array[0] += array[1];
for (int headNode = 0, tailNode = 1, topNode = 2; tailNode < length - 1; tailNode++) {
int temp;
if (topNode >= length || array[headNode] < array[topNode]) {
temp = array[headNode]; // depends on control dependency: [if], data = [none]
array[headNode++] = tailNode; // depends on control dependency: [if], data = [none]
} else {
temp = array[topNode++]; // depends on control dependency: [if], data = [none]
}
if (topNode >= length || (headNode < tailNode && array[headNode] < array[topNode])) {
temp += array[headNode]; // depends on control dependency: [if], data = [none]
array[headNode++] = tailNode + length; // depends on control dependency: [if], data = [none]
} else {
temp += array[topNode++]; // depends on control dependency: [if], data = [none]
}
array[tailNode] = temp; // depends on control dependency: [for], data = [none]
}
} } |
public class class_name {
private static String getEncodeFileName(String userAgent, String fileName) {
String encodedFileName = fileName;
try {
if (userAgent.contains("MSIE") || userAgent.contains("Opera")) {
encodedFileName = URLEncoder.encode(fileName, "UTF-8");
} else {
encodedFileName = "=?UTF-8?B?" + new String(BaseEncoding.base64().encode(fileName.getBytes("UTF-8"))) + "?=";
}
} catch (Exception e) {
LOGGER.error(e.getMessage());
}
return encodedFileName;
} } | public class class_name {
private static String getEncodeFileName(String userAgent, String fileName) {
String encodedFileName = fileName;
try {
if (userAgent.contains("MSIE") || userAgent.contains("Opera")) {
encodedFileName = URLEncoder.encode(fileName, "UTF-8"); // depends on control dependency: [if], data = [none]
} else {
encodedFileName = "=?UTF-8?B?" + new String(BaseEncoding.base64().encode(fileName.getBytes("UTF-8"))) + "?="; // depends on control dependency: [if], data = [none]
}
} catch (Exception e) {
LOGGER.error(e.getMessage());
} // depends on control dependency: [catch], data = [none]
return encodedFileName;
} } |
public class class_name {
private void initDurationPanel() {
m_durationPrefixLabel.setText(Messages.get().key(Messages.GUI_SERIALDATE_DURATION_PREFIX_0));
m_durationAfterPostfixLabel.setText(Messages.get().key(Messages.GUI_SERIALDATE_DURATION_ENDTYPE_OCC_POSTFIX_0));
m_seriesEndDate.setDateOnly(true);
m_seriesEndDate.setAllowInvalidValue(true);
m_seriesEndDate.setValue(m_model.getSeriesEndDate());
m_seriesEndDate.getTextField().addFocusHandler(new FocusHandler() {
public void onFocus(FocusEvent event) {
if (handleChange()) {
onSeriesEndDateFocus(event);
}
}
});
} } | public class class_name {
private void initDurationPanel() {
m_durationPrefixLabel.setText(Messages.get().key(Messages.GUI_SERIALDATE_DURATION_PREFIX_0));
m_durationAfterPostfixLabel.setText(Messages.get().key(Messages.GUI_SERIALDATE_DURATION_ENDTYPE_OCC_POSTFIX_0));
m_seriesEndDate.setDateOnly(true);
m_seriesEndDate.setAllowInvalidValue(true);
m_seriesEndDate.setValue(m_model.getSeriesEndDate());
m_seriesEndDate.getTextField().addFocusHandler(new FocusHandler() {
public void onFocus(FocusEvent event) {
if (handleChange()) {
onSeriesEndDateFocus(event); // depends on control dependency: [if], data = [none]
}
}
});
} } |
public class class_name {
public void appendln(String str) {
BufferedReader rdr = new BufferedReader(new StringReader(str));
while (true) {
String line = null;
try {
line = rdr.readLine();
} catch (IOException e) {
// shouldn't ever happen since we're using a StringReader
throw new RuntimeException(e);
}
if (line != null) {
lines.add(new LineInfo(line));
continue;
}
break;
}
} } | public class class_name {
public void appendln(String str) {
BufferedReader rdr = new BufferedReader(new StringReader(str));
while (true) {
String line = null;
try {
line = rdr.readLine();
// depends on control dependency: [try], data = [none]
} catch (IOException e) {
// shouldn't ever happen since we're using a StringReader
throw new RuntimeException(e);
}
// depends on control dependency: [catch], data = [none]
if (line != null) {
lines.add(new LineInfo(line));
// depends on control dependency: [if], data = [(line]
continue;
}
break;
}
} } |
public class class_name {
@Override
public void addLinkOperationId(String operationId, String location) {
if (linkOperationIds.containsKey(operationId)) {
linkOperationIds.get(operationId).add(location);
} else {
Set<String> locations = new HashSet<String>();
locations.add(location);
linkOperationIds.put(operationId, locations);
}
} } | public class class_name {
@Override
public void addLinkOperationId(String operationId, String location) {
if (linkOperationIds.containsKey(operationId)) {
linkOperationIds.get(operationId).add(location); // depends on control dependency: [if], data = [none]
} else {
Set<String> locations = new HashSet<String>();
locations.add(location); // depends on control dependency: [if], data = [none]
linkOperationIds.put(operationId, locations); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public void setMaxValue(final double VALUE) {
if (Status.RUNNING == timeline.getStatus()) { timeline.jumpTo(Duration.ONE); }
if (null == maxValue) {
if (VALUE < getMinValue()) { setMinValue(VALUE); }
_maxValue = clamp(getMinValue(), Double.MAX_VALUE, VALUE);
setRange(_maxValue - getMinValue());
if (Helper.equals(originalMaxValue, Double.MAX_VALUE)) originalMaxValue = _maxValue;
if (Helper.biggerThan(originalThreshold, getThreshold())) { setThreshold(clamp(getMinValue(), _maxValue, originalThreshold)); }
fireTileEvent(RECALC_EVENT);
if (!valueProperty().isBound()) Tile.this.setValue(clamp(getMinValue(), getMaxValue(), Tile.this.getValue()));
} else {
maxValue.set(VALUE);
}
} } | public class class_name {
public void setMaxValue(final double VALUE) {
if (Status.RUNNING == timeline.getStatus()) { timeline.jumpTo(Duration.ONE); } // depends on control dependency: [if], data = [none]
if (null == maxValue) {
if (VALUE < getMinValue()) { setMinValue(VALUE); } // depends on control dependency: [if], data = [(VALUE]
_maxValue = clamp(getMinValue(), Double.MAX_VALUE, VALUE); // depends on control dependency: [if], data = [none]
setRange(_maxValue - getMinValue()); // depends on control dependency: [if], data = [none]
if (Helper.equals(originalMaxValue, Double.MAX_VALUE)) originalMaxValue = _maxValue;
if (Helper.biggerThan(originalThreshold, getThreshold())) { setThreshold(clamp(getMinValue(), _maxValue, originalThreshold)); } // depends on control dependency: [if], data = [none]
fireTileEvent(RECALC_EVENT); // depends on control dependency: [if], data = [none]
if (!valueProperty().isBound()) Tile.this.setValue(clamp(getMinValue(), getMaxValue(), Tile.this.getValue()));
} else {
maxValue.set(VALUE); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static String getPrefix(String s) {
int dot = s.lastIndexOf(".");
if (dot == -1) {
return s;
} else {
return s.substring(0, dot);
}
} } | public class class_name {
public static String getPrefix(String s) {
int dot = s.lastIndexOf(".");
if (dot == -1) {
return s; // depends on control dependency: [if], data = [none]
} else {
return s.substring(0, dot); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public String getJsonArray() {
int[] var=getArray();
if(var==null) {return null;}
return new com.google.gson.Gson().toJson(var);
} } | public class class_name {
public String getJsonArray() {
int[] var=getArray();
if(var==null) {return null;} // depends on control dependency: [if], data = [none]
return new com.google.gson.Gson().toJson(var);
} } |
public class class_name {
protected void addTopicContentsToLevelDocument(final DocBookVersion docBookVersion, final Level level, final SpecTopic specTopic,
final Element parentNode, final Document doc, final boolean includeInfo) {
final Node section = doc.importNode(specTopic.getXMLDocument().getDocumentElement(), true);
final String infoName;
if (docBookVersion == DocBookVersion.DOCBOOK_50) {
infoName = "info";
} else {
infoName = DocBookUtilities.TOPIC_ROOT_SECTIONINFO_NODE_NAME;
}
if (includeInfo && (level.getLevelType() != LevelType.PART)) {
// Reposition the sectioninfo
final List<Node> sectionInfoNodes = XMLUtilities.getDirectChildNodes(section, infoName);
if (sectionInfoNodes.size() != 0) {
final String parentInfoName;
if (docBookVersion == DocBookVersion.DOCBOOK_50) {
parentInfoName = "info";
} else {
parentInfoName = parentNode.getNodeName() + "info";
}
// Check if the parent already has a info node
final List<Node> infoNodes = XMLUtilities.getDirectChildNodes(parentNode, parentInfoName);
final Node infoNode;
if (infoNodes.size() == 0) {
infoNode = doc.createElement(parentInfoName);
DocBookUtilities.setInfo(docBookVersion, (Element) infoNode, parentNode);
} else {
infoNode = infoNodes.get(0);
}
// Merge the info text
final NodeList sectionInfoChildren = sectionInfoNodes.get(0).getChildNodes();
final Node firstNode = infoNode.getFirstChild();
while (sectionInfoChildren.getLength() > 0) {
if (firstNode != null) {
infoNode.insertBefore(sectionInfoChildren.item(0), firstNode);
} else {
infoNode.appendChild(sectionInfoChildren.item(0));
}
}
}
}
// Remove the title and sectioninfo
final List<Node> titleNodes = XMLUtilities.getDirectChildNodes(section, DocBookUtilities.TOPIC_ROOT_TITLE_NODE_NAME, infoName);
for (final Node removeNode : titleNodes) {
section.removeChild(removeNode);
}
// Move the contents of the section to the chapter/level
final NodeList sectionChildren = section.getChildNodes();
while (sectionChildren.getLength() > 0) {
parentNode.appendChild(sectionChildren.item(0));
}
} } | public class class_name {
protected void addTopicContentsToLevelDocument(final DocBookVersion docBookVersion, final Level level, final SpecTopic specTopic,
final Element parentNode, final Document doc, final boolean includeInfo) {
final Node section = doc.importNode(specTopic.getXMLDocument().getDocumentElement(), true);
final String infoName;
if (docBookVersion == DocBookVersion.DOCBOOK_50) {
infoName = "info"; // depends on control dependency: [if], data = [none]
} else {
infoName = DocBookUtilities.TOPIC_ROOT_SECTIONINFO_NODE_NAME; // depends on control dependency: [if], data = [none]
}
if (includeInfo && (level.getLevelType() != LevelType.PART)) {
// Reposition the sectioninfo
final List<Node> sectionInfoNodes = XMLUtilities.getDirectChildNodes(section, infoName);
if (sectionInfoNodes.size() != 0) {
final String parentInfoName;
if (docBookVersion == DocBookVersion.DOCBOOK_50) {
parentInfoName = "info"; // depends on control dependency: [if], data = [none]
} else {
parentInfoName = parentNode.getNodeName() + "info"; // depends on control dependency: [if], data = [none]
}
// Check if the parent already has a info node
final List<Node> infoNodes = XMLUtilities.getDirectChildNodes(parentNode, parentInfoName);
final Node infoNode;
if (infoNodes.size() == 0) {
infoNode = doc.createElement(parentInfoName); // depends on control dependency: [if], data = [none]
DocBookUtilities.setInfo(docBookVersion, (Element) infoNode, parentNode); // depends on control dependency: [if], data = [none]
} else {
infoNode = infoNodes.get(0); // depends on control dependency: [if], data = [0)]
}
// Merge the info text
final NodeList sectionInfoChildren = sectionInfoNodes.get(0).getChildNodes();
final Node firstNode = infoNode.getFirstChild();
while (sectionInfoChildren.getLength() > 0) {
if (firstNode != null) {
infoNode.insertBefore(sectionInfoChildren.item(0), firstNode); // depends on control dependency: [if], data = [none]
} else {
infoNode.appendChild(sectionInfoChildren.item(0)); // depends on control dependency: [if], data = [none]
}
}
}
}
// Remove the title and sectioninfo
final List<Node> titleNodes = XMLUtilities.getDirectChildNodes(section, DocBookUtilities.TOPIC_ROOT_TITLE_NODE_NAME, infoName);
for (final Node removeNode : titleNodes) {
section.removeChild(removeNode); // depends on control dependency: [for], data = [removeNode]
}
// Move the contents of the section to the chapter/level
final NodeList sectionChildren = section.getChildNodes();
while (sectionChildren.getLength() > 0) {
parentNode.appendChild(sectionChildren.item(0)); // depends on control dependency: [while], data = [0)]
}
} } |
public class class_name {
public void add(Historizable historizable) {
LOGGER.debug("+++ entering HistorizableList.add +++");
synchronized (historizables) {
if (historizables.size() >= fMaxEntriesToKeep) {
historizables.removeLast();
}
historizables.addFirst(historizable);
}
} } | public class class_name {
public void add(Historizable historizable) {
LOGGER.debug("+++ entering HistorizableList.add +++");
synchronized (historizables) {
if (historizables.size() >= fMaxEntriesToKeep) {
historizables.removeLast();
// depends on control dependency: [if], data = [none]
}
historizables.addFirst(historizable);
}
} } |
public class class_name {
public void removeProtoTypeFiles(String classFileName) {
Iterator<Compiler> iter = tempVector.iterator();
while (iter.hasNext()) {
Compiler c = iter.next();
if (classFileName == null) {
c.removeGeneratedClassFiles();
} else if (classFileName.equals(
c.getCompilationContext().getClassFileName())) {
c.removeGeneratedClassFiles();
tempVector.remove(c);
return;
}
}
} } | public class class_name {
public void removeProtoTypeFiles(String classFileName) {
Iterator<Compiler> iter = tempVector.iterator();
while (iter.hasNext()) {
Compiler c = iter.next();
if (classFileName == null) {
c.removeGeneratedClassFiles(); // depends on control dependency: [if], data = [none]
} else if (classFileName.equals(
c.getCompilationContext().getClassFileName())) {
c.removeGeneratedClassFiles(); // depends on control dependency: [if], data = [none]
tempVector.remove(c); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
public void extend(LatLonRect r) {
Preconditions.checkNotNull(r);
// lat is easy
double latMin = r.getLatMin();
double latMax = r.getLatMax();
if (latMax > upperRight.getLatitude()) {
upperRight.setLatitude(latMax);
}
if (latMin < lowerLeft.getLatitude()) {
lowerLeft.setLatitude(latMin);
}
// lon is uglier
if (allLongitude)
return;
// everything is reletive to current LonMin
double lonMin = getLonMin();
double lonMax = getLonMax();
double nlonMin = LatLonPointImpl.lonNormal( r.getLonMin(), lonMin);
double nlonMax = nlonMin + r.getWidth();
lonMin = Math.min(lonMin, nlonMin);
lonMax = Math.max(lonMax, nlonMax);
width = lonMax - lonMin;
allLongitude = width >= 360.0;
if (allLongitude) {
width = 360.0;
lonMin = -180.0;
} else {
lonMin = LatLonPointImpl.lonNormal(lonMin);
}
lowerLeft.setLongitude(lonMin);
upperRight.setLongitude(lonMin+width);
lon0 = lonMin+width/2;
crossDateline = lowerLeft.getLongitude() > upperRight.getLongitude();
} } | public class class_name {
public void extend(LatLonRect r) {
Preconditions.checkNotNull(r);
// lat is easy
double latMin = r.getLatMin();
double latMax = r.getLatMax();
if (latMax > upperRight.getLatitude()) {
upperRight.setLatitude(latMax);
// depends on control dependency: [if], data = [(latMax]
}
if (latMin < lowerLeft.getLatitude()) {
lowerLeft.setLatitude(latMin);
// depends on control dependency: [if], data = [(latMin]
}
// lon is uglier
if (allLongitude)
return;
// everything is reletive to current LonMin
double lonMin = getLonMin();
double lonMax = getLonMax();
double nlonMin = LatLonPointImpl.lonNormal( r.getLonMin(), lonMin);
double nlonMax = nlonMin + r.getWidth();
lonMin = Math.min(lonMin, nlonMin);
lonMax = Math.max(lonMax, nlonMax);
width = lonMax - lonMin;
allLongitude = width >= 360.0;
if (allLongitude) {
width = 360.0;
// depends on control dependency: [if], data = [none]
lonMin = -180.0;
// depends on control dependency: [if], data = [none]
} else {
lonMin = LatLonPointImpl.lonNormal(lonMin);
// depends on control dependency: [if], data = [none]
}
lowerLeft.setLongitude(lonMin);
upperRight.setLongitude(lonMin+width);
lon0 = lonMin+width/2;
crossDateline = lowerLeft.getLongitude() > upperRight.getLongitude();
} } |
public class class_name {
public static boolean verify(final String name, final X509Certificate cert) {
try {
verifier.verify(name, cert);
return true;
} catch (final SSLException ex) {
// this is only logged here because eventually a CertificateException will be throw in verifyAndThrow.
// If this method is called in another method, the caller should be responsible to throw exceptions,
logger.error(ex.getMessage(), ex);
return false;
}
} } | public class class_name {
public static boolean verify(final String name, final X509Certificate cert) {
try {
verifier.verify(name, cert); // depends on control dependency: [try], data = [none]
return true; // depends on control dependency: [try], data = [none]
} catch (final SSLException ex) {
// this is only logged here because eventually a CertificateException will be throw in verifyAndThrow.
// If this method is called in another method, the caller should be responsible to throw exceptions,
logger.error(ex.getMessage(), ex);
return false;
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
@Override
public void terminate() {
// disable ability to request for restart
restartRequested.set(false);
restartDisabled.set(true);
if (nodeLifecycle.tryToMoveTo(NodeLifecycle.State.STOPPING)) {
LOG.info("Stopping SonarQube");
}
stopAll();
if (stopperThread != null) {
stopperThread.interrupt();
}
if (restarterThread != null) {
restarterThread.interrupt();
}
keepAlive.countDown();
} } | public class class_name {
@Override
public void terminate() {
// disable ability to request for restart
restartRequested.set(false);
restartDisabled.set(true);
if (nodeLifecycle.tryToMoveTo(NodeLifecycle.State.STOPPING)) {
LOG.info("Stopping SonarQube"); // depends on control dependency: [if], data = [none]
}
stopAll();
if (stopperThread != null) {
stopperThread.interrupt(); // depends on control dependency: [if], data = [none]
}
if (restarterThread != null) {
restarterThread.interrupt(); // depends on control dependency: [if], data = [none]
}
keepAlive.countDown();
} } |
public class class_name {
@Override
public Object getObjectInstance(Object obj, Name name, Context nameCtx, Hashtable<?, ?> environment) throws Exception {
if (obj instanceof Reference) {
try {
String jndiName = nameCtx == null ? "" : nameCtx.getNameInNamespace();
jndiName += name == null ? "" : name.toString();
String urlValue = (String) ((Reference) obj).get(0).getContent();
return createURL(jndiName, urlValue);
} catch (Throwable t) {
NamingException ne = new NamingException();
ne.initCause(t);
throw ne;
}
}
return null;
} } | public class class_name {
@Override
public Object getObjectInstance(Object obj, Name name, Context nameCtx, Hashtable<?, ?> environment) throws Exception {
if (obj instanceof Reference) {
try {
String jndiName = nameCtx == null ? "" : nameCtx.getNameInNamespace();
jndiName += name == null ? "" : name.toString(); // depends on control dependency: [try], data = [none]
String urlValue = (String) ((Reference) obj).get(0).getContent();
return createURL(jndiName, urlValue); // depends on control dependency: [try], data = [none]
} catch (Throwable t) {
NamingException ne = new NamingException();
ne.initCause(t);
throw ne;
} // depends on control dependency: [catch], data = [none]
}
return null;
} } |
public class class_name {
public synchronized void setUseRedoThread(final boolean useRedoThread) {
if (this.useRedoThread && !useRedoThread) { // Stop Redo Thread
if (redoThread != null) {
stopRedoThread(redoThread);
}
}
this.useRedoThread = useRedoThread;
} } | public class class_name {
public synchronized void setUseRedoThread(final boolean useRedoThread) {
if (this.useRedoThread && !useRedoThread) { // Stop Redo Thread
if (redoThread != null) {
stopRedoThread(redoThread); // depends on control dependency: [if], data = [(redoThread]
}
}
this.useRedoThread = useRedoThread;
} } |
public class class_name {
public void checkValidatorCalled() {
if (!execute.isSuppressValidatorCallCheck() && certainlyNotBeValidatorCalled()) {
execute.getFormMeta().filter(meta -> isValidatorAnnotated(meta)).ifPresent(meta -> {
throwLonelyValidatorAnnotationException(meta); // #hope see fields in nested element
});
}
} } | public class class_name {
public void checkValidatorCalled() {
if (!execute.isSuppressValidatorCallCheck() && certainlyNotBeValidatorCalled()) {
execute.getFormMeta().filter(meta -> isValidatorAnnotated(meta)).ifPresent(meta -> {
throwLonelyValidatorAnnotationException(meta); // #hope see fields in nested element // depends on control dependency: [if], data = [none]
});
}
} } |
public class class_name {
private static String convertToValidJavaClassname(String inName) {
if (inName == null) return "_";
if (inName.startsWith("scriptdef_")) inName = inName.substring(10);
if (inName.equals("")) return "_";
StringBuilder output = new StringBuilder(inName.length());
boolean firstChar = true;
for (int i = 0; i < inName.length(); ++i) {
char ch = inName.charAt(i);
if (firstChar && !Character.isJavaIdentifierStart(ch)) {
ch = '_';
} else if (!firstChar
&& !(Character.isJavaIdentifierPart(ch) || ch == '.')) {
ch = '_';
}
firstChar = (ch == '.');
output.append(ch);
}
return output.toString();
} } | public class class_name {
private static String convertToValidJavaClassname(String inName) {
if (inName == null) return "_";
if (inName.startsWith("scriptdef_")) inName = inName.substring(10);
if (inName.equals("")) return "_";
StringBuilder output = new StringBuilder(inName.length());
boolean firstChar = true;
for (int i = 0; i < inName.length(); ++i) {
char ch = inName.charAt(i);
if (firstChar && !Character.isJavaIdentifierStart(ch)) {
ch = '_'; // depends on control dependency: [if], data = [none]
} else if (!firstChar
&& !(Character.isJavaIdentifierPart(ch) || ch == '.')) {
ch = '_'; // depends on control dependency: [if], data = [none]
}
firstChar = (ch == '.'); // depends on control dependency: [for], data = [none]
output.append(ch); // depends on control dependency: [for], data = [none]
}
return output.toString();
} } |
public class class_name {
private Map<SchemaTable, List<Multimap<BiPredicate, RecordId>>> groupIdsBySchemaTable() {
Map<SchemaTable, List<Multimap<BiPredicate, RecordId>>> result = new HashMap<>();
for (HasContainer idHasContainer : this.idHasContainers) {
Map<SchemaTable, Boolean> newHasContainerMap = new HashMap<>();
@SuppressWarnings("unchecked")
P<Object> idPredicate = (P<Object>) idHasContainer.getPredicate();
BiPredicate biPredicate = idHasContainer.getBiPredicate();
//This is statement is for g.V().hasId(Collection) where the logic is actually P.within not P.eq
if (biPredicate == Compare.eq && idPredicate.getValue() instanceof Collection && ((Collection) idPredicate.getValue()).size() > 1) {
biPredicate = Contains.within;
}
Multimap<BiPredicate, RecordId> biPredicateRecordIdMultimap;
if (idPredicate.getValue() instanceof Collection) {
@SuppressWarnings("unchecked")
Collection<Object> ids = (Collection<Object>) idPredicate.getValue();
for (Object id : ids) {
RecordId recordId = RecordId.from(id);
List<Multimap<BiPredicate, RecordId>> biPredicateRecordIdList = result.get(recordId.getSchemaTable());
Boolean newHasContainer = newHasContainerMap.get(recordId.getSchemaTable());
if (biPredicateRecordIdList == null) {
biPredicateRecordIdList = new ArrayList<>();
biPredicateRecordIdMultimap = LinkedListMultimap.create();
biPredicateRecordIdList.add(biPredicateRecordIdMultimap);
result.put(recordId.getSchemaTable(), biPredicateRecordIdList);
newHasContainerMap.put(recordId.getSchemaTable(), false);
} else if (newHasContainer == null) {
biPredicateRecordIdMultimap = LinkedListMultimap.create();
biPredicateRecordIdList.add(biPredicateRecordIdMultimap);
newHasContainerMap.put(recordId.getSchemaTable(), false);
}
biPredicateRecordIdMultimap = biPredicateRecordIdList.get(biPredicateRecordIdList.size() - 1);
biPredicateRecordIdMultimap.put(biPredicate, recordId);
}
} else {
Object id = idPredicate.getValue();
RecordId recordId = RecordId.from(id);
List<Multimap<BiPredicate, RecordId>> biPredicateRecordIdList = result.computeIfAbsent(recordId.getSchemaTable(), k -> new ArrayList<>());
biPredicateRecordIdMultimap = LinkedListMultimap.create();
biPredicateRecordIdList.add(biPredicateRecordIdMultimap);
biPredicateRecordIdMultimap.put(biPredicate, recordId);
}
}
return result;
} } | public class class_name {
private Map<SchemaTable, List<Multimap<BiPredicate, RecordId>>> groupIdsBySchemaTable() {
Map<SchemaTable, List<Multimap<BiPredicate, RecordId>>> result = new HashMap<>();
for (HasContainer idHasContainer : this.idHasContainers) {
Map<SchemaTable, Boolean> newHasContainerMap = new HashMap<>();
@SuppressWarnings("unchecked")
P<Object> idPredicate = (P<Object>) idHasContainer.getPredicate();
BiPredicate biPredicate = idHasContainer.getBiPredicate();
//This is statement is for g.V().hasId(Collection) where the logic is actually P.within not P.eq
if (biPredicate == Compare.eq && idPredicate.getValue() instanceof Collection && ((Collection) idPredicate.getValue()).size() > 1) {
biPredicate = Contains.within; // depends on control dependency: [if], data = [none]
}
Multimap<BiPredicate, RecordId> biPredicateRecordIdMultimap;
if (idPredicate.getValue() instanceof Collection) {
@SuppressWarnings("unchecked")
Collection<Object> ids = (Collection<Object>) idPredicate.getValue();
for (Object id : ids) {
RecordId recordId = RecordId.from(id);
List<Multimap<BiPredicate, RecordId>> biPredicateRecordIdList = result.get(recordId.getSchemaTable());
Boolean newHasContainer = newHasContainerMap.get(recordId.getSchemaTable());
if (biPredicateRecordIdList == null) {
biPredicateRecordIdList = new ArrayList<>(); // depends on control dependency: [if], data = [none]
biPredicateRecordIdMultimap = LinkedListMultimap.create(); // depends on control dependency: [if], data = [none]
biPredicateRecordIdList.add(biPredicateRecordIdMultimap); // depends on control dependency: [if], data = [none]
result.put(recordId.getSchemaTable(), biPredicateRecordIdList); // depends on control dependency: [if], data = [none]
newHasContainerMap.put(recordId.getSchemaTable(), false); // depends on control dependency: [if], data = [none]
} else if (newHasContainer == null) {
biPredicateRecordIdMultimap = LinkedListMultimap.create(); // depends on control dependency: [if], data = [none]
biPredicateRecordIdList.add(biPredicateRecordIdMultimap); // depends on control dependency: [if], data = [none]
newHasContainerMap.put(recordId.getSchemaTable(), false); // depends on control dependency: [if], data = [none]
}
biPredicateRecordIdMultimap = biPredicateRecordIdList.get(biPredicateRecordIdList.size() - 1); // depends on control dependency: [for], data = [none]
biPredicateRecordIdMultimap.put(biPredicate, recordId); // depends on control dependency: [for], data = [none]
}
} else {
Object id = idPredicate.getValue();
RecordId recordId = RecordId.from(id);
List<Multimap<BiPredicate, RecordId>> biPredicateRecordIdList = result.computeIfAbsent(recordId.getSchemaTable(), k -> new ArrayList<>());
biPredicateRecordIdMultimap = LinkedListMultimap.create(); // depends on control dependency: [if], data = [none]
biPredicateRecordIdList.add(biPredicateRecordIdMultimap); // depends on control dependency: [if], data = [none]
biPredicateRecordIdMultimap.put(biPredicate, recordId); // depends on control dependency: [if], data = [none]
}
}
return result;
} } |
public class class_name {
public synchronized boolean createShareRequest(String filePath, Destination destination) {
boolean isSuccessful = false;
SQLiteDatabase db = null;
try {
db = getWritableDatabase();
// Create new record.
ContentValues values = new ContentValues();
values.put(ShareRequestTable.COLUMN_FILE_PATH, filePath);
values.put(ShareRequestTable.COLUMN_DESTINATION, destination.getHash());
values.put(ShareRequestTable.COLUMN_TIME_CREATED, System.currentTimeMillis());
values.put(ShareRequestTable.COLUMN_STATE, ShareRequest.STATE_PENDING);
values.put(ShareRequestTable.COLUMN_FAILS, 0);
isSuccessful = db.insert(ShareRequestTable.NAME, null, values) != ID_ERROR;
sLogger.log(WingsDbHelper.class, "createShareRequest", "isSuccessful=" + isSuccessful + " filePath="
+ filePath + " destination=" + destination.getHash());
} catch (SQLException e) {
// Do nothing.
} finally {
db.close();
}
// Reset retry policy because a new record is created.
RetryPolicy.reset(mContext);
return isSuccessful;
} } | public class class_name {
public synchronized boolean createShareRequest(String filePath, Destination destination) {
boolean isSuccessful = false;
SQLiteDatabase db = null;
try {
db = getWritableDatabase(); // depends on control dependency: [try], data = [none]
// Create new record.
ContentValues values = new ContentValues();
values.put(ShareRequestTable.COLUMN_FILE_PATH, filePath); // depends on control dependency: [try], data = [none]
values.put(ShareRequestTable.COLUMN_DESTINATION, destination.getHash()); // depends on control dependency: [try], data = [none]
values.put(ShareRequestTable.COLUMN_TIME_CREATED, System.currentTimeMillis()); // depends on control dependency: [try], data = [none]
values.put(ShareRequestTable.COLUMN_STATE, ShareRequest.STATE_PENDING); // depends on control dependency: [try], data = [none]
values.put(ShareRequestTable.COLUMN_FAILS, 0); // depends on control dependency: [try], data = [none]
isSuccessful = db.insert(ShareRequestTable.NAME, null, values) != ID_ERROR; // depends on control dependency: [try], data = [none]
sLogger.log(WingsDbHelper.class, "createShareRequest", "isSuccessful=" + isSuccessful + " filePath="
+ filePath + " destination=" + destination.getHash()); // depends on control dependency: [try], data = [none]
} catch (SQLException e) {
// Do nothing.
} finally { // depends on control dependency: [catch], data = [none]
db.close();
}
// Reset retry policy because a new record is created.
RetryPolicy.reset(mContext);
return isSuccessful;
} } |
public class class_name {
public static void deleteQueueMapping(DbConn cnx, int id)
{
QueryResult qr = cnx.runUpdate("dp_delete_by_id", id);
if (qr.nbUpdated != 1)
{
cnx.setRollbackOnly();
throw new JqmAdminApiUserException("no item with ID " + id);
}
} } | public class class_name {
public static void deleteQueueMapping(DbConn cnx, int id)
{
QueryResult qr = cnx.runUpdate("dp_delete_by_id", id);
if (qr.nbUpdated != 1)
{
cnx.setRollbackOnly(); // depends on control dependency: [if], data = [none]
throw new JqmAdminApiUserException("no item with ID " + id);
}
} } |
public class class_name {
@Override
public void update(Result<Integer> result,
int nodeIndex,
String sql,
Object[] args)
{
NodePodAmp node = _podKraken.getNode(nodeIndex);
for (int i = 0; i < node.serverCount(); i++) {
ServerBartender server = node.server(i);
if (server != null && server.isUp()) {
ClusterServiceKraken proxy = _podKraken.getProxy(server);
// XXX: failover
proxy.update(result, nodeIndex, sql, args);
return;
}
}
RuntimeException exn = new ServiceException(L.l("update failed with no live servers"));
exn.fillInStackTrace();
// XXX: fail
result.fail(exn);
} } | public class class_name {
@Override
public void update(Result<Integer> result,
int nodeIndex,
String sql,
Object[] args)
{
NodePodAmp node = _podKraken.getNode(nodeIndex);
for (int i = 0; i < node.serverCount(); i++) {
ServerBartender server = node.server(i);
if (server != null && server.isUp()) {
ClusterServiceKraken proxy = _podKraken.getProxy(server);
// XXX: failover
proxy.update(result, nodeIndex, sql, args); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
}
RuntimeException exn = new ServiceException(L.l("update failed with no live servers"));
exn.fillInStackTrace();
// XXX: fail
result.fail(exn);
} } |
public class class_name {
    /**
     * Emits the generated "free" method for the current class: iterates the
     * class-field records belonging to this class and writes a "field = null;"
     * statement for each non-static, named class/native field.
     */
    public void writeFree()
    {
        Record recClassInfo = this.getMainRecord();
        ClassFields recClassFields = new ClassFields(this);
        try {
            String strFieldName;
            String strFieldClass;
            // Now, zero out all the class fields
            recClassFields.setKeyArea(ClassFields.CLASS_INFO_CLASS_NAME_KEY);
            // Restrict the scan to fields of this class only.
            SubFileFilter fileBehavior2 = new SubFileFilter(recClassInfo.getField(ClassInfo.CLASS_NAME), ClassFields.CLASS_INFO_CLASS_NAME, null, null, null, null);
            recClassFields.addListener(fileBehavior2); // Only read through the class fields
            recClassFields.close();
            recClassFields.moveFirst();
            // Write the method header, then indent the body one tab stop.
            this.writeMethodInterface(null, "free", "void", "", "", "Release the objects bound to this record.", null);
            m_StreamOut.setTabs(+1);
            m_StreamOut.writeit("super.free();\n");
            while (recClassFields.hasNext())
            {
                recClassFields.next();
                strFieldName = recClassFields.getField(ClassFields.CLASS_FIELD_NAME).getString();
                String strClassFieldType = recClassFields.getField(ClassFields.CLASS_FIELDS_TYPE).toString();
                // Only class or native fields, with a non-empty name, that are
                // not marked "S" (static) are released here.
                if ((strClassFieldType.equalsIgnoreCase(ClassFieldsTypeField.CLASS_FIELD))
                    || (strClassFieldType.equalsIgnoreCase(ClassFieldsTypeField.NATIVE_FIELD)))
                    if (strFieldName.length() != 0)
                        if (!recClassFields.getField(ClassFields.CLASS_FIELD_PROTECT).getString().equalsIgnoreCase("S")) // Not static
                {
                    String strReference = "";
                    if (strClassFieldType.equalsIgnoreCase(ClassFieldsTypeField.CLASS_FIELD))
                        strReference = "null";
                    else
                    {
                        // Native field: default reference comes from its
                        // declared initial value, else "0" (or "" for Strings).
                        strReference = recClassFields.getField(ClassFields.CLASS_FIELD_INITIAL).getString();
                        if (strReference.length() == 0)
                        {
                            strReference = "0";
                            strFieldClass = recClassFields.getField(ClassFields.CLASS_FIELD_CLASS).getString();
                            if (strFieldClass.equalsIgnoreCase("String"))
                                strReference = "\"\"";
                        }
                    }
                    // NOTE(review): strReference is only compared against
                    // "(none)"; the emitted assignment is always "= null"
                    // regardless of its value — confirm this is intended.
                    if (!strReference.equals("(none)"))
                        m_StreamOut.writeit("\t" + strFieldName + " = null;\n");
                }
            }
            recClassFields.close();
            m_StreamOut.setTabs(-1);
            m_StreamOut.writeit("}\n");
        } catch (DBException ex) {
            ex.printStackTrace();
        } finally {
            if (recClassFields != null)
                recClassFields.free();
        }
    }
}
public void writeFree()
{
Record recClassInfo = this.getMainRecord();
ClassFields recClassFields = new ClassFields(this);
try {
String strFieldName;
String strFieldClass;
// Now, zero out all the class fields
recClassFields.setKeyArea(ClassFields.CLASS_INFO_CLASS_NAME_KEY);
SubFileFilter fileBehavior2 = new SubFileFilter(recClassInfo.getField(ClassInfo.CLASS_NAME), ClassFields.CLASS_INFO_CLASS_NAME, null, null, null, null);
recClassFields.addListener(fileBehavior2); // Only read through the class fields
recClassFields.close();
recClassFields.moveFirst();
this.writeMethodInterface(null, "free", "void", "", "", "Release the objects bound to this record.", null);
m_StreamOut.setTabs(+1);
m_StreamOut.writeit("super.free();\n");
while (recClassFields.hasNext())
{
recClassFields.next();
strFieldName = recClassFields.getField(ClassFields.CLASS_FIELD_NAME).getString();
String strClassFieldType = recClassFields.getField(ClassFields.CLASS_FIELDS_TYPE).toString();
if ((strClassFieldType.equalsIgnoreCase(ClassFieldsTypeField.CLASS_FIELD))
|| (strClassFieldType.equalsIgnoreCase(ClassFieldsTypeField.NATIVE_FIELD)))
if (strFieldName.length() != 0)
if (!recClassFields.getField(ClassFields.CLASS_FIELD_PROTECT).getString().equalsIgnoreCase("S")) // Not static
{
String strReference = "";
if (strClassFieldType.equalsIgnoreCase(ClassFieldsTypeField.CLASS_FIELD))
strReference = "null";
else
{
strReference = recClassFields.getField(ClassFields.CLASS_FIELD_INITIAL).getString(); // depends on control dependency: [if], data = [none]
if (strReference.length() == 0)
{
strReference = "0"; // depends on control dependency: [if], data = [none]
strFieldClass = recClassFields.getField(ClassFields.CLASS_FIELD_CLASS).getString(); // depends on control dependency: [if], data = [none]
if (strFieldClass.equalsIgnoreCase("String"))
strReference = "\"\"";
}
}
if (!strReference.equals("(none)"))
m_StreamOut.writeit("\t" + strFieldName + " = null;\n");
}
}
recClassFields.close();
m_StreamOut.setTabs(-1);
m_StreamOut.writeit("}\n");
} catch (DBException ex) {
ex.printStackTrace();
} finally {
if (recClassFields != null)
recClassFields.free();
}
} } |
public class class_name {
    /**
     * Parses a whitespace-separated list of cdata-section element names from
     * the given property and registers them via setCdataSectionElements(Vector).
     * Names may contain "{uri}" prefixes; whitespace inside curly braces is
     * treated as part of the name, not as a separator.
     */
    private void setCdataSectionElements(String key, Properties props)
    {
        String s = props.getProperty(key);
        if (null != s)
        {
            // Vector of URI/LocalName pairs
            Vector v = new Vector();
            int l = s.length();
            boolean inCurly = false;
            StringBuffer buf = new StringBuffer();
            // parse through string, breaking on whitespaces. I do this instead
            // of a tokenizer so I can track whitespace inside of curly brackets,
            // which theoretically shouldn't happen if they contain legal URLs.
            for (int i = 0; i < l; i++)
            {
                char c = s.charAt(i);
                if (Character.isWhitespace(c))
                {
                    if (!inCurly)
                    {
                        // Separator outside braces: flush the collected name.
                        if (buf.length() > 0)
                        {
                            addCdataSectionElement(buf.toString(), v);
                            buf.setLength(0);
                        }
                        continue;
                    }
                    // Whitespace inside {...} falls through and is appended.
                }
                else if ('{' == c)
                    inCurly = true;
                else if ('}' == c)
                    inCurly = false;
                buf.append(c);
            }
            // Flush the trailing name, if any.
            if (buf.length() > 0)
            {
                addCdataSectionElement(buf.toString(), v);
                buf.setLength(0);
            }
            // call the official, public method to set the collected names
            setCdataSectionElements(v);
        }
    }
}
private void setCdataSectionElements(String key, Properties props)
{
String s = props.getProperty(key);
if (null != s)
{
// Vector of URI/LocalName pairs
Vector v = new Vector();
int l = s.length();
boolean inCurly = false;
StringBuffer buf = new StringBuffer();
// parse through string, breaking on whitespaces. I do this instead
// of a tokenizer so I can track whitespace inside of curly brackets,
// which theoretically shouldn't happen if they contain legal URLs.
for (int i = 0; i < l; i++)
{
char c = s.charAt(i);
if (Character.isWhitespace(c))
{
if (!inCurly)
{
if (buf.length() > 0)
{
addCdataSectionElement(buf.toString(), v); // depends on control dependency: [if], data = [none]
buf.setLength(0); // depends on control dependency: [if], data = [0)]
}
continue;
}
}
else if ('{' == c)
inCurly = true;
else if ('}' == c)
inCurly = false;
buf.append(c); // depends on control dependency: [for], data = [none]
}
if (buf.length() > 0)
{
addCdataSectionElement(buf.toString(), v); // depends on control dependency: [if], data = [none]
buf.setLength(0); // depends on control dependency: [if], data = [0)]
}
// call the official, public method to set the collected names
setCdataSectionElements(v); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
    /**
     * Wraps the given CQL statements into a single batch query.
     *
     * @param statements individual statements, emitted one per line
     * @return the full "BEGIN BATCH ... APPLY BATCH;" query text
     */
    public static String batchQueryGenerator(List<String> statements) {
        StringBuilder batch = new StringBuilder("BEGIN BATCH \n");
        for (String current : statements) {
            batch.append(current).append('\n');
        }
        return batch.append(" APPLY BATCH;").toString();
    }
}
public static String batchQueryGenerator(List<String> statements) {
StringBuilder sb = new StringBuilder("BEGIN BATCH \n");
for (String statement : statements) {
sb.append(statement).append("\n"); // depends on control dependency: [for], data = [statement]
}
sb.append(" APPLY BATCH;");
return sb.toString();
} } |
public class class_name {
public Instance withBlockDeviceMappings(InstanceBlockDeviceMapping... blockDeviceMappings) {
if (this.blockDeviceMappings == null) {
setBlockDeviceMappings(new com.amazonaws.internal.SdkInternalList<InstanceBlockDeviceMapping>(blockDeviceMappings.length));
}
for (InstanceBlockDeviceMapping ele : blockDeviceMappings) {
this.blockDeviceMappings.add(ele);
}
return this;
} } | public class class_name {
public Instance withBlockDeviceMappings(InstanceBlockDeviceMapping... blockDeviceMappings) {
if (this.blockDeviceMappings == null) {
setBlockDeviceMappings(new com.amazonaws.internal.SdkInternalList<InstanceBlockDeviceMapping>(blockDeviceMappings.length)); // depends on control dependency: [if], data = [none]
}
for (InstanceBlockDeviceMapping ele : blockDeviceMappings) {
this.blockDeviceMappings.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} } |
public class class_name {
public void marshall(KinesisFirehoseOutput kinesisFirehoseOutput, ProtocolMarshaller protocolMarshaller) {
if (kinesisFirehoseOutput == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(kinesisFirehoseOutput.getResourceARN(), RESOURCEARN_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(KinesisFirehoseOutput kinesisFirehoseOutput, ProtocolMarshaller protocolMarshaller) {
if (kinesisFirehoseOutput == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(kinesisFirehoseOutput.getResourceARN(), RESOURCEARN_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public static ServiceAccount getInstance() {
ServiceAccount account = getInstanceByEnvVars();
if (account == null) {
Map<String, String> env = System.getenv();
String serviceName = env.get(GP_SERVICE_NAME);
String serviceInstanceName = env.get(GP_SERVICE_INSTANCE_NAME);
account = getInstanceByVcapServices(serviceName, serviceInstanceName);
}
return account;
} } | public class class_name {
public static ServiceAccount getInstance() {
ServiceAccount account = getInstanceByEnvVars();
if (account == null) {
Map<String, String> env = System.getenv();
String serviceName = env.get(GP_SERVICE_NAME);
String serviceInstanceName = env.get(GP_SERVICE_INSTANCE_NAME);
account = getInstanceByVcapServices(serviceName, serviceInstanceName); // depends on control dependency: [if], data = [none]
}
return account;
} } |
public class class_name {
public void setDeploymentIds(java.util.Collection<String> deploymentIds) {
if (deploymentIds == null) {
this.deploymentIds = null;
return;
}
this.deploymentIds = new com.amazonaws.internal.SdkInternalList<String>(deploymentIds);
} } | public class class_name {
public void setDeploymentIds(java.util.Collection<String> deploymentIds) {
if (deploymentIds == null) {
this.deploymentIds = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.deploymentIds = new com.amazonaws.internal.SdkInternalList<String>(deploymentIds);
} } |
public class class_name {
    /**
     * Registers an asynchronous consumer with the server by exchanging a
     * SEG_REGISTER_ASYNC_CONSUMER (or the stoppable variant) request over the
     * conversation, then checks the reply for server-side exceptions.
     * The buffer fields are written in the exact order the server expects.
     */
    public void setAsynchConsumer(AsynchConsumerCallback consumer,
                                  int maxActiveMessages,
                                  long messageLockExpiry,
                                  int maxBatchSize,
                                  OrderingContext orderContext,
                                  int maxSequentialFailures,          //SIB0115d.comms
                                  long hiddenMessageDelay,
                                  boolean stoppable)                  //472879
        throws SISessionUnavailableException, SISessionDroppedException,
               SIConnectionUnavailableException, SIConnectionDroppedException,
               SIErrorException,
               SIIncorrectCallException
    {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, "setAsynchConsumer",
                                                                                    new Object[]
                                                                                    {
                                                                                        consumer,
                                                                                        maxActiveMessages,
                                                                                        messageLockExpiry,
                                                                                        maxBatchSize,
                                                                                        orderContext,
                                                                                        maxSequentialFailures,   //SIB0115d.comms
                                                                                        hiddenMessageDelay,
                                                                                        stoppable                //472879
                                                                                    });
        if (sessionId == 0)
        {
            // If the session Id = 0, then no one called setSessionId(). As such we are unable to flow
            // to the server as we do not know which session to instruct the server to use.
            SIErrorException e = new SIErrorException(
                nls.getFormattedMessage("SESSION_ID_HAS_NOT_BEEN_SET_SICO1043", null, null)
            );
            FFDCFilter.processException(e, CLASS_NAME + ".setAsyncConsumer",
                                        CommsConstants.CONVERSATIONHELPERIMPL_02, this);
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, "Session Id was 0", e);
            throw e;
        }
        CommsByteBuffer request = getCommsByteBuffer();
        // Connection object id
        request.putShort(connectionObjectId);
        // Consumer session id
        request.putShort(sessionId);
        // Now put the message order context id if we have one
        if (orderContext != null)
        {
            request.putShort(((OrderingContextProxy)orderContext).getId());
        }
        else
        {
            request.putShort(CommsConstants.NO_ORDER_CONTEXT);
        }
        // Client session id - this is the proxy queue ID
        request.putShort(proxyQueueId);
        // Max active messages
        request.putInt(maxActiveMessages);
        // Message lock expiry
        request.putLong(messageLockExpiry);
        // Max batch size
        request.putInt(maxBatchSize);
        // If callback is Stoppable then send maxSequentialFailures & hiddenMessageDelay then change the
        // Segment Id to Stoppable                                                        SIB0115d.comms
        int JFapSegmentId = JFapChannelConstants.SEG_REGISTER_ASYNC_CONSUMER;           //SIB0115d.comms
        if (stoppable) {                                                                //SIB0115d.comms,472879
            request.putInt(maxSequentialFailures);                                      //SIB0115d.comms
            request.putLong(hiddenMessageDelay);
            JFapSegmentId = JFapChannelConstants.SEG_REGISTER_STOPPABLE_ASYNC_CONSUMER; //SIB0115d.comms
        }                                                                               //SIB0115d.comms
        CommsByteBuffer reply = null;
        try
        {
            // Pass on call to server
            reply = jfapExchange(request,
                                 JFapSegmentId,                                         //SIB0115d.comms
                                 JFapChannelConstants.PRIORITY_MEDIUM,
                                 true);
        }
        catch (SIConnectionLostException e)
        {
            // No FFDC Code needed
            // Converting this to a connection dropped as that is all we can throw
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, "Connection was lost", e);
            throw new SIConnectionDroppedException(e.getMessage(), e);
        }
        // Confirm appropriate data returned
        // NOTE(review): the completion code is always read against
        // SEG_REGISTER_ASYNC_CONSUMER_R, even for the stoppable variant —
        // confirm the reply segment id is shared between both registrations.
        try
        {
            short err = reply.getCommandCompletionCode(JFapChannelConstants.SEG_REGISTER_ASYNC_CONSUMER_R);
            if (err != CommsConstants.SI_NO_EXCEPTION)
            {
                checkFor_SISessionUnavailableException(reply, err);
                checkFor_SISessionDroppedException(reply, err);
                checkFor_SIConnectionUnavailableException(reply, err);
                checkFor_SIConnectionDroppedException(reply, err);
                checkFor_SIIncorrectCallException(reply, err);
                checkFor_SIErrorException(reply, err);
                defaultChecker(reply, err);
            }
        }
        finally
        {
            if (reply != null) reply.release();
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "setAsynchConsumer");
    }
}
public void setAsynchConsumer(AsynchConsumerCallback consumer,
int maxActiveMessages,
long messageLockExpiry,
int maxBatchSize,
OrderingContext orderContext,
int maxSequentialFailures, //SIB0115d.comms
long hiddenMessageDelay,
boolean stoppable) //472879
throws SISessionUnavailableException, SISessionDroppedException,
SIConnectionUnavailableException, SIConnectionDroppedException,
SIErrorException,
SIIncorrectCallException
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, "setAsynchConsumer",
new Object[]
{
consumer,
maxActiveMessages,
messageLockExpiry,
maxBatchSize,
orderContext,
maxSequentialFailures, //SIB0115d.comms
hiddenMessageDelay,
stoppable //472879
});
if (sessionId == 0)
{
// If the session Id = 0, then no one called setSessionId(). As such we are unable to flow
// to the server as we do not know which session to instruct the server to use.
SIErrorException e = new SIErrorException(
nls.getFormattedMessage("SESSION_ID_HAS_NOT_BEEN_SET_SICO1043", null, null)
);
FFDCFilter.processException(e, CLASS_NAME + ".setAsyncConsumer",
CommsConstants.CONVERSATIONHELPERIMPL_02, this);
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, "Session Id was 0", e);
throw e;
}
CommsByteBuffer request = getCommsByteBuffer();
// Connection object id
request.putShort(connectionObjectId);
// Consumer session id
request.putShort(sessionId);
// Now put the message order context id if we have one
if (orderContext != null)
{
request.putShort(((OrderingContextProxy)orderContext).getId());
}
else
{
request.putShort(CommsConstants.NO_ORDER_CONTEXT);
}
// Client session id - this is the proxy queue ID
request.putShort(proxyQueueId);
// Max active messages
request.putInt(maxActiveMessages);
// Message lock expiry
request.putLong(messageLockExpiry);
// Max batch size
request.putInt(maxBatchSize);
// If callback is Stoppable then send maxSequentialFailures & hiddenMessageDelay then change the
// Segment Id to Stoppable SIB0115d.comms
int JFapSegmentId = JFapChannelConstants.SEG_REGISTER_ASYNC_CONSUMER; //SIB0115d.comms
if (stoppable) { //SIB0115d.comms,472879
request.putInt(maxSequentialFailures); //SIB0115d.comms
request.putLong(hiddenMessageDelay);
JFapSegmentId = JFapChannelConstants.SEG_REGISTER_STOPPABLE_ASYNC_CONSUMER; //SIB0115d.comms
} //SIB0115d.comms
CommsByteBuffer reply = null;
try
{
// Pass on call to server
reply = jfapExchange(request,
JFapSegmentId, //SIB0115d.comms
JFapChannelConstants.PRIORITY_MEDIUM,
true);
}
catch (SIConnectionLostException e)
{
// No FFDC Code needed
// Converting this to a connection dropped as that is all we can throw
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, "Connection was lost", e);
throw new SIConnectionDroppedException(e.getMessage(), e);
}
// Confirm appropriate data returned
try
{
short err = reply.getCommandCompletionCode(JFapChannelConstants.SEG_REGISTER_ASYNC_CONSUMER_R);
if (err != CommsConstants.SI_NO_EXCEPTION)
{
checkFor_SISessionUnavailableException(reply, err); // depends on control dependency: [if], data = [none]
checkFor_SISessionDroppedException(reply, err); // depends on control dependency: [if], data = [none]
checkFor_SIConnectionUnavailableException(reply, err); // depends on control dependency: [if], data = [none]
checkFor_SIConnectionDroppedException(reply, err); // depends on control dependency: [if], data = [none]
checkFor_SIIncorrectCallException(reply, err); // depends on control dependency: [if], data = [none]
checkFor_SIErrorException(reply, err); // depends on control dependency: [if], data = [none]
defaultChecker(reply, err); // depends on control dependency: [if], data = [none]
}
}
finally
{
if (reply != null) reply.release();
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "setAsynchConsumer");
} } |
public class class_name {
public SerDe getSerDe() throws IOException {
if (!this.serDe.isPresent()) {
try {
this.serDe = Optional.of(SerDe.class.cast(Class.forName(this.serDeClassName).newInstance()));
} catch (Throwable t) {
throw new IOException("Failed to instantiate SerDe " + this.serDeClassName, t);
}
}
return this.serDe.get();
} } | public class class_name {
public SerDe getSerDe() throws IOException {
if (!this.serDe.isPresent()) {
try {
this.serDe = Optional.of(SerDe.class.cast(Class.forName(this.serDeClassName).newInstance())); // depends on control dependency: [try], data = [none]
} catch (Throwable t) {
throw new IOException("Failed to instantiate SerDe " + this.serDeClassName, t);
} // depends on control dependency: [catch], data = [none]
}
return this.serDe.get();
} } |
public class class_name {
    /**
     * Creates image tags for a project by wrapping the tag entries in an
     * ImageTagCreateBatch and issuing the service call asynchronously.
     *
     * @param projectId the project to tag images in; must not be null
     * @param tags      the tag entries to create
     * @return an observable emitting the service response, or an error if
     *         delegate parsing of the raw response fails
     */
    public Observable<ServiceResponse<ImageTagCreateSummary>> createImageTagsWithServiceResponseAsync(UUID projectId, List<ImageTagCreateEntry> tags) {
        if (projectId == null) {
            throw new IllegalArgumentException("Parameter projectId is required and cannot be null.");
        }
        if (this.client.apiKey() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiKey() is required and cannot be null.");
        }
        Validator.validate(tags);
        ImageTagCreateBatch batch = new ImageTagCreateBatch();
        batch.withTags(tags);
        return service.createImageTags(projectId, this.client.apiKey(), this.client.acceptLanguage(), batch, this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<ImageTagCreateSummary>>>() {
                @Override
                public Observable<ServiceResponse<ImageTagCreateSummary>> call(Response<ResponseBody> response) {
                    try {
                        // Parse the raw HTTP response into a typed service response.
                        ServiceResponse<ImageTagCreateSummary> clientResponse = createImageTagsDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        // Propagate parse failures through the observable chain.
                        return Observable.error(t);
                    }
                }
            });
    }
}
public Observable<ServiceResponse<ImageTagCreateSummary>> createImageTagsWithServiceResponseAsync(UUID projectId, List<ImageTagCreateEntry> tags) {
if (projectId == null) {
throw new IllegalArgumentException("Parameter projectId is required and cannot be null.");
}
if (this.client.apiKey() == null) {
throw new IllegalArgumentException("Parameter this.client.apiKey() is required and cannot be null.");
}
Validator.validate(tags);
ImageTagCreateBatch batch = new ImageTagCreateBatch();
batch.withTags(tags);
return service.createImageTags(projectId, this.client.apiKey(), this.client.acceptLanguage(), batch, this.client.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<ImageTagCreateSummary>>>() {
@Override
public Observable<ServiceResponse<ImageTagCreateSummary>> call(Response<ResponseBody> response) {
try {
ServiceResponse<ImageTagCreateSummary> clientResponse = createImageTagsDelegate(response);
return Observable.just(clientResponse); // depends on control dependency: [try], data = [none]
} catch (Throwable t) {
return Observable.error(t);
} // depends on control dependency: [catch], data = [none]
}
});
} } |
public class class_name {
@Nullable
public static String encode (@Nullable final String sValue, @Nonnull final Charset aCharset, final ECodec eCodec)
{
if (sValue == null)
return null;
try
{
switch (eCodec)
{
case Q:
return new RFC1522QCodec (aCharset).getEncoded (sValue);
case B:
default:
return new RFC1522BCodec (aCharset).getEncoded (sValue);
}
}
catch (final Exception ex)
{
return sValue;
}
} } | public class class_name {
@Nullable
public static String encode (@Nullable final String sValue, @Nonnull final Charset aCharset, final ECodec eCodec)
{
if (sValue == null)
return null;
try
{
switch (eCodec)
{
case Q:
return new RFC1522QCodec (aCharset).getEncoded (sValue);
case B:
default:
return new RFC1522BCodec (aCharset).getEncoded (sValue);
}
}
catch (final Exception ex)
{
return sValue;
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
    /**
     * Records a thrown type (a {@code @throws} annotation) on the JSDoc info
     * being built, creating the info and its thrown-types list lazily.
     *
     * @param jsType the type expression named in the throws clause
     * @return always true (registration cannot fail)
     */
    boolean declareThrows(JSTypeExpression jsType) {
        lazyInitInfo();
        // Lazily create the list the first time a throws clause is seen.
        if (info.thrownTypes == null) {
            info.thrownTypes = new ArrayList<>();
        }
        info.thrownTypes.add(jsType);
        return true;
    }
}
boolean declareThrows(JSTypeExpression jsType) {
lazyInitInfo();
if (info.thrownTypes == null) {
info.thrownTypes = new ArrayList<>(); // depends on control dependency: [if], data = [none]
}
info.thrownTypes.add(jsType);
return true;
} } |
public class class_name {
public static String toColorWithAlpha(String red, String green, String blue) {
String defaultAlpha = "FF";
if (red != null && !red.isEmpty()
&& Character.isLowerCase(red.charAt(0))) {
defaultAlpha = defaultAlpha.toLowerCase();
}
return toColorWithAlpha(red, green, blue, defaultAlpha);
} } | public class class_name {
public static String toColorWithAlpha(String red, String green, String blue) {
String defaultAlpha = "FF";
if (red != null && !red.isEmpty()
&& Character.isLowerCase(red.charAt(0))) {
defaultAlpha = defaultAlpha.toLowerCase(); // depends on control dependency: [if], data = [none]
}
return toColorWithAlpha(red, green, blue, defaultAlpha);
} } |
public class class_name {
public FileAnnotation getLink(final long linkHashCode) {
for (FileAnnotation link : links) {
if (link.getKey() == linkHashCode) {
return link;
}
}
throw new NoSuchElementException("Linked annotation not found: key=" + linkHashCode);
} } | public class class_name {
public FileAnnotation getLink(final long linkHashCode) {
for (FileAnnotation link : links) {
if (link.getKey() == linkHashCode) {
return link; // depends on control dependency: [if], data = [none]
}
}
throw new NoSuchElementException("Linked annotation not found: key=" + linkHashCode);
} } |
public class class_name {
    /**
     * Indexes the class-file constant pool: records the buffer offset of each
     * entry in poolIdx and skips over its payload according to its tag. Entry
     * objects themselves are resolved lazily into poolObj elsewhere.
     */
    void indexPool() {
        // Pool size (u2) includes the unused slot 0, so iteration starts at 1.
        poolIdx = new int[nextChar()];
        poolObj = new Object[poolIdx.length];
        int i = 1;
        while (i < poolIdx.length) {
            poolIdx[i++] = bp;
            byte tag = buf[bp++];
            switch (tag) {
            case CONSTANT_Utf8: case CONSTANT_Unicode: {
                // Variable length: a u2 byte count followed by that many bytes.
                int len = nextChar();
                bp = bp + len;
                break;
            }
            case CONSTANT_Class:
            case CONSTANT_String:
            case CONSTANT_MethodType:
                // Single u2 index payload.
                bp = bp + 2;
                break;
            case CONSTANT_MethodHandle:
                // u1 reference kind + u2 reference index.
                bp = bp + 3;
                break;
            case CONSTANT_Fieldref:
            case CONSTANT_Methodref:
            case CONSTANT_InterfaceMethodref:
            case CONSTANT_NameandType:
            case CONSTANT_Integer:
            case CONSTANT_Float:
            case CONSTANT_InvokeDynamic:
                // Two u2 indices, or one 4-byte value.
                bp = bp + 4;
                break;
            case CONSTANT_Long:
            case CONSTANT_Double:
                // 8-byte value; long/double entries occupy TWO pool slots,
                // hence the extra increment of i.
                bp = bp + 8;
                i++;
                break;
            default:
                throw badClassFile("bad.const.pool.tag.at",
                                   Byte.toString(tag),
                                   Integer.toString(bp -1));
            }
        }
    }
}
void indexPool() {
poolIdx = new int[nextChar()];
poolObj = new Object[poolIdx.length];
int i = 1;
while (i < poolIdx.length) {
poolIdx[i++] = bp; // depends on control dependency: [while], data = [none]
byte tag = buf[bp++];
switch (tag) {
case CONSTANT_Utf8: case CONSTANT_Unicode: {
int len = nextChar();
bp = bp + len;
break;
}
case CONSTANT_Class:
case CONSTANT_String:
case CONSTANT_MethodType:
bp = bp + 2;
break;
case CONSTANT_MethodHandle:
bp = bp + 3;
break;
case CONSTANT_Fieldref:
case CONSTANT_Methodref:
case CONSTANT_InterfaceMethodref:
case CONSTANT_NameandType:
case CONSTANT_Integer:
case CONSTANT_Float:
case CONSTANT_InvokeDynamic:
bp = bp + 4;
break;
case CONSTANT_Long:
case CONSTANT_Double:
bp = bp + 8;
i++; // depends on control dependency: [while], data = [none]
break;
default:
throw badClassFile("bad.const.pool.tag.at",
Byte.toString(tag),
Integer.toString(bp -1));
}
}
} } |
public class class_name {
protected ObjectName consObjectName(String domain, String type,
String keysString) {
String d = domain != null && !"".equals(domain) ? domain
: getDefaultDomain();
String t = type != null && !"".equals(type) ? type : getDefaultType();
String k = keysString != null && !"".equals(keysString) ? ","
+ keysString : "";
try {
return new ObjectName(d + ":type=" + t + k);
} catch (MalformedObjectNameException e) {
throw new IllegalArgumentException(e);
}
} } | public class class_name {
protected ObjectName consObjectName(String domain, String type,
String keysString) {
String d = domain != null && !"".equals(domain) ? domain
: getDefaultDomain();
String t = type != null && !"".equals(type) ? type : getDefaultType();
String k = keysString != null && !"".equals(keysString) ? ","
+ keysString : "";
try {
return new ObjectName(d + ":type=" + t + k);
// depends on control dependency: [try], data = [none]
} catch (MalformedObjectNameException e) {
throw new IllegalArgumentException(e);
}
// depends on control dependency: [catch], data = [none]
} } |
public class class_name {
private boolean isAuthorized(BasicToken token, List<UriTemplate> matchedTemplates, String method) {
StringBuilder path = new StringBuilder();
// Merge all path templates and generate a path.
for (UriTemplate template : matchedTemplates) {
path.insert(0, template.getTemplate());
}
path.append(":").append(method);
//Look at user permissions to see if the service is permitted.
return token.getPermissions().contains(path.toString());
} } | public class class_name {
private boolean isAuthorized(BasicToken token, List<UriTemplate> matchedTemplates, String method) {
StringBuilder path = new StringBuilder();
// Merge all path templates and generate a path.
for (UriTemplate template : matchedTemplates) {
path.insert(0, template.getTemplate()); // depends on control dependency: [for], data = [template]
}
path.append(":").append(method);
//Look at user permissions to see if the service is permitted.
return token.getPermissions().contains(path.toString());
} } |
public class class_name {
    /**
     * Builds a feature call referring to the superclass of the Xtend type that
     * encloses the current expression, resolving the JVM type either from the
     * associated element or by qualified-name lookup.
     *
     * @return the super feature call, or null when no superclass type could be
     *         resolved
     */
    public XFeatureCall createReferenceToSuper() {
        final XExpression expr = getXExpression();
        XtendTypeDeclaration type = EcoreUtil2.getContainerOfType(expr, XtendTypeDeclaration.class);
        JvmType jvmObject = getAssociatedElement(JvmType.class, type, expr.eResource());
        final XFeatureCall superFeature = XbaseFactory.eINSTANCE.createXFeatureCall();
        JvmIdentifiableElement feature;
        if (jvmObject instanceof JvmDeclaredType) {
            // NOTE(review): getExtendedClass() is assumed non-null here (i.e.
            // the type is not java.lang.Object) — confirm upstream guarantees.
            feature = ((JvmDeclaredType) jvmObject).getExtendedClass().getType();
        } else {
            // Fallback: resolve the type by its qualified name.
            feature = findType(expr, getQualifiedName(type)).getType();
            if (feature instanceof JvmDeclaredType) {
                feature = ((JvmDeclaredType) feature).getExtendedClass().getType();
            } else {
                feature = null;
            }
        }
        if (feature == null) {
            return null;
        }
        superFeature.setFeature(feature);
        return superFeature;
    }
}
public XFeatureCall createReferenceToSuper() {
final XExpression expr = getXExpression();
XtendTypeDeclaration type = EcoreUtil2.getContainerOfType(expr, XtendTypeDeclaration.class);
JvmType jvmObject = getAssociatedElement(JvmType.class, type, expr.eResource());
final XFeatureCall superFeature = XbaseFactory.eINSTANCE.createXFeatureCall();
JvmIdentifiableElement feature;
if (jvmObject instanceof JvmDeclaredType) {
feature = ((JvmDeclaredType) jvmObject).getExtendedClass().getType(); // depends on control dependency: [if], data = [none]
} else {
feature = findType(expr, getQualifiedName(type)).getType(); // depends on control dependency: [if], data = [none]
if (feature instanceof JvmDeclaredType) {
feature = ((JvmDeclaredType) feature).getExtendedClass().getType(); // depends on control dependency: [if], data = [none]
} else {
feature = null; // depends on control dependency: [if], data = [none]
}
}
if (feature == null) {
return null; // depends on control dependency: [if], data = [none]
}
superFeature.setFeature(feature);
return superFeature;
} } |
public class class_name {
    /**
     * Resolves the generic type parameters declared by the given type into a
     * name-to-type map, preserving declaration order. Parameterized types map
     * each type-variable name to its actual argument (plus any outer-class
     * generics); non-parameterized types fall back to raw-generics resolution.
     *
     * @param type     type whose generics should be resolved
     * @param generics known generics of the surrounding context, used to
     *                 resolve type variables inside {@code type}
     * @return ordered map of generic parameter name to resolved type
     */
    public static LinkedHashMap<String, Type> resolveGenerics(final Type type,
                                                              final Map<String, Type> generics) {
        Type actual = type;
        if (type instanceof ParameterizedType) {
            // if parameterized type is not correct (contain only raw type without type arguments
            // (possible for instance types) then this call could unwrap it to pure class
            actual = GenericsUtils.resolveTypeVariables(type, generics);
        }
        final LinkedHashMap<String, Type> res;
        if (actual instanceof ParameterizedType) {
            final ParameterizedType actualType = (ParameterizedType) actual;
            final Type[] genericTypes = actualType.getActualTypeArguments();
            final Class target = (Class) actualType.getRawType();
            final TypeVariable[] genericNames = target.getTypeParameters();
            // inner class can use outer class generics
            res = fillOuterGenerics(actual, new LinkedHashMap<String, Type>(), null);
            // Pair each declared type variable with its actual argument;
            // the two arrays are positionally aligned by the reflection API.
            final int cnt = genericNames.length;
            for (int i = 0; i < cnt; i++) {
                res.put(genericNames[i].getName(), genericTypes[i]);
            }
        } else {
            res = resolveRawGenerics(GenericsUtils.resolveClass(actual, generics));
        }
        return res;
    }
}
public static LinkedHashMap<String, Type> resolveGenerics(final Type type,
final Map<String, Type> generics) {
Type actual = type;
if (type instanceof ParameterizedType) {
// if parameterized type is not correct (contain only raw type without type arguments
// (possible for instance types) then this call could unwrap it to pure class
actual = GenericsUtils.resolveTypeVariables(type, generics); // depends on control dependency: [if], data = [none]
}
final LinkedHashMap<String, Type> res;
if (actual instanceof ParameterizedType) {
final ParameterizedType actualType = (ParameterizedType) actual;
final Type[] genericTypes = actualType.getActualTypeArguments();
final Class target = (Class) actualType.getRawType(); // depends on control dependency: [if], data = [none]
final TypeVariable[] genericNames = target.getTypeParameters();
// inner class can use outer class generics
res = fillOuterGenerics(actual, new LinkedHashMap<String, Type>(), null); // depends on control dependency: [if], data = [none]
final int cnt = genericNames.length;
for (int i = 0; i < cnt; i++) {
res.put(genericNames[i].getName(), genericTypes[i]); // depends on control dependency: [for], data = [i]
}
} else {
res = resolveRawGenerics(GenericsUtils.resolveClass(actual, generics)); // depends on control dependency: [if], data = [none]
}
return res;
} } |
public class class_name {
public void buildFieldDeprecationInfo(XMLNode node, Content fieldsContentTree) {
if (!utils.definesSerializableFields(currentTypeElement)) {
fieldWriter.addMemberDeprecatedInfo((VariableElement)currentMember,
fieldsContentTree);
}
} } | public class class_name {
public void buildFieldDeprecationInfo(XMLNode node, Content fieldsContentTree) {
if (!utils.definesSerializableFields(currentTypeElement)) {
fieldWriter.addMemberDeprecatedInfo((VariableElement)currentMember,
fieldsContentTree); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public Map<String, Object> getMBeanResult(final String mbeanName) {
final Map<String, Object> ret = new HashMap<>();
try {
final ObjectName name = new ObjectName(mbeanName);
final MBeanInfo info = getMBeanInfo(name);
final MBeanAttributeInfo[] mbeanAttrs = info.getAttributes();
final Map<String, Object> attributes = new TreeMap<>();
for (final MBeanAttributeInfo attrInfo : mbeanAttrs) {
final Object obj = getMBeanAttribute(name, attrInfo.getName());
attributes.put(attrInfo.getName(), obj);
}
ret.put("attributes", attributes);
} catch (final Exception e) {
logger.error("Invalid MBean Name. name = " + mbeanName, e);
ret.put("error", "'" + mbeanName + "' is not a valid mBean name");
}
return ret;
} } | public class class_name {
public Map<String, Object> getMBeanResult(final String mbeanName) {
final Map<String, Object> ret = new HashMap<>();
try {
final ObjectName name = new ObjectName(mbeanName);
final MBeanInfo info = getMBeanInfo(name);
final MBeanAttributeInfo[] mbeanAttrs = info.getAttributes();
final Map<String, Object> attributes = new TreeMap<>();
for (final MBeanAttributeInfo attrInfo : mbeanAttrs) {
final Object obj = getMBeanAttribute(name, attrInfo.getName());
attributes.put(attrInfo.getName(), obj); // depends on control dependency: [for], data = [attrInfo]
}
ret.put("attributes", attributes); // depends on control dependency: [try], data = [none]
} catch (final Exception e) {
logger.error("Invalid MBean Name. name = " + mbeanName, e);
ret.put("error", "'" + mbeanName + "' is not a valid mBean name");
} // depends on control dependency: [catch], data = [none]
return ret;
} } |
public class class_name {
private void createImageValidation(File imageFile) throws IOException {
synchronized (imageValidatorLock) {
InjectionHandler.processEvent(InjectionEvent.STANDBY_VALIDATE_CREATE);
if (!running) {
// fails the checkpoint
InjectionHandler.processEvent(InjectionEvent.STANDBY_VALIDATE_CREATE_FAIL);
throw new IOException("Standby: standby is quiescing");
}
imageValidator = new ImageValidator(imageFile);
imageValidator.start();
}
} } | public class class_name {
private void createImageValidation(File imageFile) throws IOException {
synchronized (imageValidatorLock) {
InjectionHandler.processEvent(InjectionEvent.STANDBY_VALIDATE_CREATE);
if (!running) {
// fails the checkpoint
InjectionHandler.processEvent(InjectionEvent.STANDBY_VALIDATE_CREATE_FAIL); // depends on control dependency: [if], data = [none]
throw new IOException("Standby: standby is quiescing");
}
imageValidator = new ImageValidator(imageFile);
imageValidator.start();
}
} } |
public class class_name {
protected void uploadFile(final FTPClient ftpClient, final String sourceFilePath, final String targetFilePath,
final String logPrefix) throws IOException {
log.info(String.format(UPLOAD_FILE, logPrefix, sourceFilePath, targetFilePath));
final File sourceFile = new File(sourceFilePath);
try (final InputStream is = new FileInputStream(sourceFile)) {
ftpClient.changeWorkingDirectory(targetFilePath);
ftpClient.storeFile(sourceFile.getName(), is);
final int replyCode = ftpClient.getReplyCode();
final String replyMessage = ftpClient.getReplyString();
if (isCommandFailed(replyCode)) {
log.error(String.format(UPLOAD_FILE_REPLY, logPrefix, replyMessage));
throw new IOException("Failed to upload file: " + sourceFilePath);
} else {
log.info(String.format(UPLOAD_FILE_REPLY, logPrefix, replyMessage));
}
}
} } | public class class_name {
protected void uploadFile(final FTPClient ftpClient, final String sourceFilePath, final String targetFilePath,
final String logPrefix) throws IOException {
log.info(String.format(UPLOAD_FILE, logPrefix, sourceFilePath, targetFilePath));
final File sourceFile = new File(sourceFilePath);
try (final InputStream is = new FileInputStream(sourceFile)) {
ftpClient.changeWorkingDirectory(targetFilePath);
ftpClient.storeFile(sourceFile.getName(), is);
final int replyCode = ftpClient.getReplyCode();
final String replyMessage = ftpClient.getReplyString();
if (isCommandFailed(replyCode)) {
log.error(String.format(UPLOAD_FILE_REPLY, logPrefix, replyMessage)); // depends on control dependency: [if], data = [none]
throw new IOException("Failed to upload file: " + sourceFilePath);
} else {
log.info(String.format(UPLOAD_FILE_REPLY, logPrefix, replyMessage)); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
private void storePatient(final Patient patient) {
try {
patients.put(patient.getIdentifierFirstRep().getValue(), patient);
// if storing is successful the notify the listeners that listens on
// any patient => patient/*
final String bundleToString = currentPatientsAsJsonString();
broadcaster
.broadcast(new OutboundEvent.Builder().name("patients").data(String.class, bundleToString).build());
} catch (final Exception e) {
e.printStackTrace();
}
} } | public class class_name {
private void storePatient(final Patient patient) {
try {
patients.put(patient.getIdentifierFirstRep().getValue(), patient); // depends on control dependency: [try], data = [none]
// if storing is successful the notify the listeners that listens on
// any patient => patient/*
final String bundleToString = currentPatientsAsJsonString();
broadcaster
.broadcast(new OutboundEvent.Builder().name("patients").data(String.class, bundleToString).build()); // depends on control dependency: [try], data = [none]
} catch (final Exception e) {
e.printStackTrace();
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public com.google.privacy.dlp.v2.ReplaceWithInfoTypeConfigOrBuilder
getReplaceWithInfoTypeConfigOrBuilder() {
if (transformationCase_ == 7) {
return (com.google.privacy.dlp.v2.ReplaceWithInfoTypeConfig) transformation_;
}
return com.google.privacy.dlp.v2.ReplaceWithInfoTypeConfig.getDefaultInstance();
} } | public class class_name {
public com.google.privacy.dlp.v2.ReplaceWithInfoTypeConfigOrBuilder
getReplaceWithInfoTypeConfigOrBuilder() {
if (transformationCase_ == 7) {
return (com.google.privacy.dlp.v2.ReplaceWithInfoTypeConfig) transformation_; // depends on control dependency: [if], data = [none]
}
return com.google.privacy.dlp.v2.ReplaceWithInfoTypeConfig.getDefaultInstance();
} } |
public class class_name {
private static Path toPath(FileSystem fileSystem, URI uri) {
// We have to invoke this method by reflection because while the file system should be
// an instance of JimfsFileSystem, it may be loaded by a different class loader and as
// such appear to be a totally different class.
try {
Method toPath = fileSystem.getClass().getDeclaredMethod("toPath", URI.class);
return (Path) toPath.invoke(fileSystem, uri);
} catch (NoSuchMethodException e) {
throw new IllegalArgumentException("invalid file system: " + fileSystem);
} catch (InvocationTargetException | IllegalAccessException e) {
throw new RuntimeException(e);
}
} } | public class class_name {
private static Path toPath(FileSystem fileSystem, URI uri) {
// We have to invoke this method by reflection because while the file system should be
// an instance of JimfsFileSystem, it may be loaded by a different class loader and as
// such appear to be a totally different class.
try {
Method toPath = fileSystem.getClass().getDeclaredMethod("toPath", URI.class);
return (Path) toPath.invoke(fileSystem, uri); // depends on control dependency: [try], data = [none]
} catch (NoSuchMethodException e) {
throw new IllegalArgumentException("invalid file system: " + fileSystem);
} catch (InvocationTargetException | IllegalAccessException e) { // depends on control dependency: [catch], data = [none]
throw new RuntimeException(e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
@Override
public boolean exists() {
if (this.cachedResource != null)
return true;
try {
InputStream is = get();
if (is != null) {
is.close();
return true;
}
return false;
} catch (IOException ex) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
Tr.debug(tc, "IOException while checking existence of resource", ex);
return false;
}
} } | public class class_name {
@Override
public boolean exists() {
if (this.cachedResource != null)
return true;
try {
InputStream is = get();
if (is != null) {
is.close(); // depends on control dependency: [if], data = [none]
return true; // depends on control dependency: [if], data = [none]
}
return false; // depends on control dependency: [try], data = [none]
} catch (IOException ex) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
Tr.debug(tc, "IOException while checking existence of resource", ex);
return false;
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
private static ObjectIdentifier algOID(String name) throws IOException {
// See if algname is in printable OID ("dot-dot") notation
if (name.indexOf('.') != -1) {
if (name.startsWith("OID.")) {
return new ObjectIdentifier(name.substring("OID.".length()));
} else {
return new ObjectIdentifier(name);
}
}
// Digesting algorithms
if (name.equalsIgnoreCase("MD5")) {
return AlgorithmId.MD5_oid;
}
if (name.equalsIgnoreCase("MD2")) {
return AlgorithmId.MD2_oid;
}
if (name.equalsIgnoreCase("SHA") || name.equalsIgnoreCase("SHA1")
|| name.equalsIgnoreCase("SHA-1")) {
return AlgorithmId.SHA_oid;
}
if (name.equalsIgnoreCase("SHA-256") ||
name.equalsIgnoreCase("SHA256")) {
return AlgorithmId.SHA256_oid;
}
if (name.equalsIgnoreCase("SHA-384") ||
name.equalsIgnoreCase("SHA384")) {
return AlgorithmId.SHA384_oid;
}
if (name.equalsIgnoreCase("SHA-512") ||
name.equalsIgnoreCase("SHA512")) {
return AlgorithmId.SHA512_oid;
}
// Various public key algorithms
if (name.equalsIgnoreCase("RSA")) {
return AlgorithmId.RSAEncryption_oid;
}
if (name.equalsIgnoreCase("Diffie-Hellman")
|| name.equalsIgnoreCase("DH")) {
return AlgorithmId.DH_oid;
}
if (name.equalsIgnoreCase("DSA")) {
return AlgorithmId.DSA_oid;
}
if (name.equalsIgnoreCase("EC")) {
return EC_oid;
}
// Common signature types
if (name.equalsIgnoreCase("MD5withRSA")
|| name.equalsIgnoreCase("MD5/RSA")) {
return AlgorithmId.md5WithRSAEncryption_oid;
}
if (name.equalsIgnoreCase("MD2withRSA")
|| name.equalsIgnoreCase("MD2/RSA")) {
return AlgorithmId.md2WithRSAEncryption_oid;
}
if (name.equalsIgnoreCase("SHAwithDSA")
|| name.equalsIgnoreCase("SHA1withDSA")
|| name.equalsIgnoreCase("SHA/DSA")
|| name.equalsIgnoreCase("SHA1/DSA")
|| name.equalsIgnoreCase("DSAWithSHA1")
|| name.equalsIgnoreCase("DSS")
|| name.equalsIgnoreCase("SHA-1/DSA")) {
return AlgorithmId.sha1WithDSA_oid;
}
if (name.equalsIgnoreCase("SHA1WithRSA")
|| name.equalsIgnoreCase("SHA1/RSA")) {
return AlgorithmId.sha1WithRSAEncryption_oid;
}
if (name.equalsIgnoreCase("SHA1withECDSA")
|| name.equalsIgnoreCase("ECDSA")) {
return AlgorithmId.sha1WithECDSA_oid;
}
if (name.equalsIgnoreCase("SHA224withECDSA")) {
return AlgorithmId.sha224WithECDSA_oid;
}
if (name.equalsIgnoreCase("SHA256withECDSA")) {
return AlgorithmId.sha256WithECDSA_oid;
}
if (name.equalsIgnoreCase("SHA384withECDSA")) {
return AlgorithmId.sha384WithECDSA_oid;
}
if (name.equalsIgnoreCase("SHA512withECDSA")) {
return AlgorithmId.sha512WithECDSA_oid;
}
// See if any of the installed providers supply a mapping from
// the given algorithm name to an OID string
synchronized (oidTable) {
reinitializeMappingTableLocked();
return oidTable.get(name.toUpperCase(Locale.ENGLISH));
}
} } | public class class_name {
private static ObjectIdentifier algOID(String name) throws IOException {
// See if algname is in printable OID ("dot-dot") notation
if (name.indexOf('.') != -1) {
if (name.startsWith("OID.")) {
return new ObjectIdentifier(name.substring("OID.".length())); // depends on control dependency: [if], data = [none]
} else {
return new ObjectIdentifier(name); // depends on control dependency: [if], data = [none]
}
}
// Digesting algorithms
if (name.equalsIgnoreCase("MD5")) {
return AlgorithmId.MD5_oid;
}
if (name.equalsIgnoreCase("MD2")) {
return AlgorithmId.MD2_oid;
}
if (name.equalsIgnoreCase("SHA") || name.equalsIgnoreCase("SHA1")
|| name.equalsIgnoreCase("SHA-1")) {
return AlgorithmId.SHA_oid;
}
if (name.equalsIgnoreCase("SHA-256") ||
name.equalsIgnoreCase("SHA256")) {
return AlgorithmId.SHA256_oid;
}
if (name.equalsIgnoreCase("SHA-384") ||
name.equalsIgnoreCase("SHA384")) {
return AlgorithmId.SHA384_oid;
}
if (name.equalsIgnoreCase("SHA-512") ||
name.equalsIgnoreCase("SHA512")) {
return AlgorithmId.SHA512_oid;
}
// Various public key algorithms
if (name.equalsIgnoreCase("RSA")) {
return AlgorithmId.RSAEncryption_oid;
}
if (name.equalsIgnoreCase("Diffie-Hellman")
|| name.equalsIgnoreCase("DH")) {
return AlgorithmId.DH_oid;
}
if (name.equalsIgnoreCase("DSA")) {
return AlgorithmId.DSA_oid;
}
if (name.equalsIgnoreCase("EC")) {
return EC_oid;
}
// Common signature types
if (name.equalsIgnoreCase("MD5withRSA")
|| name.equalsIgnoreCase("MD5/RSA")) {
return AlgorithmId.md5WithRSAEncryption_oid;
}
if (name.equalsIgnoreCase("MD2withRSA")
|| name.equalsIgnoreCase("MD2/RSA")) {
return AlgorithmId.md2WithRSAEncryption_oid;
}
if (name.equalsIgnoreCase("SHAwithDSA")
|| name.equalsIgnoreCase("SHA1withDSA")
|| name.equalsIgnoreCase("SHA/DSA")
|| name.equalsIgnoreCase("SHA1/DSA")
|| name.equalsIgnoreCase("DSAWithSHA1")
|| name.equalsIgnoreCase("DSS")
|| name.equalsIgnoreCase("SHA-1/DSA")) {
return AlgorithmId.sha1WithDSA_oid;
}
if (name.equalsIgnoreCase("SHA1WithRSA")
|| name.equalsIgnoreCase("SHA1/RSA")) {
return AlgorithmId.sha1WithRSAEncryption_oid;
}
if (name.equalsIgnoreCase("SHA1withECDSA")
|| name.equalsIgnoreCase("ECDSA")) {
return AlgorithmId.sha1WithECDSA_oid;
}
if (name.equalsIgnoreCase("SHA224withECDSA")) {
return AlgorithmId.sha224WithECDSA_oid;
}
if (name.equalsIgnoreCase("SHA256withECDSA")) {
return AlgorithmId.sha256WithECDSA_oid;
}
if (name.equalsIgnoreCase("SHA384withECDSA")) {
return AlgorithmId.sha384WithECDSA_oid;
}
if (name.equalsIgnoreCase("SHA512withECDSA")) {
return AlgorithmId.sha512WithECDSA_oid;
}
// See if any of the installed providers supply a mapping from
// the given algorithm name to an OID string
synchronized (oidTable) {
reinitializeMappingTableLocked();
return oidTable.get(name.toUpperCase(Locale.ENGLISH));
}
} } |
public class class_name {
public int lookupIndex (long entry)
{
int ret = map.get(entry);
if (ret <= 0 && !growthStopped) {
numEntries++;
ret = numEntries;
map.put (entry, ret);
}
return ret - 1; // feature id should be 0-based
} } | public class class_name {
public int lookupIndex (long entry)
{
int ret = map.get(entry);
if (ret <= 0 && !growthStopped) {
numEntries++; // depends on control dependency: [if], data = [none]
ret = numEntries; // depends on control dependency: [if], data = [none]
map.put (entry, ret); // depends on control dependency: [if], data = [none]
}
return ret - 1; // feature id should be 0-based
} } |
public class class_name {
private SqlModality deduceModality(SqlNode query) {
if (query instanceof SqlSelect) {
SqlSelect select = (SqlSelect) query;
return select.getModifierNode(SqlSelectKeyword.STREAM) != null
? SqlModality.STREAM
: SqlModality.RELATION;
} else if (query.getKind() == SqlKind.VALUES) {
return SqlModality.RELATION;
} else {
assert query.isA(SqlKind.SET_QUERY);
final SqlCall call = (SqlCall) query;
return deduceModality(call.getOperandList().get(0));
}
} } | public class class_name {
private SqlModality deduceModality(SqlNode query) {
if (query instanceof SqlSelect) {
SqlSelect select = (SqlSelect) query;
return select.getModifierNode(SqlSelectKeyword.STREAM) != null
? SqlModality.STREAM
: SqlModality.RELATION; // depends on control dependency: [if], data = [none]
} else if (query.getKind() == SqlKind.VALUES) {
return SqlModality.RELATION; // depends on control dependency: [if], data = [none]
} else {
assert query.isA(SqlKind.SET_QUERY); // depends on control dependency: [if], data = [none]
final SqlCall call = (SqlCall) query;
return deduceModality(call.getOperandList().get(0)); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static void closeAllInfoWindowsOn(MapView mapView) {
ArrayList<InfoWindow> opened = getOpenedInfoWindowsOn(mapView);
for (InfoWindow infoWindow : opened) {
infoWindow.close();
}
} } | public class class_name {
public static void closeAllInfoWindowsOn(MapView mapView) {
ArrayList<InfoWindow> opened = getOpenedInfoWindowsOn(mapView);
for (InfoWindow infoWindow : opened) {
infoWindow.close(); // depends on control dependency: [for], data = [infoWindow]
}
} } |
public class class_name {
private ApplicationDefinition checkApplicationKey(ApplicationDefinition appDef) {
Tenant tenant = Tenant.getTenant(appDef);
ApplicationDefinition currAppDef = getApplication(tenant, appDef.getAppName());
if (currAppDef == null) {
m_logger.info("Defining application: {}", appDef.getAppName());
} else {
m_logger.info("Updating application: {}", appDef.getAppName());
String appKey = currAppDef.getKey();
Utils.require(Utils.isEmpty(appKey) || appKey.equals(appDef.getKey()),
"Application key cannot be changed: %s", appDef.getKey());
}
return currAppDef;
} } | public class class_name {
private ApplicationDefinition checkApplicationKey(ApplicationDefinition appDef) {
Tenant tenant = Tenant.getTenant(appDef);
ApplicationDefinition currAppDef = getApplication(tenant, appDef.getAppName());
if (currAppDef == null) {
m_logger.info("Defining application: {}", appDef.getAppName());
// depends on control dependency: [if], data = [none]
} else {
m_logger.info("Updating application: {}", appDef.getAppName());
// depends on control dependency: [if], data = [none]
String appKey = currAppDef.getKey();
Utils.require(Utils.isEmpty(appKey) || appKey.equals(appDef.getKey()),
"Application key cannot be changed: %s", appDef.getKey());
// depends on control dependency: [if], data = [none]
}
return currAppDef;
} } |
public class class_name {
public java.util.List<DeliveryChannel> getDeliveryChannels() {
if (deliveryChannels == null) {
deliveryChannels = new com.amazonaws.internal.SdkInternalList<DeliveryChannel>();
}
return deliveryChannels;
} } | public class class_name {
public java.util.List<DeliveryChannel> getDeliveryChannels() {
if (deliveryChannels == null) {
deliveryChannels = new com.amazonaws.internal.SdkInternalList<DeliveryChannel>(); // depends on control dependency: [if], data = [none]
}
return deliveryChannels;
} } |
public class class_name {
private static boolean canBeContinued(final String fileName, final List<Policy> policies) {
boolean result = true;
for (Policy policy : policies) {
result &= policy.continueExistingFile(fileName);
}
return result;
} } | public class class_name {
private static boolean canBeContinued(final String fileName, final List<Policy> policies) {
boolean result = true;
for (Policy policy : policies) {
result &= policy.continueExistingFile(fileName); // depends on control dependency: [for], data = [policy]
}
return result;
} } |
public class class_name {
public static <T extends ImageGray<T>>T average(Planar<T> input , T output ) {
Class type = input.getBandType();
if( type == GrayU8.class ) {
return (T)ConvertImage.average((Planar<GrayU8>)input,(GrayU8)output);
} else if( type == GrayS8.class ) {
return (T)ConvertImage.average((Planar<GrayS8>)input,(GrayS8)output);
} else if( type == GrayU16.class ) {
return (T)ConvertImage.average((Planar<GrayU16>)input,(GrayU16)output);
} else if( type == GrayS16.class ) {
return (T)ConvertImage.average((Planar<GrayS16>)input,(GrayS16)output);
} else if( type == GrayS32.class ) {
return (T)ConvertImage.average((Planar<GrayS32>)input,(GrayS32)output);
} else if( type == GrayS64.class ) {
return (T)ConvertImage.average((Planar<GrayS64>)input,(GrayS64)output);
} else if( type == GrayF32.class ) {
return (T)ConvertImage.average((Planar<GrayF32>)input,(GrayF32)output);
} else if( type == GrayF64.class ) {
return (T)ConvertImage.average((Planar<GrayF64>)input,(GrayF64)output);
} else {
throw new IllegalArgumentException("Unknown image type: "+type.getSimpleName());
}
} } | public class class_name {
public static <T extends ImageGray<T>>T average(Planar<T> input , T output ) {
Class type = input.getBandType();
if( type == GrayU8.class ) {
return (T)ConvertImage.average((Planar<GrayU8>)input,(GrayU8)output); // depends on control dependency: [if], data = [none]
} else if( type == GrayS8.class ) {
return (T)ConvertImage.average((Planar<GrayS8>)input,(GrayS8)output); // depends on control dependency: [if], data = [none]
} else if( type == GrayU16.class ) {
return (T)ConvertImage.average((Planar<GrayU16>)input,(GrayU16)output); // depends on control dependency: [if], data = [none]
} else if( type == GrayS16.class ) {
return (T)ConvertImage.average((Planar<GrayS16>)input,(GrayS16)output); // depends on control dependency: [if], data = [none]
} else if( type == GrayS32.class ) {
return (T)ConvertImage.average((Planar<GrayS32>)input,(GrayS32)output); // depends on control dependency: [if], data = [none]
} else if( type == GrayS64.class ) {
return (T)ConvertImage.average((Planar<GrayS64>)input,(GrayS64)output); // depends on control dependency: [if], data = [none]
} else if( type == GrayF32.class ) {
return (T)ConvertImage.average((Planar<GrayF32>)input,(GrayF32)output); // depends on control dependency: [if], data = [none]
} else if( type == GrayF64.class ) {
return (T)ConvertImage.average((Planar<GrayF64>)input,(GrayF64)output); // depends on control dependency: [if], data = [none]
} else {
throw new IllegalArgumentException("Unknown image type: "+type.getSimpleName());
}
} } |
public class class_name {
public <T extends BaseBugLinkStrategy<?>> T create(final BugLinkType type, final String serverUrl,
final Object... additionalArgs) {
// Check that the internals are registered
if (!internalsRegistered) {
registerInternals();
}
if (map.containsKey(type)) {
try {
final SortedSet<Helper> helpers = map.get(type);
T helper = null;
for (final Helper definedHelper : helpers) {
if (definedHelper.useHelper(serverUrl)) {
helper = (T) definedHelper.getHelperClass().newInstance();
break;
}
}
if (helper != null) {
helper.initialise(serverUrl, additionalArgs);
}
return helper;
} catch (Exception e) {
throw new RuntimeException(e);
}
} else {
return null;
}
} } | public class class_name {
public <T extends BaseBugLinkStrategy<?>> T create(final BugLinkType type, final String serverUrl,
final Object... additionalArgs) {
// Check that the internals are registered
if (!internalsRegistered) {
registerInternals();
}
if (map.containsKey(type)) {
try {
final SortedSet<Helper> helpers = map.get(type);
T helper = null;
for (final Helper definedHelper : helpers) {
if (definedHelper.useHelper(serverUrl)) {
helper = (T) definedHelper.getHelperClass().newInstance(); // depends on control dependency: [if], data = [none]
break;
}
}
if (helper != null) {
helper.initialise(serverUrl, additionalArgs);
}
return helper;
} catch (Exception e) {
throw new RuntimeException(e);
}
} else {
return null;
}
} } |
public class class_name {
private Set<Permission> resolvePermissions(Account account) {
if (accountPermissionResolver != null) {
return accountPermissionResolver.resolvePermissions(account);
}
return Collections.emptySet();
} } | public class class_name {
private Set<Permission> resolvePermissions(Account account) {
if (accountPermissionResolver != null) {
return accountPermissionResolver.resolvePermissions(account); // depends on control dependency: [if], data = [none]
}
return Collections.emptySet();
} } |
public class class_name {
@Deprecated
public Collection<Double> getSamples(String keyword, SampleType sampleType) {
if (!keywords.contains(keyword)) {
return null;
}
Set<Double> result = new TreeSet<Double>();
if (rules.hasExplicitBoundingInfo) {
FixedDecimalSamples samples = rules.getDecimalSamples(keyword, sampleType);
return samples == null ? Collections.unmodifiableSet(result)
: Collections.unmodifiableSet(samples.addSamples(result));
}
// hack in case the rule is created without explicit samples
int maxCount = isLimited(keyword, sampleType) ? Integer.MAX_VALUE : 20;
switch (sampleType) {
case INTEGER:
for (int i = 0; i < 200; ++i) {
if (!addSample(keyword, i, maxCount, result)) {
break;
}
}
addSample(keyword, 1000000, maxCount, result); // hack for Welsh
break;
case DECIMAL:
for (int i = 0; i < 2000; ++i) {
if (!addSample(keyword, new FixedDecimal(i/10d, 1), maxCount, result)) {
break;
}
}
addSample(keyword, new FixedDecimal(1000000d, 1), maxCount, result); // hack for Welsh
break;
}
return result.size() == 0 ? null : Collections.unmodifiableSet(result);
} } | public class class_name {
@Deprecated
public Collection<Double> getSamples(String keyword, SampleType sampleType) {
if (!keywords.contains(keyword)) {
return null; // depends on control dependency: [if], data = [none]
}
Set<Double> result = new TreeSet<Double>();
if (rules.hasExplicitBoundingInfo) {
FixedDecimalSamples samples = rules.getDecimalSamples(keyword, sampleType);
return samples == null ? Collections.unmodifiableSet(result)
: Collections.unmodifiableSet(samples.addSamples(result)); // depends on control dependency: [if], data = [none]
}
// hack in case the rule is created without explicit samples
int maxCount = isLimited(keyword, sampleType) ? Integer.MAX_VALUE : 20;
switch (sampleType) {
case INTEGER:
for (int i = 0; i < 200; ++i) {
if (!addSample(keyword, i, maxCount, result)) {
break;
}
}
addSample(keyword, 1000000, maxCount, result); // hack for Welsh
break;
case DECIMAL:
for (int i = 0; i < 2000; ++i) {
if (!addSample(keyword, new FixedDecimal(i/10d, 1), maxCount, result)) {
break;
}
}
addSample(keyword, new FixedDecimal(1000000d, 1), maxCount, result); // hack for Welsh
break;
}
return result.size() == 0 ? null : Collections.unmodifiableSet(result);
} } |
public class class_name {
protected static <S extends IPAddressSegment> void normalizePrefixBoundary(
int sectionPrefixBits,
S segments[],
int segmentBitCount,
int segmentByteCount,
BiFunction<S, Integer, S> segProducer) {
//we've already verified segment prefixes in super constructor. We simply need to check the case where the prefix is at a segment boundary,
//whether the network side has the correct prefix
int networkSegmentIndex = getNetworkSegmentIndex(sectionPrefixBits, segmentByteCount, segmentBitCount);
if(networkSegmentIndex >= 0) {
S segment = segments[networkSegmentIndex];
if(!segment.isPrefixed()) {
segments[networkSegmentIndex] = segProducer.apply(segment, segmentBitCount);
}
}
} } | public class class_name {
protected static <S extends IPAddressSegment> void normalizePrefixBoundary(
int sectionPrefixBits,
S segments[],
int segmentBitCount,
int segmentByteCount,
BiFunction<S, Integer, S> segProducer) {
//we've already verified segment prefixes in super constructor. We simply need to check the case where the prefix is at a segment boundary,
//whether the network side has the correct prefix
int networkSegmentIndex = getNetworkSegmentIndex(sectionPrefixBits, segmentByteCount, segmentBitCount);
if(networkSegmentIndex >= 0) {
S segment = segments[networkSegmentIndex];
if(!segment.isPrefixed()) {
segments[networkSegmentIndex] = segProducer.apply(segment, segmentBitCount); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
public V put(Integer key, V value)
{
if (keyTooLarge(key))
{
expand(key);
}
int offset = offset(key);
V oldValue = (V) data[offset];
data[offset] = value;
// If the key is beyond the current end of the array, then move the end up.
if (key >= end)
{
end = key + 1;
}
// Increment the count only if a new value was inserted.
if (oldValue == null)
{
count++;
}
return oldValue;
} } | public class class_name {
public V put(Integer key, V value)
{
if (keyTooLarge(key))
{
expand(key); // depends on control dependency: [if], data = [none]
}
int offset = offset(key);
V oldValue = (V) data[offset];
data[offset] = value;
// If the key is beyond the current end of the array, then move the end up.
if (key >= end)
{
end = key + 1; // depends on control dependency: [if], data = [none]
}
// Increment the count only if a new value was inserted.
if (oldValue == null)
{
count++; // depends on control dependency: [if], data = [none]
}
return oldValue;
} } |
public class class_name {
public static Charset detect(@NonNull byte[] buffer, int offset, int length) {
Preconditions.checkArgument(length > 0);
Preconditions.checkArgument(offset >= 0);
final com.ibm.icu.text.CharsetDetector detector = new com.ibm.icu.text.CharsetDetector();
try {
detector.setText(new ByteArrayInputStream(buffer, offset, length));
return Charset.forName(detector.detect().getName());
} catch (Exception e) {
return null;
}
} } | public class class_name {
public static Charset detect(@NonNull byte[] buffer, int offset, int length) {
Preconditions.checkArgument(length > 0);
Preconditions.checkArgument(offset >= 0);
final com.ibm.icu.text.CharsetDetector detector = new com.ibm.icu.text.CharsetDetector();
try {
detector.setText(new ByteArrayInputStream(buffer, offset, length)); // depends on control dependency: [try], data = [none]
return Charset.forName(detector.detect().getName()); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
return null;
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public void processCalendars(List<Row> rows)
{
for (Row row : rows)
{
processCalendar(row);
}
if (m_defaultCalendarID != null)
{
ProjectCalendar defaultCalendar = m_calMap.get(m_defaultCalendarID);
// Primavera XER files can sometimes not contain a definition of the default
// project calendar so only try to set if we find a definition.
if (defaultCalendar != null)
{
m_project.setDefaultCalendar(defaultCalendar);
}
}
} } | public class class_name {
   // Dependency-annotated duplicate of processCalendars(); trailing "// depends on ..." markers are dataset labels, not documentation.
   public void processCalendars(List<Row> rows)
   {
      for (Row row : rows)
      {
         processCalendar(row); // depends on control dependency: [for], data = [row]
      }
      if (m_defaultCalendarID != null)
      {
         ProjectCalendar defaultCalendar = m_calMap.get(m_defaultCalendarID);
         // Primavera XER files can sometimes not contain a definition of the default
         // project calendar so only try to set if we find a definition.
         if (defaultCalendar != null)
         {
            m_project.setDefaultCalendar(defaultCalendar); // depends on control dependency: [if], data = [(defaultCalendar]
         }
      }
} } |
public class class_name {
    /**
     * Returns the precomputed property names when they were cached for this
     * exact bean type; otherwise discovers the names reflectively per instance.
     */
    @Override
    public List<String> getPropertyNames(Object instance) {
        if (!CollectionUtils.isEmpty(propertyNames) && instance.getClass().equals(this.beanType)) {
            return this.propertyNames;
        } else {
            return PropertyProxyUtils.findPropertyNames(this.conversionService, this.useDirectFieldAccess, instance);
        }
} } | public class class_name {
    // Dependency-annotated duplicate of getPropertyNames(); trailing "// depends on ..." markers are dataset labels, not documentation.
    @Override
    public List<String> getPropertyNames(Object instance) {
        if (!CollectionUtils.isEmpty(propertyNames) && instance.getClass().equals(this.beanType)) {
            return this.propertyNames; // depends on control dependency: [if], data = [none]
        } else {
            return PropertyProxyUtils.findPropertyNames(this.conversionService, this.useDirectFieldAccess, instance); // depends on control dependency: [if], data = [none]
        }
} } |
public class class_name {
    /**
     * Adds every element of {@code listenersToAdd} via {@code add(...)}.
     *
     * @param listenersToAdd listeners to register; {@code null} means "nothing to add"
     * @return {@code true} if at least one add(...) call reported a change
     */
    public boolean addAll(Object[] listenersToAdd) {
        if (listenersToAdd == null) {
            return false;
        }
        boolean changed = false;
        for (int i = 0; i < listenersToAdd.length; i++) {
            if (add(listenersToAdd[i])) {
                changed = true;
            }
        }
        return changed;
} } | public class class_name {
    // Dependency-annotated duplicate of addAll(); trailing "// depends on ..." markers are dataset labels, not documentation.
    public boolean addAll(Object[] listenersToAdd) {
        if (listenersToAdd == null) {
            return false; // depends on control dependency: [if], data = [none]
        }
        boolean changed = false;
        for (int i = 0; i < listenersToAdd.length; i++) {
            if (add(listenersToAdd[i])) {
                changed = true; // depends on control dependency: [if], data = [none]
            }
        }
        return changed;
} } |
public class class_name {
    /**
     * Returns a UUID timestamp for type-1 UUID generation, lazily initializing
     * the generator state on first use. When the raw clock has not advanced
     * past the last issued value, the clock sequence is bumped once and the
     * time re-read before being recorded as the new high-water mark.
     */
    // NOTE(review): the RANDOM null-check lazy init and _lastMillis update are not synchronized here -- confirm callers serialize access.
    static long getTime() {
        if (RANDOM == null)
            initializeForType1();
        long newTime = getUUIDTime();
        if (newTime <= _lastMillis) {
            incrementSequence();
            newTime = getUUIDTime();
        }
        _lastMillis = newTime;
        return newTime;
} } | public class class_name {
    // Dependency-annotated duplicate of getTime(); trailing "// depends on ..." markers are dataset labels, not documentation.
    static long getTime() {
        if (RANDOM == null)
            initializeForType1();
        long newTime = getUUIDTime();
        if (newTime <= _lastMillis) {
            incrementSequence(); // depends on control dependency: [if], data = [none]
            newTime = getUUIDTime(); // depends on control dependency: [if], data = [none]
        }
        _lastMillis = newTime;
        return newTime;
} } |
public class class_name {
@SuppressWarnings("unchecked")
private Set<Resource> lookupRelationshipField(Collection<Resource> sourceResources, ResourceField relationshipField, QueryAdapter queryAdapter, RepositoryMethodParameterProvider parameterProvider,
Map<ResourceIdentifier, Resource> resourceMap, Map<ResourceIdentifier, Object> entityMap) {
if (sourceResources.isEmpty()) {
return Collections.emptySet();
}
ResourceInformation resourceInformation = relationshipField.getParentResourceInformation();
RegistryEntry registyEntry = resourceRegistry.getEntry(resourceInformation.getResourceType());
List<Serializable> resourceIds = getIds(sourceResources, resourceInformation);
boolean isMany = Iterable.class.isAssignableFrom(relationshipField.getType());
Class<?> relationshipFieldClass = relationshipField.getElementType();
Set<Resource> loadedTargets = new HashSet<>();
@SuppressWarnings("rawtypes")
RelationshipRepositoryAdapter relationshipRepository = registyEntry.getRelationshipRepositoryForClass(relationshipFieldClass, parameterProvider);
if (relationshipRepository != null) {
Map<Object, JsonApiResponse> responseMap;
if (isMany) {
responseMap = relationshipRepository.findBulkManyTargets(resourceIds, relationshipField, queryAdapter);
} else {
responseMap = relationshipRepository.findBulkOneTargets(resourceIds, relationshipField, queryAdapter);
}
for (Resource sourceResource : sourceResources) {
Serializable sourceId = resourceInformation.parseIdString(sourceResource.getId());
JsonApiResponse targetResponse = responseMap.get(sourceId);
if (targetResponse != null && targetResponse.getEntity() != null) {
Object targetEntity = targetResponse.getEntity();
List<Resource> targets = setupRelation(sourceResource, relationshipField, targetEntity, queryAdapter, resourceMap, entityMap);
loadedTargets.addAll(targets);
} else {
Nullable<Object> emptyData = Nullable.of(Iterable.class.isAssignableFrom(relationshipField.getType()) ? (Object) Collections.emptyList() : null);
Relationship relationship = sourceResource.getRelationships().get(relationshipField.getJsonName());
relationship.setData(emptyData);
}
}
}
return loadedTargets;
} } | public class class_name {
    // Dependency-annotated duplicate of lookupRelationshipField(); trailing "// depends on ..." markers are dataset labels, not documentation.
    @SuppressWarnings("unchecked")
    private Set<Resource> lookupRelationshipField(Collection<Resource> sourceResources, ResourceField relationshipField, QueryAdapter queryAdapter, RepositoryMethodParameterProvider parameterProvider,
            Map<ResourceIdentifier, Resource> resourceMap, Map<ResourceIdentifier, Object> entityMap) {
        if (sourceResources.isEmpty()) {
            return Collections.emptySet(); // depends on control dependency: [if], data = [none]
        }
        ResourceInformation resourceInformation = relationshipField.getParentResourceInformation();
        RegistryEntry registyEntry = resourceRegistry.getEntry(resourceInformation.getResourceType());
        List<Serializable> resourceIds = getIds(sourceResources, resourceInformation);
        boolean isMany = Iterable.class.isAssignableFrom(relationshipField.getType());
        Class<?> relationshipFieldClass = relationshipField.getElementType();
        Set<Resource> loadedTargets = new HashSet<>();
        @SuppressWarnings("rawtypes")
        RelationshipRepositoryAdapter relationshipRepository = registyEntry.getRelationshipRepositoryForClass(relationshipFieldClass, parameterProvider);
        if (relationshipRepository != null) {
            Map<Object, JsonApiResponse> responseMap;
            if (isMany) {
                responseMap = relationshipRepository.findBulkManyTargets(resourceIds, relationshipField, queryAdapter); // depends on control dependency: [if], data = [none]
            } else {
                responseMap = relationshipRepository.findBulkOneTargets(resourceIds, relationshipField, queryAdapter); // depends on control dependency: [if], data = [none]
            }
            for (Resource sourceResource : sourceResources) {
                Serializable sourceId = resourceInformation.parseIdString(sourceResource.getId());
                JsonApiResponse targetResponse = responseMap.get(sourceId);
                if (targetResponse != null && targetResponse.getEntity() != null) {
                    Object targetEntity = targetResponse.getEntity();
                    List<Resource> targets = setupRelation(sourceResource, relationshipField, targetEntity, queryAdapter, resourceMap, entityMap);
                    loadedTargets.addAll(targets); // depends on control dependency: [if], data = [none]
                } else {
                    Nullable<Object> emptyData = Nullable.of(Iterable.class.isAssignableFrom(relationshipField.getType()) ? (Object) Collections.emptyList() : null);
                    Relationship relationship = sourceResource.getRelationships().get(relationshipField.getJsonName());
                    relationship.setData(emptyData); // depends on control dependency: [if], data = [none]
                }
            }
        }
        return loadedTargets;
} } |
public class class_name {
    /**
     * Pages through every campaign in the session's account, PAGE_SIZE at a
     * time ordered by name, printing each campaign's name and ID.
     *
     * @param adWordsServices the services factory used to obtain CampaignService
     * @param session the authenticated AdWords session
     * @throws RemoteException if a CampaignService call fails
     */
    public static void runExample(
            AdWordsServicesInterface adWordsServices, AdWordsSession session) throws RemoteException {
        // Get the CampaignService.
        CampaignServiceInterface campaignService =
            adWordsServices.get(session, CampaignServiceInterface.class);
        int offset = 0;
        // Create selector.
        SelectorBuilder builder = new SelectorBuilder();
        Selector selector = builder
            .fields(CampaignField.Id, CampaignField.Name)
            .orderAscBy(CampaignField.Name)
            .offset(offset)
            .limit(PAGE_SIZE)
            .build();
        CampaignPage page;
        do {
            // Get all campaigns.
            page = campaignService.get(selector);
            // Display campaigns.
            if (page.getEntries() != null) {
                for (Campaign campaign : page.getEntries()) {
                    System.out.printf("Campaign with name '%s' and ID %d was found.%n", campaign.getName(),
                        campaign.getId());
                }
            } else {
                System.out.println("No campaigns were found.");
            }
            offset += PAGE_SIZE;
            selector = builder.increaseOffsetBy(PAGE_SIZE).build();
        } while (offset < page.getTotalNumEntries());
} } | public class class_name {
    // Dependency-annotated duplicate of runExample(); trailing "// depends on ..." markers are dataset labels, not documentation.
    public static void runExample(
            AdWordsServicesInterface adWordsServices, AdWordsSession session) throws RemoteException {
        // Get the CampaignService.
        CampaignServiceInterface campaignService =
            adWordsServices.get(session, CampaignServiceInterface.class);
        int offset = 0;
        // Create selector.
        SelectorBuilder builder = new SelectorBuilder();
        Selector selector = builder
            .fields(CampaignField.Id, CampaignField.Name)
            .orderAscBy(CampaignField.Name)
            .offset(offset)
            .limit(PAGE_SIZE)
            .build();
        CampaignPage page;
        do {
            // Get all campaigns.
            page = campaignService.get(selector);
            // Display campaigns.
            if (page.getEntries() != null) {
                for (Campaign campaign : page.getEntries()) {
                    System.out.printf("Campaign with name '%s' and ID %d was found.%n", campaign.getName(),
                        campaign.getId()); // depends on control dependency: [for], data = [none]
                }
            } else {
                System.out.println("No campaigns were found."); // depends on control dependency: [if], data = [none]
            }
            offset += PAGE_SIZE;
            selector = builder.increaseOffsetBy(PAGE_SIZE).build();
        } while (offset < page.getTotalNumEntries());
} } |
public class class_name {
    /**
     * Returns the rules that make up this time zone: always the initial rule,
     * plus the standard and DST rules when daylight saving time is in use.
     */
    @Override
    public TimeZoneRule[] getTimeZoneRules() {
        initTransitionRules();
        int size = useDaylight ? 3 : 1;
        TimeZoneRule[] rules = new TimeZoneRule[size];
        rules[0] = initialRule;
        if (useDaylight) {
            rules[1] = stdRule;
            rules[2] = dstRule;
        }
        return rules;
} } | public class class_name {
    // Dependency-annotated duplicate of getTimeZoneRules(); trailing "// depends on ..." markers are dataset labels, not documentation.
    @Override
    public TimeZoneRule[] getTimeZoneRules() {
        initTransitionRules();
        int size = useDaylight ? 3 : 1;
        TimeZoneRule[] rules = new TimeZoneRule[size];
        rules[0] = initialRule;
        if (useDaylight) {
            rules[1] = stdRule; // depends on control dependency: [if], data = [none]
            rules[2] = dstRule; // depends on control dependency: [if], data = [none]
        }
        return rules;
} } |
public class class_name {
    /**
     * Resolves every configured URL (comma-delimited entries allowed) to a
     * Resource. Missing files are skipped with a debug log when
     * ignoreConfigurationNotFound is set; otherwise an
     * EmbeddedJmxTransException is thrown.
     *
     * @return the list of existing configuration resources
     */
    private List<Resource> getConfigurations() {
        List<Resource> result = new ArrayList<Resource>();
        for (String delimitedConfigurationUrl : configurationUrls) {
            String[] tokens = StringUtils.commaDelimitedListToStringArray(delimitedConfigurationUrl);
            tokens = StringUtils.trimArrayElements(tokens);
            for (String configurationUrl : tokens) {
                configurationUrl = configurationUrl.trim();
                Resource configuration = resourceLoader.getResource(configurationUrl);
                if (configuration != null && configuration.exists()) {
                    result.add(configuration);
                } else if (ignoreConfigurationNotFound) {
                    logger.debug("Ignore missing configuration file {}", configuration);
                } else {
                    throw new EmbeddedJmxTransException("Configuration file " + configuration + " not found");
                }
            }
        }
        return result;
} } | public class class_name {
    // Dependency-annotated duplicate of getConfigurations(); trailing "// depends on ..." markers are dataset labels, not documentation.
    private List<Resource> getConfigurations() {
        List<Resource> result = new ArrayList<Resource>();
        for (String delimitedConfigurationUrl : configurationUrls) {
            String[] tokens = StringUtils.commaDelimitedListToStringArray(delimitedConfigurationUrl);
            tokens = StringUtils.trimArrayElements(tokens); // depends on control dependency: [for], data = [none]
            for (String configurationUrl : tokens) {
                configurationUrl = configurationUrl.trim(); // depends on control dependency: [for], data = [configurationUrl]
                Resource configuration = resourceLoader.getResource(configurationUrl);
                if (configuration != null && configuration.exists()) {
                    result.add(configuration); // depends on control dependency: [if], data = [(configuration]
                } else if (ignoreConfigurationNotFound) {
                    logger.debug("Ignore missing configuration file {}", configuration); // depends on control dependency: [if], data = [none]
                } else {
                    throw new EmbeddedJmxTransException("Configuration file " + configuration + " not found");
                }
            }
        }
        return result;
} } |
public class class_name {
    /**
     * Binary-searches the column index for the slot that may contain {@code name},
     * optionally restricted by {@code lastIndex} from a previous search and
     * honouring reversed (right-to-left) iteration.
     *
     * @return the slot index, or -1 when {@code lastIndex} is already past the end
     */
    public static int indexFor(Composite name, List<IndexInfo> indexList, CType comparator, boolean reversed, int lastIndex)
    {
        if (name.isEmpty())
            return lastIndex >= 0 ? lastIndex : reversed ? indexList.size() - 1 : 0;
        if (lastIndex >= indexList.size())
            return -1;
        IndexInfo target = new IndexInfo(name, name, 0, 0);
        /*
        Take the example from the unit test, and say your index looks like this:
        [0..5][10..15][20..25]
        and you look for the slice [13..17].
        When doing a forward slice, we are doing a binary search comparing 13 (the start of the query)
        to the lastName part of the index slot. You'll end up with the "first" slot, going from left to right,
        that may contain the start.
        When doing a reverse slice, we do the same thing, only using as a start column the end of the query,
        i.e. 17 in this example, compared to the firstName part of the index slots. bsearch will give us the
        first slot where firstName > start ([20..25] here), so we subtract an extra one to get the slot just before.
        */
        int startIdx = 0;
        List<IndexInfo> toSearch = indexList;
        if (lastIndex >= 0)
        {
            if (reversed)
            {
                toSearch = indexList.subList(0, lastIndex + 1);
            }
            else
            {
                startIdx = lastIndex;
                toSearch = indexList.subList(lastIndex, indexList.size());
            }
        }
        int index = Collections.binarySearch(toSearch, target, getComparator(comparator, reversed));
        return startIdx + (index < 0 ? -index - (reversed ? 2 : 1) : index);
} } | public class class_name {
    // Dependency-annotated duplicate of indexFor(); trailing "// depends on ..." markers are dataset labels, not documentation.
    public static int indexFor(Composite name, List<IndexInfo> indexList, CType comparator, boolean reversed, int lastIndex)
    {
        if (name.isEmpty())
            return lastIndex >= 0 ? lastIndex : reversed ? indexList.size() - 1 : 0;
        if (lastIndex >= indexList.size())
            return -1;
        IndexInfo target = new IndexInfo(name, name, 0, 0);
        /*
        Take the example from the unit test, and say your index looks like this:
        [0..5][10..15][20..25]
        and you look for the slice [13..17].
        When doing forward slice, we we doing a binary search comparing 13 (the start of the query)
        to the lastName part of the index slot. You'll end up with the "first" slot, going from left to right,
        that may contain the start.
        When doing a reverse slice, we do the same thing, only using as a start column the end of the query,
        i.e. 17 in this example, compared to the firstName part of the index slots. bsearch will give us the
        first slot where firstName > start ([20..25] here), so we subtract an extra one to get the slot just before.
        */
        int startIdx = 0;
        List<IndexInfo> toSearch = indexList;
        if (lastIndex >= 0)
        {
            if (reversed)
            {
                toSearch = indexList.subList(0, lastIndex + 1); // depends on control dependency: [if], data = [none]
            }
            else
            {
                startIdx = lastIndex; // depends on control dependency: [if], data = [none]
                toSearch = indexList.subList(lastIndex, indexList.size()); // depends on control dependency: [if], data = [none]
            }
        }
        int index = Collections.binarySearch(toSearch, target, getComparator(comparator, reversed));
        return startIdx + (index < 0 ? -index - (reversed ? 2 : 1) : index);
} } |
public class class_name {
    /**
     * Computes the axis-aligned bounding box of every atom in {@code protein}.
     *
     * Fix: the original implementation chained the six boundary checks with
     * else-if, so each atom could update at most ONE of the six bounds (an atom
     * holding, say, both the maximum x and the maximum y only widened x).
     * All six bounds are now updated independently for every atom.
     *
     * @return array of {minX, maxX, minY, maxY, minZ, maxZ}
     */
    public double[] findGridBoundaries() {
        IAtom[] atoms = AtomContainerManipulator.getAtomArray(protein);
        double[] minMax = new double[6];
        // Seed all six bounds from the first atom.
        minMax[0] = atoms[0].getPoint3d().x;
        minMax[1] = atoms[0].getPoint3d().x;
        minMax[2] = atoms[0].getPoint3d().y;
        minMax[3] = atoms[0].getPoint3d().y;
        minMax[4] = atoms[0].getPoint3d().z;
        minMax[5] = atoms[0].getPoint3d().z;
        for (int i = 0; i < atoms.length; i++) {
            // Each bound is checked independently; one atom may extend several at once.
            minMax[0] = Math.min(minMax[0], atoms[i].getPoint3d().x);
            minMax[1] = Math.max(minMax[1], atoms[i].getPoint3d().x);
            minMax[2] = Math.min(minMax[2], atoms[i].getPoint3d().y);
            minMax[3] = Math.max(minMax[3], atoms[i].getPoint3d().y);
            minMax[4] = Math.min(minMax[4], atoms[i].getPoint3d().z);
            minMax[5] = Math.max(minMax[5], atoms[i].getPoint3d().z);
        }
        return minMax;
} } | public class class_name {
    // Dependency-annotated duplicate of findGridBoundaries(); trailing "// depends on ..." markers are dataset labels, not documentation.
    // NOTE(review): the else-if chain lets each atom update at most one of the six bounds -- likely a bug; confirm against the source project.
    public double[] findGridBoundaries() {
        IAtom[] atoms = AtomContainerManipulator.getAtomArray(protein);
        double[] minMax = new double[6];
        minMax[0] = atoms[0].getPoint3d().x;
        minMax[1] = atoms[0].getPoint3d().x;
        minMax[2] = atoms[0].getPoint3d().y;
        minMax[3] = atoms[0].getPoint3d().y;
        minMax[4] = atoms[0].getPoint3d().z;
        minMax[5] = atoms[0].getPoint3d().z;
        for (int i = 0; i < atoms.length; i++) {
            if (atoms[i].getPoint3d().x > minMax[1]) {
                minMax[1] = atoms[i].getPoint3d().x; // depends on control dependency: [if], data = [none]
            } else if (atoms[i].getPoint3d().y > minMax[3]) {
                minMax[3] = atoms[i].getPoint3d().y; // depends on control dependency: [if], data = [none]
            } else if (atoms[i].getPoint3d().z > minMax[5]) {
                minMax[5] = atoms[i].getPoint3d().z; // depends on control dependency: [if], data = [none]
            } else if (atoms[i].getPoint3d().x < minMax[0]) {
                minMax[0] = atoms[i].getPoint3d().x; // depends on control dependency: [if], data = [none]
            } else if (atoms[i].getPoint3d().y < minMax[2]) {
                minMax[2] = atoms[i].getPoint3d().y; // depends on control dependency: [if], data = [none]
            } else if (atoms[i].getPoint3d().z < minMax[4]) {
                minMax[4] = atoms[i].getPoint3d().z; // depends on control dependency: [if], data = [none]
            }
        }
        return minMax;
} } |
public class class_name {
    /**
     * Removes one callback registered under {@code pattern} (the first in the
     * list), dropping the map entry entirely when no callbacks remain, then
     * sends an {unsubscribe, Pattern} tuple to the server.
     *
     * @param pattern the subscription pattern (prefixed internally)
     * @throws InvalidInputException if nothing is subscribed under the pattern
     */
    public void unsubscribe(final String pattern)
        throws InvalidInputException
    {
        final String s = this.prefix + pattern;
        LinkedList<FunctionInterface9> callback_list = this.callbacks.get(s);
        if (callback_list == null)
        {
            throw new InvalidInputException();
        }
        else
        {
            // Only the oldest callback is removed per call.
            callback_list.removeFirst();
            if (callback_list.isEmpty())
            {
                this.callbacks.remove(s);
            }
        }
        OtpOutputStream unsubscribe = new OtpOutputStream();
        unsubscribe.write(OtpExternal.versionTag);
        final OtpErlangObject[] tuple = {new OtpErlangAtom("unsubscribe"),
                                         new OtpErlangString(pattern)};
        unsubscribe.write_any(new OtpErlangTuple(tuple));
        send(unsubscribe);
} } | public class class_name {
    // Dependency-annotated duplicate of unsubscribe(); trailing "// depends on ..." markers are dataset labels, not documentation.
    public void unsubscribe(final String pattern)
        throws InvalidInputException
    {
        final String s = this.prefix + pattern;
        LinkedList<FunctionInterface9> callback_list = this.callbacks.get(s);
        if (callback_list == null)
        {
            throw new InvalidInputException();
        }
        else
        {
            callback_list.removeFirst();
            if (callback_list.isEmpty())
            {
                this.callbacks.remove(s); // depends on control dependency: [if], data = [none]
            }
        }
        OtpOutputStream unsubscribe = new OtpOutputStream();
        unsubscribe.write(OtpExternal.versionTag);
        final OtpErlangObject[] tuple = {new OtpErlangAtom("unsubscribe"),
                                         new OtpErlangString(pattern)};
        unsubscribe.write_any(new OtpErlangTuple(tuple));
        send(unsubscribe);
} } |
public class class_name {
    /**
     * Drains one unit of restore work from the data sink; once the sink hits
     * EOF or the snapshot completion monitor fires, finalizes the join,
     * otherwise requeues this task for a later retry.
     */
    private void runForBlockingDataTransfer(SiteProcedureConnection siteConnection)
    {
        boolean sourcesReady = false;
        RestoreWork restoreWork = m_dataSink.poll(m_snapshotBufferAllocator);
        if (restoreWork != null) {
            restoreBlock(restoreWork, siteConnection);
            sourcesReady = true;
        }
        // The completion monitor may fire even if m_dataSink has not reached EOF in the case that there's no
        // replicated table in the database, so check for both conditions.
        if (m_dataSink.isEOF() || m_snapshotCompletionMonitor.isDone()) {
            // No more data from this data sink, close and remove it from the list
            m_dataSink.close();
            if (m_streamSnapshotMb != null) {
                VoltDB.instance().getHostMessenger().removeMailbox(m_streamSnapshotMb.getHSId());
                m_streamSnapshotMb = null;
                ELASTICLOG.debug(m_whoami + " data transfer is finished");
            }
            if (m_snapshotCompletionMonitor.isDone()) {
                try {
                    SnapshotCompletionEvent event = m_snapshotCompletionMonitor.get();
                    siteConnection.setDRProtocolVersion(event.drVersion);
                    // NOTE(review): event is dereferenced (event.drVersion) before this assert, making the assert ineffective.
                    assert(event != null);
                    ELASTICLOG.debug("P" + m_partitionId + " noticed data transfer completion");
                    m_completionAction.setSnapshotTxnId(event.multipartTxnId);
                    setJoinComplete(siteConnection,
                                    event.exportSequenceNumbers,
                                    event.drSequenceNumbers,
                                    event.drMixedClusterSizeConsumerState,
                                    false /* requireExistingSequenceNumbers */,
                                    event.clusterCreateTime);
                } catch (InterruptedException e) {
                    // isDone() already returned true, this shouldn't happen
                    VoltDB.crashLocalVoltDB("Impossible interruption happend", true, e);
                } catch (ExecutionException e) {
                    VoltDB.crashLocalVoltDB("Error waiting for snapshot to finish", true, e);
                }
            } else {
                m_taskQueue.offer(this);
            }
        } else {
            // The sources are not set up yet, don't block the site,
            // return here and retry later.
            returnToTaskQueue(sourcesReady);
        }
} } | public class class_name {
    // Dependency-annotated duplicate of runForBlockingDataTransfer(); trailing "// depends on ..." markers are dataset labels, not documentation.
    private void runForBlockingDataTransfer(SiteProcedureConnection siteConnection)
    {
        boolean sourcesReady = false;
        RestoreWork restoreWork = m_dataSink.poll(m_snapshotBufferAllocator);
        if (restoreWork != null) {
            restoreBlock(restoreWork, siteConnection); // depends on control dependency: [if], data = [(restoreWork]
            sourcesReady = true; // depends on control dependency: [if], data = [none]
        }
        // The completion monitor may fire even if m_dataSink has not reached EOF in the case that there's no
        // replicated table in the database, so check for both conditions.
        if (m_dataSink.isEOF() || m_snapshotCompletionMonitor.isDone()) {
            // No more data from this data sink, close and remove it from the list
            m_dataSink.close(); // depends on control dependency: [if], data = [none]
            if (m_streamSnapshotMb != null) {
                VoltDB.instance().getHostMessenger().removeMailbox(m_streamSnapshotMb.getHSId()); // depends on control dependency: [if], data = [(m_streamSnapshotMb]
                m_streamSnapshotMb = null; // depends on control dependency: [if], data = [none]
                ELASTICLOG.debug(m_whoami + " data transfer is finished"); // depends on control dependency: [if], data = [none]
            }
            if (m_snapshotCompletionMonitor.isDone()) {
                try {
                    SnapshotCompletionEvent event = m_snapshotCompletionMonitor.get();
                    siteConnection.setDRProtocolVersion(event.drVersion); // depends on control dependency: [try], data = [none]
                    assert(event != null); // depends on control dependency: [try], data = [none]
                    ELASTICLOG.debug("P" + m_partitionId + " noticed data transfer completion"); // depends on control dependency: [try], data = [none]
                    m_completionAction.setSnapshotTxnId(event.multipartTxnId); // depends on control dependency: [try], data = [none]
                    setJoinComplete(siteConnection,
                                    event.exportSequenceNumbers,
                                    event.drSequenceNumbers,
                                    event.drMixedClusterSizeConsumerState,
                                    false /* requireExistingSequenceNumbers */,
                                    event.clusterCreateTime); // depends on control dependency: [try], data = [none]
                } catch (InterruptedException e) {
                    // isDone() already returned true, this shouldn't happen
                    VoltDB.crashLocalVoltDB("Impossible interruption happend", true, e);
                } catch (ExecutionException e) { // depends on control dependency: [catch], data = [none]
                    VoltDB.crashLocalVoltDB("Error waiting for snapshot to finish", true, e);
                } // depends on control dependency: [catch], data = [none]
            } else {
                m_taskQueue.offer(this); // depends on control dependency: [if], data = [none]
            }
        } else {
            // The sources are not set up yet, don't block the site,
            // return here and retry later.
            returnToTaskQueue(sourcesReady); // depends on control dependency: [if], data = [none]
        }
} } |
public class class_name {
    /**
     * Delegates to the wrapped response's getCookies(), tracing the call when
     * FINE-level logging is enabled.
     */
    public Cookie[] getCookies() {
        if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) {  //306998.15
            logger.logp(Level.FINE, CLASS_NAME,"getCookies","["+this+"]");
        }
        return (_response.getCookies());
} } | public class class_name {
    // Dependency-annotated duplicate of getCookies(); trailing "// depends on ..." markers are dataset labels, not documentation.
    public Cookie[] getCookies() {
        if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) { //306998.15
            logger.logp(Level.FINE, CLASS_NAME,"getCookies","["+this+"]"); // depends on control dependency: [if], data = [none]
        }
        return (_response.getCookies());
} } |
public class class_name {
    /**
     * Installs TLS with ALPN on the channel. Once the handshake completes and
     * HTTP/2 is negotiated, the HTTP/2 handlers are added to the pipeline; any
     * other protocol closes the connection and fails with IllegalStateException.
     */
    private void configureSsl(SocketChannel ch) {
        SslContext sslCtx = SslContextBuilder.build();
        ChannelPipeline pipeline = ch.pipeline();
        pipeline.addLast(sslCtx.newHandler(ch.alloc()));
        // We must wait for the handshake to finish and the protocol to be negotiated before configuring
        // the HTTP/2 components of the pipeline.
        pipeline.addLast(new ApplicationProtocolNegotiationHandler("") {
            @Override
            protected void configurePipeline(ChannelHandlerContext ctx, String protocol) {
                if (ApplicationProtocolNames.HTTP_2.equals(protocol)) {
                    ChannelPipeline p = ctx.pipeline();
                    p.addLast(connectionHandler);
                    configureEndOfPipeline(p);
                    return;
                }
                ctx.close();
                throw new IllegalStateException("unknown protocol: " + protocol);
            }
        });
} } | public class class_name {
    // Dependency-annotated duplicate of configureSsl(); trailing "// depends on ..." markers are dataset labels, not documentation.
    private void configureSsl(SocketChannel ch) {
        SslContext sslCtx = SslContextBuilder.build();
        ChannelPipeline pipeline = ch.pipeline();
        pipeline.addLast(sslCtx.newHandler(ch.alloc()));
        // We must wait for the handshake to finish and the protocol to be negotiated before configuring
        // the HTTP/2 components of the pipeline.
        pipeline.addLast(new ApplicationProtocolNegotiationHandler("") {
            @Override
            protected void configurePipeline(ChannelHandlerContext ctx, String protocol) {
                if (ApplicationProtocolNames.HTTP_2.equals(protocol)) {
                    ChannelPipeline p = ctx.pipeline();
                    p.addLast(connectionHandler); // depends on control dependency: [if], data = [none]
                    configureEndOfPipeline(p); // depends on control dependency: [if], data = [none]
                    return; // depends on control dependency: [if], data = [none]
                }
                ctx.close();
                throw new IllegalStateException("unknown protocol: " + protocol);
            }
        });
} } |
public class class_name {
    /**
     * Adds the given scheduled action names to this request (builder style),
     * lazily creating the backing list on first use.
     *
     * @param scheduledActionNames names to append
     * @return this request, for call chaining
     */
    public DescribeScheduledActionsRequest withScheduledActionNames(String... scheduledActionNames) {
        if (this.scheduledActionNames == null) {
            setScheduledActionNames(new java.util.ArrayList<String>(scheduledActionNames.length));
        }
        for (String ele : scheduledActionNames) {
            this.scheduledActionNames.add(ele);
        }
        return this;
} } | public class class_name {
    // Dependency-annotated duplicate of withScheduledActionNames(); trailing "// depends on ..." markers are dataset labels, not documentation.
    public DescribeScheduledActionsRequest withScheduledActionNames(String... scheduledActionNames) {
        if (this.scheduledActionNames == null) {
            setScheduledActionNames(new java.util.ArrayList<String>(scheduledActionNames.length)); // depends on control dependency: [if], data = [none]
        }
        for (String ele : scheduledActionNames) {
            this.scheduledActionNames.add(ele); // depends on control dependency: [for], data = [ele]
        }
        return this;
} } |
public class class_name {
    /**
     * Marshalls the ConnectionPasswordEncryption fields through the protocol
     * marshaller; a null argument or any marshalling failure is reported as
     * SdkClientException.
     */
    public void marshall(ConnectionPasswordEncryption connectionPasswordEncryption, ProtocolMarshaller protocolMarshaller) {
        if (connectionPasswordEncryption == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(connectionPasswordEncryption.getReturnConnectionPasswordEncrypted(), RETURNCONNECTIONPASSWORDENCRYPTED_BINDING);
            protocolMarshaller.marshall(connectionPasswordEncryption.getAwsKmsKeyId(), AWSKMSKEYID_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
} } | public class class_name {
    // Dependency-annotated duplicate of marshall(); trailing "// depends on ..." markers are dataset labels, not documentation.
    public void marshall(ConnectionPasswordEncryption connectionPasswordEncryption, ProtocolMarshaller protocolMarshaller) {
        if (connectionPasswordEncryption == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(connectionPasswordEncryption.getReturnConnectionPasswordEncrypted(), RETURNCONNECTIONPASSWORDENCRYPTED_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(connectionPasswordEncryption.getAwsKmsKeyId(), AWSKMSKEYID_BINDING); // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        } // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
    /**
     * Copies {@code aIS} to {@code aOS} using the supplied buffer, optionally
     * limited to {@code aLimit} bytes. The input stream is always closed; the
     * output stream is left open. Returns SUCCESS only when both streams were
     * non-null and no IOException occurred; the byte count is reported through
     * {@code aCopyByteCount} when supplied.
     */
    @Nonnull
    public static ESuccess copyInputStreamToOutputStream (@WillClose @Nullable final InputStream aIS,
                                                          @WillNotClose @Nullable final OutputStream aOS,
                                                          @Nonnull @Nonempty final byte [] aBuffer,
                                                          @Nullable final MutableLong aCopyByteCount,
                                                          @Nullable final Long aLimit)
    {
      try
      {
        ValueEnforcer.notEmpty (aBuffer, "Buffer");
        ValueEnforcer.isTrue (aLimit == null || aLimit.longValue () >= 0, () -> "Limit may not be negative: " + aLimit);
        if (aIS != null && aOS != null)
        {
          // both streams are not null
          long nTotalBytesCopied;
          if (aLimit == null)
            nTotalBytesCopied = _copyInputStreamToOutputStream (aIS, aOS, aBuffer);
          else
            nTotalBytesCopied = _copyInputStreamToOutputStreamWithLimit (aIS, aOS, aBuffer, aLimit.longValue ());
          // Add to statistics
          s_aByteSizeHdl.addSize (nTotalBytesCopied);
          // Remember copied bytes?
          if (aCopyByteCount != null)
            aCopyByteCount.set (nTotalBytesCopied);
          return ESuccess.SUCCESS;
        }
      }
      catch (final IOException ex)
      {
        if (!isKnownEOFException (ex))
          LOGGER.error ("Failed to copy from stream to stream", ex instanceof IMockException ? null : ex);
      }
      finally
      {
        // Ensure input stream is closed, even if output stream is null
        close (aIS);
      }
      return ESuccess.FAILURE;
} } | public class class_name {
    // Dependency-annotated duplicate of copyInputStreamToOutputStream(); trailing "// depends on ..." markers are dataset labels, not documentation.
    @Nonnull
    public static ESuccess copyInputStreamToOutputStream (@WillClose @Nullable final InputStream aIS,
                                                          @WillNotClose @Nullable final OutputStream aOS,
                                                          @Nonnull @Nonempty final byte [] aBuffer,
                                                          @Nullable final MutableLong aCopyByteCount,
                                                          @Nullable final Long aLimit)
    {
      try
      {
        ValueEnforcer.notEmpty (aBuffer, "Buffer"); // depends on control dependency: [try], data = [none]
        ValueEnforcer.isTrue (aLimit == null || aLimit.longValue () >= 0, () -> "Limit may not be negative: " + aLimit); // depends on control dependency: [try], data = [none]
        if (aIS != null && aOS != null)
        {
          // both streams are not null
          long nTotalBytesCopied;
          if (aLimit == null)
            nTotalBytesCopied = _copyInputStreamToOutputStream (aIS, aOS, aBuffer);
          else
            nTotalBytesCopied = _copyInputStreamToOutputStreamWithLimit (aIS, aOS, aBuffer, aLimit.longValue ());
          // Add to statistics
          s_aByteSizeHdl.addSize (nTotalBytesCopied); // depends on control dependency: [if], data = [none]
          // Remember copied bytes?
          if (aCopyByteCount != null)
            aCopyByteCount.set (nTotalBytesCopied);
          return ESuccess.SUCCESS; // depends on control dependency: [if], data = [none]
        }
      }
      catch (final IOException ex)
      {
        if (!isKnownEOFException (ex))
          LOGGER.error ("Failed to copy from stream to stream", ex instanceof IMockException ? null : ex);
      } // depends on control dependency: [catch], data = [none]
      finally
      {
        // Ensure input stream is closed, even if output stream is null
        close (aIS);
      }
      return ESuccess.FAILURE;
} } |
public class class_name {
    /**
     * Adjustment step for a density grid {@code dg} that has just been classified as
     * DENSE (grid-based stream clustering in the style of D-Stream). Among all
     * neighbours of {@code dg} it picks the neighbour {@code h} whose cluster
     * {@code ch} has the greatest weight, then:
     * <ul>
     *   <li>if {@code h} is DENSE: joins {@code dg} to {@code ch} when {@code dg}
     *       is unlabelled, otherwise merges {@code dg}'s cluster with {@code ch}
     *       (smaller cluster merged into larger);</li>
     *   <li>if {@code h} is TRANSITIONAL: joins {@code dg} to {@code ch} when that
     *       keeps {@code h} an outside grid, or moves {@code h} into {@code dg}'s
     *       cluster when {@code dg}'s cluster is at least as heavy;</li>
     *   <li>if no clustered neighbour exists and {@code dg} is unlabelled: starts a
     *       new cluster seeded by {@code dg} and absorbs TRANSITIONAL neighbours.</li>
     * </ul>
     *
     * NOTE(review): mutates {@code this.cluster_list} in place (set/add) and reads
     * {@code this.grid_list}; neither field is declared in this excerpt — presumably
     * they live on the enclosing clusterer. Confirm single-threaded use at the caller.
     *
     * @param dg      the density grid being adjusted
     * @param cv      the characteristic vector of {@code dg}; its label may be rewritten
     * @param dgClass the current cluster label of {@code dg}, or NO_CLASS if unclustered
     * @return the grids whose characteristic vectors were relabelled here, so the
     *         caller can fold them back into its grid list
     */
    private HashMap<DensityGrid, CharacteristicVector> adjustForDenseGrid(DensityGrid dg, CharacteristicVector cv, int dgClass)
    {
        //System.out.print("Density grid "+dg.toString()+" is adjusted as a dense grid at time "+this.getCurrTime()+". ");
        // Among all neighbours of dg, find the grid h whose cluster ch has the largest size
        GridCluster ch;                             // The cluster, ch, of h
        DensityGrid hChosen = new DensityGrid(dg);  // The chosen grid h, whose cluster ch has the largest size
        double hChosenSize = -1.0;                  // The size of ch, the largest cluster
        DensityGrid dgH;                            // The neighbour of g being considered
        int hClass = NO_CLASS;                      // The class label of h
        int hChosenClass = NO_CLASS;                // The class label of ch
        Iterator<DensityGrid> dgNeighbourhood = dg.getNeighbours().iterator();
        HashMap<DensityGrid, CharacteristicVector> glNew = new HashMap<DensityGrid, CharacteristicVector>();
        // Pass 1: scan the neighbourhood and remember the neighbour whose cluster
        // carries the greatest weight (ties resolved by iteration order).
        while (dgNeighbourhood.hasNext())
        {
            dgH = dgNeighbourhood.next();
            if (this.grid_list.containsKey(dgH))
            {
                hClass = this.grid_list.get(dgH).getLabel();
                if (hClass != NO_CLASS)
                {
                    ch = this.cluster_list.get(hClass);
                    if (ch.getWeight() > hChosenSize)
                    {
                        hChosenSize = ch.getWeight();
                        hChosenClass = hClass;
                        hChosen = new DensityGrid(dgH);
                    }
                }
            }
        }
        //System.out.println(" Chosen neighbour is "+hChosen.toString()+" from cluster "+hChosenClass+".");
        // Pass 2: act on the chosen neighbour h (if any) that is not already in dg's cluster.
        if (hChosenClass != NO_CLASS && hChosenClass != dgClass)
        {
            ch = this.cluster_list.get(hChosenClass);
            // If h is a dense grid
            if (this.grid_list.get(hChosen).getAttribute() == DENSE)
            {
                //System.out.println("h is dense.");
                // If dg is labelled as NO_CLASS
                if(dgClass == NO_CLASS)
                {
                    //System.out.println("g was labelled NO_CLASS");
                    cv.setLabel(hChosenClass);
                    glNew.put(dg, cv);
                    ch.addGrid(dg);
                    this.cluster_list.set(hChosenClass, ch);
                }
                // Else if dg belongs to cluster c and h belongs to c'
                else
                {
                    //System.out.println("g was labelled "+dgClass);
                    // Merge the lighter cluster into the heavier one.
                    double gSize = this.cluster_list.get(dgClass).getWeight();
                    if (gSize <= hChosenSize)
                        mergeClusters(dgClass, hChosenClass);
                    else
                        mergeClusters(hChosenClass, dgClass);
                }
            }
            // Else if h is a transitional grid
            else if (this.grid_list.get(hChosen).getAttribute() == TRANSITIONAL)
            {
                //System.out.print("h is transitional.");
                // If dg is labelled as no class and if h is an outside grid if dg is added to ch
                if (dgClass == NO_CLASS && !ch.isInside(hChosen, dg))
                {
                    cv.setLabel(hChosenClass);
                    glNew.put(dg, cv);
                    ch.addGrid(dg);
                    this.cluster_list.set(hChosenClass, ch);
                    //System.out.println(" dg is added to cluster "+hChosenClass+".");
                }
                // Else if dg is in cluster c and |c| >= |ch|
                else if (dgClass != NO_CLASS)
                {
                    GridCluster c = this.cluster_list.get(dgClass);
                    double gSize = c.getWeight();
                    if (gSize >= hChosenSize)
                    {
                        // Move h from cluster ch to cluster c
                        ch.removeGrid(hChosen);
                        c.addGrid(hChosen);
                        CharacteristicVector cvhChosen = this.grid_list.get(hChosen);
                        cvhChosen.setLabel(dgClass);
                        glNew.put(hChosen, cvhChosen);
                        //System.out.println("dgClass is "+dgClass+", hChosenClass is "+hChosenClass+", gSize is "+gSize+" and hChosenSize is "+hChosenSize+" h is added to cluster "+dgClass+".");
                        this.cluster_list.set(hChosenClass, ch);
                        this.cluster_list.set(dgClass, c);
                    }
                }
            }
        }
        // If dgClass is dense and not in a cluster, and none of its neighbours are in a cluster,
        // put it in its own new cluster and search the neighbourhood for transitional or dense
        // grids to add
        else if (dgClass == NO_CLASS)
        {
            // New cluster label = current list size; relies on cluster_list growing by append.
            int newClass = this.cluster_list.size();
            GridCluster c = new GridCluster((CFCluster)dg, new ArrayList<CFCluster>(), newClass);
            c.addGrid(dg);
            //System.out.println("Added "+dg.toString()+" to cluster "+newClass+".");
            this.cluster_list.add(c);
            cv.setLabel(newClass);
            glNew.put(dg, cv);
            // Iterate through the neighbourhood until no more transitional neighbours can be added
            // (dense neighbours will add themselves as part of their adjust process)
            dgNeighbourhood = dg.getNeighbours().iterator();
            while(dgNeighbourhood.hasNext())
            {
                DensityGrid dghprime = dgNeighbourhood.next();
                if (this.grid_list.containsKey(dghprime) && !c.getGrids().containsKey(dghprime))
                {
                    CharacteristicVector cvhprime = this.grid_list.get(dghprime);
                    if(cvhprime.getAttribute() == TRANSITIONAL)
                    {
                        //System.out.println("Added "+dghprime.toString()+" to cluster "+newClass+".");
                        c.addGrid(dghprime);
                        cvhprime.setLabel(newClass);
                        glNew.put(dghprime, cvhprime);
                    }
                }
            }
            this.cluster_list.set(newClass, c);
            //System.out.println("Cluster "+newClass+": "+this.cluster_list.get(newClass).toString());
        }
        return glNew;
    } } | public class class_name {
/**
 * Adjustment step for a density grid {@code dg} that has just been classified as
 * DENSE (grid-based stream clustering in the style of D-Stream): finds the
 * neighbour {@code h} whose cluster {@code ch} has the greatest weight, then
 * joins {@code dg} to {@code ch}, merges the lighter of the two clusters into the
 * heavier, absorbs {@code h} into {@code dg}'s cluster, or — when no clustered
 * neighbour exists and {@code dg} is unlabelled — starts a new cluster seeded by
 * {@code dg} and pulls in TRANSITIONAL neighbours.
 *
 * NOTE(review): mutates {@code this.cluster_list} in place and reads
 * {@code this.grid_list}; neither field is declared in this excerpt — presumably
 * they live on the enclosing clusterer. Confirm single-threaded use at the caller.
 *
 * The trailing "// depends on control dependency" annotations are dataset labels
 * marking each statement's controlling construct and data dependences; they are
 * intentionally preserved verbatim.
 *
 * @param dg      the density grid being adjusted
 * @param cv      the characteristic vector of {@code dg}; its label may be rewritten
 * @param dgClass the current cluster label of {@code dg}, or NO_CLASS if unclustered
 * @return the grids whose characteristic vectors were relabelled here
 */
private HashMap<DensityGrid, CharacteristicVector> adjustForDenseGrid(DensityGrid dg, CharacteristicVector cv, int dgClass)
{
    //System.out.print("Density grid "+dg.toString()+" is adjusted as a dense grid at time "+this.getCurrTime()+". ");
    // Among all neighbours of dg, find the grid h whose cluster ch has the largest size
    GridCluster ch;                             // The cluster, ch, of h
    DensityGrid hChosen = new DensityGrid(dg);  // The chosen grid h, whose cluster ch has the largest size
    double hChosenSize = -1.0;                  // The size of ch, the largest cluster
    DensityGrid dgH;                            // The neighbour of g being considered
    int hClass = NO_CLASS;                      // The class label of h
    int hChosenClass = NO_CLASS;                // The class label of ch
    Iterator<DensityGrid> dgNeighbourhood = dg.getNeighbours().iterator();
    HashMap<DensityGrid, CharacteristicVector> glNew = new HashMap<DensityGrid, CharacteristicVector>();
    while (dgNeighbourhood.hasNext())
    {
        dgH = dgNeighbourhood.next(); // depends on control dependency: [while], data = [none]
        if (this.grid_list.containsKey(dgH))
        {
            hClass = this.grid_list.get(dgH).getLabel(); // depends on control dependency: [if], data = [none]
            if (hClass != NO_CLASS)
            {
                ch = this.cluster_list.get(hClass); // depends on control dependency: [if], data = [(hClass]
                if (ch.getWeight() > hChosenSize)
                {
                    hChosenSize = ch.getWeight(); // depends on control dependency: [if], data = [none]
                    hChosenClass = hClass; // depends on control dependency: [if], data = [none]
                    hChosen = new DensityGrid(dgH); // depends on control dependency: [if], data = [none]
                }
            }
        }
    }
    //System.out.println(" Chosen neighbour is "+hChosen.toString()+" from cluster "+hChosenClass+".");
    if (hChosenClass != NO_CLASS && hChosenClass != dgClass)
    {
        ch = this.cluster_list.get(hChosenClass); // depends on control dependency: [if], data = [(hChosenClass]
        // If h is a dense grid
        if (this.grid_list.get(hChosen).getAttribute() == DENSE)
        {
            //System.out.println("h is dense.");
            // If dg is labelled as NO_CLASS
            if(dgClass == NO_CLASS)
            {
                //System.out.println("g was labelled NO_CLASS");
                cv.setLabel(hChosenClass); // depends on control dependency: [if], data = [none]
                glNew.put(dg, cv); // depends on control dependency: [if], data = [none]
                ch.addGrid(dg); // depends on control dependency: [if], data = [none]
                this.cluster_list.set(hChosenClass, ch); // depends on control dependency: [if], data = [none]
            }
            // Else if dg belongs to cluster c and h belongs to c'
            else
            {
                //System.out.println("g was labelled "+dgClass);
                double gSize = this.cluster_list.get(dgClass).getWeight();
                if (gSize <= hChosenSize)
                    mergeClusters(dgClass, hChosenClass);
                else
                    mergeClusters(hChosenClass, dgClass);
            }
        }
        // Else if h is a transitional grid
        else if (this.grid_list.get(hChosen).getAttribute() == TRANSITIONAL)
        {
            //System.out.print("h is transitional.");
            // If dg is labelled as no class and if h is an outside grid if dg is added to ch
            if (dgClass == NO_CLASS && !ch.isInside(hChosen, dg))
            {
                cv.setLabel(hChosenClass); // depends on control dependency: [if], data = [none]
                glNew.put(dg, cv); // depends on control dependency: [if], data = [none]
                ch.addGrid(dg); // depends on control dependency: [if], data = [none]
                this.cluster_list.set(hChosenClass, ch); // depends on control dependency: [if], data = [none]
                //System.out.println(" dg is added to cluster "+hChosenClass+".");
            }
            // Else if dg is in cluster c and |c| >= |ch|
            else if (dgClass != NO_CLASS)
            {
                GridCluster c = this.cluster_list.get(dgClass);
                double gSize = c.getWeight();
                if (gSize >= hChosenSize)
                {
                    // Move h from cluster ch to cluster c
                    ch.removeGrid(hChosen); // depends on control dependency: [if], data = [none]
                    c.addGrid(hChosen); // depends on control dependency: [if], data = [none]
                    CharacteristicVector cvhChosen = this.grid_list.get(hChosen);
                    cvhChosen.setLabel(dgClass); // depends on control dependency: [if], data = [none]
                    glNew.put(hChosen, cvhChosen); // depends on control dependency: [if], data = [none]
                    //System.out.println("dgClass is "+dgClass+", hChosenClass is "+hChosenClass+", gSize is "+gSize+" and hChosenSize is "+hChosenSize+" h is added to cluster "+dgClass+".");
                    this.cluster_list.set(hChosenClass, ch); // depends on control dependency: [if], data = [none]
                    this.cluster_list.set(dgClass, c); // depends on control dependency: [if], data = [none]
                }
            }
        }
    }
    // If dgClass is dense and not in a cluster, and none of its neighbours are in a cluster,
    // put it in its own new cluster and search the neighbourhood for transitional or dense
    // grids to add
    else if (dgClass == NO_CLASS)
    {
        int newClass = this.cluster_list.size();
        GridCluster c = new GridCluster((CFCluster)dg, new ArrayList<CFCluster>(), newClass);
        c.addGrid(dg); // depends on control dependency: [if], data = [none]
        //System.out.println("Added "+dg.toString()+" to cluster "+newClass+".");
        this.cluster_list.add(c); // depends on control dependency: [if], data = [none]
        cv.setLabel(newClass); // depends on control dependency: [if], data = [none]
        glNew.put(dg, cv); // depends on control dependency: [if], data = [none]
        // Iterate through the neighbourhood until no more transitional neighbours can be added
        // (dense neighbours will add themselves as part of their adjust process)
        dgNeighbourhood = dg.getNeighbours().iterator(); // depends on control dependency: [if], data = [none]
        while(dgNeighbourhood.hasNext())
        {
            DensityGrid dghprime = dgNeighbourhood.next();
            if (this.grid_list.containsKey(dghprime) && !c.getGrids().containsKey(dghprime))
            {
                CharacteristicVector cvhprime = this.grid_list.get(dghprime);
                if(cvhprime.getAttribute() == TRANSITIONAL)
                {
                    //System.out.println("Added "+dghprime.toString()+" to cluster "+newClass+".");
                    c.addGrid(dghprime); // depends on control dependency: [if], data = [none]
                    cvhprime.setLabel(newClass); // depends on control dependency: [if], data = [none]
                    glNew.put(dghprime, cvhprime); // depends on control dependency: [if], data = [none]
                }
            }
        }
        this.cluster_list.set(newClass, c); // depends on control dependency: [if], data = [none]
        //System.out.println("Cluster "+newClass+": "+this.cluster_list.get(newClass).toString());
    }
    return glNew;
} }
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.