| code (string, lengths 130–281k) | code_dependency (string, lengths 182–306k) |
|---|---|
public class class_name {
    /**
     * Replaces the stored moderation labels with a defensive copy of the
     * given collection, or clears them when {@code null} is supplied.
     *
     * @param moderationLabels the labels to store; may be {@code null}
     */
    public void setModerationLabels(java.util.Collection<ContentModerationDetection> moderationLabels) {
        // A null argument means "clear"; otherwise copy so later caller-side
        // mutation of the argument cannot affect this object.
        this.moderationLabels = (moderationLabels == null)
                ? null
                : new java.util.ArrayList<ContentModerationDetection>(moderationLabels);
    } }
|
public class class_name {
// Setter: clears the field on null input, otherwise stores a defensive
// ArrayList copy. The trailing "depends on control dependency" comments are
// machine-generated dependency annotations, not developer documentation.
public void setModerationLabels(java.util.Collection<ContentModerationDetection> moderationLabels) {
if (moderationLabels == null) {
this.moderationLabels = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.moderationLabels = new java.util.ArrayList<ContentModerationDetection>(moderationLabels);
} }
|
public class class_name {
    /**
     * Converts arithmetic RGB components (validated by
     * {@code validateArithmeticRGB}) to HSL.
     *
     * @param red   arithmetic red component
     * @param green arithmetic green component
     * @param blue  arithmetic blue component
     * @return {@code float[3]} of { hue in degrees [0, 360), saturation, lightness }
     */
    public static float[] toHSL(float red, float green, float blue) {
        validateArithmeticRGB(red);
        validateArithmeticRGB(green);
        validateArithmeticRGB(blue);

        final float lo = Math.min(Math.min(red, green), blue);
        final float hi = Math.max(Math.max(red, green), blue);
        final float chroma = hi - lo;

        // Sextant-based hue; stays 0 when the color is achromatic (chroma == 0).
        float h = 0.0f;
        if (chroma > 0.0f) {
            if (red >= green && red >= blue) {
                h = (green - blue) / chroma;
            } else if (green >= blue) {
                h = 2 + (blue - red) / chroma;
            } else {
                h = 4 + (red - green) / chroma;
            }
        }
        h *= 60.0f;
        if (h < 0.0f) {
            h += 360.0f;
        }

        final float sum = lo + hi;
        final float lightness = sum / 2.0f;
        final float saturation;
        if (lo == hi) {
            saturation = 0.0f; // achromatic: no saturation
        } else if (lightness < 0.5f) {
            saturation = chroma / sum;
        } else {
            saturation = chroma / (2.0f - hi - lo);
        }
        return new float[] { h, saturation, lightness };
    } }
|
public class class_name {
// RGB-to-HSL conversion (sextant-based hue, min/max lightness/saturation).
// The trailing "depends on control dependency" comments are machine-generated
// dependency annotations, not developer documentation.
public static float[] toHSL(float red, float green, float blue) {
validateArithmeticRGB(red);
validateArithmeticRGB(green);
validateArithmeticRGB(blue);
float min = Math.min(Math.min(red, green), blue);
float max = Math.max(Math.max(red, green), blue);
float range = max - min;
float hue = 0.0f;
if (range > 0.0f) {
if (red >= green && red >= blue) {
hue = (green - blue) / range; // depends on control dependency: [if], data = [none]
} else if (green >= blue) {
hue = 2 + (blue - red) / range; // depends on control dependency: [if], data = [none]
} else {
hue = 4 + (red - green) / range; // depends on control dependency: [if], data = [none]
}
}
hue *= 60.0f;
if (hue < 0.0f) {
hue += 360.0f; // depends on control dependency: [if], data = [none]
}
float sum = min + max;
float lightness = sum / 2.0f;
float saturation;
if (min == max) {
saturation = 0.0f; // depends on control dependency: [if], data = [none]
} else {
if (lightness < 0.5f) {
saturation = range / sum; // depends on control dependency: [if], data = [none]
} else {
saturation = range / (2.0f - max - min); // depends on control dependency: [if], data = [none]
}
}
return new float[] { hue, saturation, lightness };
} }
|
public class class_name {
    /**
     * Returns the hosts list, lazily creating an empty SdkInternalList on
     * first access so callers never receive {@code null}.
     *
     * @return the (possibly empty) list of hosts
     */
    public java.util.List<Host> getHosts() {
        if (hosts != null) {
            return hosts;
        }
        hosts = new com.amazonaws.internal.SdkInternalList<Host>();
        return hosts;
    } }
|
public class class_name {
// Lazily-initializing getter: never returns null. The trailing comment is a
// machine-generated dependency annotation, not developer documentation.
public java.util.List<Host> getHosts() {
if (hosts == null) {
hosts = new com.amazonaws.internal.SdkInternalList<Host>(); // depends on control dependency: [if], data = [none]
}
return hosts;
} }
|
public class class_name {
    /**
     * Delegates option validation to the standard doclet and records one
     * event per supplied option row (or a single "none supplied" event).
     *
     * @param options  javadoc option rows; may be null or empty
     * @param reporter receives validation diagnostics
     * @return the result of the standard doclet's validation
     */
    public static boolean validOptions(final String options[][], DocErrorReporter reporter) {
        final boolean toReturn = Standard.validOptions(options, reporter);
        if (options == null || options.length == 0) {
            eventSequence.add("validOptions (none supplied): " + toReturn);
            return toReturn;
        }
        for (int index = 0; index < options.length; index++) {
            // Join the row's tokens with single spaces; "<none>" for an empty row.
            final String joined = Arrays.stream(options[index]).reduce((l, r) -> l + " " + r).orElse("<none>");
            eventSequence.add("validOptions [" + index + " / " + options.length + "] (" + joined + "): " + toReturn);
        }
        // All Done.
        return toReturn;
    } }
|
public class class_name {
// Validates doclet options via Standard.validOptions, logging one event per
// option row. The trailing "depends on control dependency" comments are
// machine-generated dependency annotations, not developer documentation.
public static boolean validOptions(final String options[][], DocErrorReporter reporter) {
final boolean toReturn = Standard.validOptions(options, reporter);
if(options == null || options.length == 0) {
eventSequence.add("validOptions (none supplied): " + toReturn); // depends on control dependency: [if], data = [none]
} else{
for (int i = 0; i < options.length; i++) {
final String thisOption = Arrays.stream(options[i]).reduce((l, r) -> l + " " + r).orElse("<none>");
eventSequence.add("validOptions [" + i + " / " + options.length + "] (" + thisOption + "): " + toReturn); // depends on control dependency: [for], data = [i]
}
}
// All Done.
return toReturn;
} }
|
public class class_name {
    /**
     * ANTLR backtracking predicate: speculatively runs the fragment rule,
     * then restores the input stream and parser state, reporting whether the
     * speculative parse matched.
     *
     * @return {@code true} when the speculative parse succeeded
     */
    public final boolean synpred149_InternalXbaseWithAnnotations() {
        state.backtracking++;
        final int marker = input.mark();
        try {
            synpred149_InternalXbaseWithAnnotations_fragment(); // can never throw exception
        } catch (RecognitionException re) {
            System.err.println("impossible: "+re);
        }
        final boolean matched = !state.failed;
        // Undo all speculative consumption and leave the parser state clean.
        input.rewind(marker);
        state.backtracking--;
        state.failed=false;
        return matched;
    } }
|
public class class_name {
// ANTLR backtracking predicate: speculative parse + state restore. The
// trailing "depends on control dependency" comments are machine-generated
// dependency annotations, not developer documentation.
public final boolean synpred149_InternalXbaseWithAnnotations() {
state.backtracking++;
int start = input.mark();
try {
synpred149_InternalXbaseWithAnnotations_fragment(); // can never throw exception // depends on control dependency: [try], data = [none]
} catch (RecognitionException re) {
System.err.println("impossible: "+re);
} // depends on control dependency: [catch], data = [none]
boolean success = !state.failed;
input.rewind(start);
state.backtracking--;
state.failed=false;
return success;
} }
|
public class class_name {
    /**
     * Extracts anchor links from the response body.
     *
     * @param theResponse the fetched page
     * @return the links found, or an empty set when the response is not 200 OK
     */
    public Set<CrawlerURL> get(HTMLPageResponse theResponse) {
        final String pageUrl = theResponse.getUrl();
        // only populate if we have a valid response, else return empty set
        if (theResponse.getResponseCode() != HttpStatus.SC_OK) {
            return new HashSet<CrawlerURL>();
        }
        return fetch(AHREF, ABS_HREF, theResponse.getBody(), pageUrl);
    } }
|
public class class_name {
// Returns anchor links for a 200 OK response, else an empty set. The trailing
// comment is a machine-generated dependency annotation.
public Set<CrawlerURL> get(HTMLPageResponse theResponse) {
final String url = theResponse.getUrl();
Set<CrawlerURL> ahrefs = new HashSet<CrawlerURL>();
// only populate if we have a valid response, else return empty set
if (theResponse.getResponseCode() == HttpStatus.SC_OK) {
ahrefs = fetch(AHREF, ABS_HREF, theResponse.getBody(), url); // depends on control dependency: [if], data = [none]
}
return ahrefs;
} }
|
public class class_name {
    /**
     * Stores the given batch and, when running distributed, forwards a copy
     * to the partition manager. Empty batches are ignored.
     *
     * @param data the batch to store; must not be {@code null}
     * @throws IllegalArgumentException if {@code data} is {@code null}
     */
    @Override
    public void sendData(TreeSet<Data> data) {
        if (data == null) {
            throw new IllegalArgumentException("Data must be not null");
        }
        if (!data.isEmpty()) {
            addData(data);
            if (distributed) {
                // Hand the partition manager its own copy so it cannot mutate ours.
                partitionManager.notifyData(new ArrayList<>(data));
            }
        }
    } }
|
public class class_name {
// Stores a non-empty batch and optionally notifies the partition manager.
// The trailing "depends on control dependency" comments are machine-generated
// dependency annotations, not developer documentation.
@Override
public void sendData(TreeSet<Data> data) {
if (data == null) {
throw new IllegalArgumentException("Data must be not null");
}
if (data.isEmpty()) {
return; // depends on control dependency: [if], data = [none]
}
addData(data);
if (distributed) {
partitionManager.notifyData(new ArrayList<>(data)); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
    /**
     * Joins path fragments with the given separator. {@code null} and empty
     * intermediate fragments are skipped; a separator is inserted only when
     * the accumulated path does not already end with one. The final fragment
     * is always appended as-is, with no trailing separator.
     *
     * @param separator the separator to insert between fragments (must not be {@code null})
     * @param path      the fragments to join (must not be {@code null})
     * @return the joined path
     */
    public static String getPathSeparator(String separator, String... path)
    {
        Check.notNull(separator);
        Check.notNull(path);
        final StringBuilder fullPath = new StringBuilder(path.length);
        for (int i = 0; i < path.length; i++)
        {
            if (i == path.length - 1)
            {
                // Last fragment: appended unconditionally.
                fullPath.append(path[i]);
            }
            else if (path[i] != null && path[i].length() > 0)
            {
                fullPath.append(path[i]);
                // Bug fix: the old check compared only the final character with the
                // whole separator, so a multi-character separator was never detected
                // as already present and got duplicated. Compare the full suffix.
                if (!fullPath.toString().endsWith(separator))
                {
                    fullPath.append(separator);
                }
            }
        }
        return fullPath.toString();
    } }
|
public class class_name {
// Joins path fragments with a separator, skipping empty/null intermediates.
// The trailing "depends on control dependency" comments are machine-generated
// dependency annotations, not developer documentation.
public static String getPathSeparator(String separator, String... path)
{
Check.notNull(separator);
Check.notNull(path);
final StringBuilder fullPath = new StringBuilder(path.length);
for (int i = 0; i < path.length; i++)
{
if (i == path.length - 1)
{
fullPath.append(path[i]); // depends on control dependency: [if], data = [none]
}
else if (path[i] != null && path[i].length() > 0)
{
fullPath.append(path[i]); // depends on control dependency: [if], data = [(path[i]]
if (!fullPath.substring(fullPath.length() - 1, fullPath.length()).equals(separator))
{
fullPath.append(separator); // depends on control dependency: [if], data = [none]
}
}
}
return fullPath.toString();
} }
|
public class class_name {
    /**
     * Flood-fills the cluster containing {@code seed}: every node reachable
     * through edges and still marked RESET_GRAPH is relabeled with the seed's
     * graph id and appended to {@code graph}. Uses the shared {@code open}
     * list as an explicit DFS stack.
     *
     * @param seed  starting node whose graph id is propagated
     * @param graph output list receiving every newly claimed node
     * @throws RuntimeException if the edge/graph structure is inconsistent
     */
    void addToCluster(SquareNode seed, List<SquareNode> graph) {
        open.clear();
        open.add(seed);
        while( !open.isEmpty() ) {
            SquareNode node = open.remove( open.size() - 1 ); // pop the stack
            for (int edgeIdx = 0; edgeIdx < node.square.size(); edgeIdx++) {
                SquareEdge edge = node.edges[edgeIdx];
                if( edge == null )
                    continue;
                // The edge stores both endpoints; pick whichever isn't 'node'.
                SquareNode neighbor;
                if( edge.a == node ) {
                    neighbor = edge.b;
                } else if( edge.b == node ) {
                    neighbor = edge.a;
                } else {
                    throw new RuntimeException("BUG!");
                }
                if( neighbor.graph == SquareNode.RESET_GRAPH) {
                    // Unclaimed node: absorb it into this cluster and keep exploring.
                    neighbor.graph = node.graph;
                    graph.add(neighbor);
                    open.add(neighbor);
                } else if( neighbor.graph != node.graph ) {
                    // A node can only ever belong to one cluster.
                    throw new RuntimeException("BUG! "+neighbor.graph+" "+node.graph);
                }
            }
        }
    } }
|
public class class_name {
// DFS flood-fill that claims reachable RESET_GRAPH nodes into seed's cluster.
// The trailing "depends on control dependency" comments are machine-generated
// dependency annotations, not developer documentation.
void addToCluster(SquareNode seed, List<SquareNode> graph) {
open.clear();
open.add(seed);
while( !open.isEmpty() ) {
SquareNode n = open.remove( open.size() - 1 );
for (int i = 0; i < n.square.size(); i++) {
SquareEdge edge = n.edges[i];
if( edge == null )
continue;
SquareNode other;
if( edge.a == n )
other = edge.b;
else if( edge.b == n )
other = edge.a;
else
throw new RuntimeException("BUG!");
if( other.graph == SquareNode.RESET_GRAPH) {
other.graph = n.graph; // depends on control dependency: [if], data = [none]
graph.add(other); // depends on control dependency: [if], data = [none]
open.add(other); // depends on control dependency: [if], data = [none]
} else if( other.graph != n.graph ) {
throw new RuntimeException("BUG! "+other.graph+" "+n.graph);
}
}
}
} }
|
public class class_name {
    /**
     * Resolves the legacy icon file-path pattern into a modern icon class
     * specification, stripping the implicit "icon-md" size token.
     *
     * @return the icon class name, or {@code null} when it cannot be resolved
     */
    @Override
    public String getIconClassName() {
        // Oh the fun of somebody adding a legacy way of referencing images into 2.0 code
        String pattern = getIconFilePathPattern();
        if (pattern == null) {
            return null;
        }
        // here we go with the dance of the IconSet's
        String path = pattern.replace(":size", "24x24"); // we'll strip the icon-md to get the class name
        if (path.indexOf('/') == -1) {
            // this one is easy... too easy... also will never happen
            return IconSet.toNormalizedIconNameClass(path);
        }
        if (Jenkins.RESOURCE_PATH.length() > 0 && path.startsWith(Jenkins.RESOURCE_PATH)) {
            // will to live falling
            path = path.substring(Jenkins.RESOURCE_PATH.length());
        }
        Icon icon = IconSet.icons.getIconByUrl(path);
        if (icon == null) {
            return null;
        }
        return icon.getClassSpec().replaceAll("\\s*icon-md\\s*", " ").replaceAll("\\s+", " ");
    } }
|
public class class_name {
// Resolves a legacy icon path pattern to an icon class name (or null). The
// trailing "depends on control dependency" comments are machine-generated
// dependency annotations, not developer documentation.
@Override
public String getIconClassName() {
// Oh the fun of somebody adding a legacy way of referencing images into 2.0 code
String pattern = getIconFilePathPattern();
if (pattern != null) {
// here we go with the dance of the IconSet's
String path = pattern.replace(":size", "24x24"); // we'll strip the icon-md to get the class name
if (path.indexOf('/') == -1) {
// this one is easy... too easy... also will never happen
return IconSet.toNormalizedIconNameClass(path); // depends on control dependency: [if], data = [none]
}
if (Jenkins.RESOURCE_PATH.length() > 0 && path.startsWith(Jenkins.RESOURCE_PATH)) {
// will to live falling
path = path.substring(Jenkins.RESOURCE_PATH.length()); // depends on control dependency: [if], data = [(Jenkins.RESOURCE_PATH.length()]
}
Icon icon = IconSet.icons.getIconByUrl(path);
if (icon != null) {
return icon.getClassSpec().replaceAll("\\s*icon-md\\s*", " ").replaceAll("\\s+", " "); // depends on control dependency: [if], data = [none]
}
}
return null;
} }
|
public class class_name {
    /**
     * Rebuilds a composite (embedded) id object from its serialized string
     * form. The string is split on {@code COMPOSITE_KEY_SEPARATOR} and each
     * token is assigned, in declaration order, to the next non-transient,
     * non-static field of the id class.
     *
     * @param m       entity metadata describing the id attribute
     * @param idValue serialized composite key; may be {@code null}
     * @return the populated embedded id instance, or {@code null} when
     *         {@code idValue} is {@code null}
     */
    private Object deserializeIdAttributeValue(final EntityMetadata m, String idValue)
    {
        if (idValue == null)
        {
            return null;
        }
        Class<?> embeddableClass = m.getIdAttribute().getBindableJavaType();
        // Fixed: the original contained a duplicated self-assignment
        // ("Object x = x = ..."), legal Java but clearly a typo.
        Object embeddedObject = KunderaCoreUtils.createNewInstance(embeddableClass);

        // Split the serialized key into its component tokens.
        List<String> tokens = new ArrayList<String>();
        StringTokenizer st = new StringTokenizer(idValue, COMPOSITE_KEY_SEPARATOR);
        while (st.hasMoreTokens())
        {
            tokens.add(st.nextToken()); // nextToken() already returns String; no cast needed
        }

        int count = 0;
        for (Field embeddedField : embeddableClass.getDeclaredFields())
        {
            // Only persistable fields participate in the composite key; stop
            // assigning once the tokens are exhausted.
            if (!ReflectUtils.isTransientOrStatic(embeddedField) && count < tokens.size())
            {
                String value = tokens.get(count++);
                PropertyAccessorHelper.set(embeddedObject, embeddedField, value);
            }
        }
        return embeddedObject;
    } }
|
public class class_name {
// Rebuilds a composite id from its serialized string form. The trailing
// "depends on control dependency" comments are machine-generated dependency
// annotations, not developer documentation.
private Object deserializeIdAttributeValue(final EntityMetadata m, String idValue)
{
if (idValue == null)
{
return null; // depends on control dependency: [if], data = [none]
}
Class<?> embeddableClass = m.getIdAttribute().getBindableJavaType();
Object embeddedObject = embeddedObject = KunderaCoreUtils.createNewInstance(embeddableClass);
List<String> tokens = new ArrayList<String>();
StringTokenizer st = new StringTokenizer((String) idValue, COMPOSITE_KEY_SEPARATOR);
while (st.hasMoreTokens())
{
tokens.add((String) st.nextElement());
}
int count = 0;
for (Field embeddedField : embeddableClass.getDeclaredFields())
{
if (!ReflectUtils.isTransientOrStatic(embeddedField))
{
if (count < tokens.size())
{
String value = tokens.get(count++);
PropertyAccessorHelper.set(embeddedObject, embeddedField, value); // depends on control dependency: [if], data = [none]
}
}
}
return embeddedObject;
} }
|
public class class_name {
    /**
     * Reports whether any tuple-value (column) reference inside the given
     * expression tree refers to the supplied table alias.
     *
     * @param expr       expression to inspect
     * @param tableAlias alias to look for; must not be {@code null}
     * @return {@code true} if at least one column reference targets the alias
     */
    public static boolean isOperandDependentOnTable(AbstractExpression expr,
            String tableAlias) {
        assert(tableAlias != null);
        boolean dependent = false;
        for (TupleValueExpression tve :
                ExpressionUtil.getTupleValueExpressions(expr)) {
            if (tableAlias.equals(tve.getTableAlias())) {
                dependent = true;
                break;
            }
        }
        return dependent;
    } }
|
public class class_name {
// True when any column reference in the expression targets the alias. The
// trailing comment is a machine-generated dependency annotation.
public static boolean isOperandDependentOnTable(AbstractExpression expr,
String tableAlias) {
assert(tableAlias != null);
for (TupleValueExpression tve :
ExpressionUtil.getTupleValueExpressions(expr)) {
if (tableAlias.equals(tve.getTableAlias())) {
return true; // depends on control dependency: [if], data = [none]
}
}
return false;
} }
|
public class class_name {
    /**
     * Returns (creating and caching on first request) the node discovery for
     * the given service, wired to listen to every node type that can host it.
     *
     * @param serviceName the service to discover
     * @return the cached {@code DruidNodeDiscovery} for the service
     * @throws IAE if the service name is unknown
     */
    public DruidNodeDiscovery getForService(String serviceName)
    {
        return serviceDiscoveryMap.computeIfAbsent(serviceName, svc -> {
            Set<NodeType> watchedTypes = DruidNodeDiscoveryProvider.SERVICE_TO_NODE_TYPES.get(svc);
            if (watchedTypes == null) {
                throw new IAE("Unknown service [%s].", svc);
            }
            ServiceDruidNodeDiscovery discovery = new ServiceDruidNodeDiscovery(svc, watchedTypes.size());
            // One shared listener filters and gathers updates from every node type.
            DruidNodeDiscovery.Listener sharedListener = discovery.filteringUpstreamListener();
            for (NodeType watched : watchedTypes) {
                getForNodeType(watched).registerListener(sharedListener);
            }
            return discovery;
        });
    } }
|
public class class_name {
// Caches a per-service node discovery wired to all hosting node types. The
// trailing comment is a machine-generated dependency annotation.
public DruidNodeDiscovery getForService(String serviceName)
{
return serviceDiscoveryMap.computeIfAbsent(
serviceName,
service -> {
Set<NodeType> nodeTypesToWatch = DruidNodeDiscoveryProvider.SERVICE_TO_NODE_TYPES.get(service);
if (nodeTypesToWatch == null) {
throw new IAE("Unknown service [%s].", service);
}
ServiceDruidNodeDiscovery serviceDiscovery = new ServiceDruidNodeDiscovery(service, nodeTypesToWatch.size());
DruidNodeDiscovery.Listener filteringGatheringUpstreamListener =
serviceDiscovery.filteringUpstreamListener();
for (NodeType nodeType : nodeTypesToWatch) {
getForNodeType(nodeType).registerListener(filteringGatheringUpstreamListener); // depends on control dependency: [for], data = [nodeType]
}
return serviceDiscovery;
}
);
} }
|
public class class_name {
    /**
     * Creates a task and persists it, linking it to the given scope: a BPMN
     * execution (which also controls custom-listener skipping) or a CMMN case
     * execution; any other scope yields a stand-alone task.
     *
     * @param execution the owning scope
     * @return the newly created, inserted task
     */
    public static TaskEntity createAndInsert(VariableScope execution) {
        TaskEntity task = create();
        if (execution instanceof ExecutionEntity) {
            ExecutionEntity executionEntity = (ExecutionEntity) execution;
            task.setExecution(executionEntity);
            task.skipCustomListeners = executionEntity.isSkipCustomListeners();
            task.insert(executionEntity);
            return task;
        }
        if (execution instanceof CaseExecutionEntity) {
            task.setCaseExecution((DelegateCaseExecution) execution);
        }
        task.insert(null);
        return task;
    } }
|
public class class_name {
// Creates and persists a task bound to a BPMN execution, a CMMN case
// execution, or stand-alone. The trailing "depends on control dependency"
// comments are machine-generated dependency annotations.
public static TaskEntity createAndInsert(VariableScope execution) {
TaskEntity task = create();
if (execution instanceof ExecutionEntity) {
ExecutionEntity executionEntity = (ExecutionEntity) execution;
task.setExecution(executionEntity); // depends on control dependency: [if], data = [none]
task.skipCustomListeners = executionEntity.isSkipCustomListeners(); // depends on control dependency: [if], data = [none]
task.insert(executionEntity); // depends on control dependency: [if], data = [none]
return task; // depends on control dependency: [if], data = [none]
}
else if (execution instanceof CaseExecutionEntity) {
task.setCaseExecution((DelegateCaseExecution) execution); // depends on control dependency: [if], data = [none]
}
task.insert(null);
return task;
} }
|
public class class_name {
    /**
     * Installs the message-resources gateway exactly once.
     *
     * @param specified the gateway to install; must not be {@code null}
     * @throws IllegalArgumentException if {@code specified} is {@code null}
     * @throws IllegalStateException    if a gateway was already installed
     */
    public void acceptGateway(MessageResourcesGateway specified) {
        logger.info("...Accepting the gateway of message resources: " + specified);
        if (specified == null) {
            throw new IllegalArgumentException("The argument 'specified' should not be null.");
        }
        if (gateway != null) {
            throw new IllegalStateException("The gateway already exists: existing=" + gateway + " specified=" + specified);
        }
        gateway = specified;
    } }
|
public class class_name {
// One-shot gateway installer: rejects null input and double installation.
// The trailing comment is a machine-generated dependency annotation.
public void acceptGateway(MessageResourcesGateway specified) {
logger.info("...Accepting the gateway of message resources: " + specified);
if (specified == null) {
String msg = "The argument 'specified' should not be null."; // depends on control dependency: [if], data = [none]
throw new IllegalArgumentException(msg);
}
if (gateway != null) {
String msg = "The gateway already exists: existing=" + gateway + " specified=" + specified;
throw new IllegalStateException(msg);
}
gateway = specified;
} }
|
public class class_name {
    /**
     * Executes a remote command over an SSH session and returns its standard
     * output decoded with the given charset; standard error is streamed to
     * {@code errStream}. The channel and input stream are always closed.
     *
     * @param session   established SSH session
     * @param cmd       command line to execute
     * @param charset   charset for encoding the command and decoding its
     *                  output; defaults to UTF-8 when {@code null}
     * @param errStream sink for the command's stderr
     * @return the command's stdout as a string
     * @throws IORuntimeException on I/O failure
     */
    public static String exec(Session session, String cmd, Charset charset, OutputStream errStream) {
        if (null == charset) {
            charset = CharsetUtil.CHARSET_UTF_8;
        }
        ChannelExec channel = (ChannelExec) openChannel(session, ChannelType.EXEC);
        channel.setCommand(StrUtil.bytes(cmd, charset));
        channel.setInputStream(null);
        channel.setErrStream(errStream);
        InputStream in = null;
        try {
            in = channel.getInputStream();
            // Bug fix: the output was previously always decoded as UTF-8,
            // ignoring the caller-supplied charset used for the command itself.
            return IoUtil.read(in, charset);
        } catch (IOException e) {
            throw new IORuntimeException(e);
        } finally {
            IoUtil.close(in);
            close(channel);
        }
    } }
|
public class class_name {
// Runs a remote SSH command and returns its stdout; always closes resources.
// The interleaved "depends on control dependency" comments are
// machine-generated dependency annotations, not developer documentation.
public static String exec(Session session, String cmd, Charset charset, OutputStream errStream) {
if (null == charset) {
charset = CharsetUtil.CHARSET_UTF_8;
// depends on control dependency: [if], data = [none]
}
ChannelExec channel = (ChannelExec) openChannel(session, ChannelType.EXEC);
channel.setCommand(StrUtil.bytes(cmd, charset));
channel.setInputStream(null);
channel.setErrStream(errStream);
InputStream in = null;
try {
in = channel.getInputStream();
// depends on control dependency: [try], data = [none]
return IoUtil.read(in, CharsetUtil.CHARSET_UTF_8);
// depends on control dependency: [try], data = [none]
} catch (IOException e) {
throw new IORuntimeException(e);
} finally {
// depends on control dependency: [catch], data = [none]
IoUtil.close(in);
close(channel);
}
} }
|
public class class_name {
/**
 * Prepares and dispatches a single stored message to a fresh copy of the
 * given scanner plugin on a pooled thread.
 *
 * Steps: load the message by id; make sure it has a response (re-sending the
 * request if needed); instantiate and configure a new plugin instance; then
 * busy-wait for a free pool thread, polling every 200 ms unless stopped.
 *
 * @param plugin    the scanner to run (a new instance is created from it)
 * @param messageId id of the stored message to scan
 * @return true when the scan was dispatched; false on load/setup failure or stop
 */
private boolean scanMessage(Plugin plugin, int messageId) {
Plugin test;
HistoryReference historyReference;
HttpMessage msg;
try {
historyReference = new HistoryReference(messageId, true);
msg = historyReference.getHttpMessage();
} catch (HttpMalformedHeaderException | DatabaseException e) {
// A message that cannot be loaded is skipped, not fatal to the scan.
log.warn("Failed to read message with ID [" + messageId + "], cause: " + e.getMessage());
return false;
}
try {
// Ensure the temporary nodes, added automatically to Sites tree, have a response.
// The scanners might base the logic/attacks on the state of the response (e.g. status code).
if (msg.getResponseHeader().isEmpty()) {
msg = msg.cloneRequest();
if (!obtainResponse(historyReference, msg)) {
return false;
}
}
if (log.isDebugEnabled()) {
log.debug("scanSingleNode node plugin=" + plugin.getName() + " node=" + historyReference.getURI().toString());
}
// Each scan gets its own plugin instance so per-scan state is isolated.
test = plugin.getClass().getDeclaredConstructor().newInstance();
test.setConfig(plugin.getConfig());
if (this.ruleConfigParam != null) {
// Set the configuration rules
for (RuleConfig rc : this.ruleConfigParam.getAllRuleConfigs()) {
test.getConfig().setProperty(rc.getKey(), rc.getValue());
}
}
// Copy tuning parameters from the template plugin onto the new instance.
test.setDelayInMs(plugin.getDelayInMs());
test.setDefaultAlertThreshold(plugin.getAlertThreshold());
test.setDefaultAttackStrength(plugin.getAttackStrength());
test.setTechSet(getTechSet());
test.init(msg, this);
notifyHostProgress(plugin.getName() + ": " + msg.getRequestHeader().getURI().toString());
} catch (Exception e) {
log.error(e.getMessage() + " " + historyReference.getURI().toString(), e);
return false;
}
// Poll for a free pool thread; abort promptly if the scan is stopped.
Thread thread;
do {
if (this.isStop()) {
return false;
}
thread = threadPool.getFreeThreadAndRun(test);
if (thread == null) {
Util.sleep(200);
}
} while (thread == null);
mapPluginStats.get(plugin.getId()).incProgress();
return true;
} }
|
public class class_name {
// Loads a stored message, ensures it has a response, configures a fresh
// plugin instance and dispatches it on a pooled thread. The interleaved
// "depends on control dependency" comments are machine-generated dependency
// annotations, not developer documentation.
private boolean scanMessage(Plugin plugin, int messageId) {
Plugin test;
HistoryReference historyReference;
HttpMessage msg;
try {
historyReference = new HistoryReference(messageId, true);
// depends on control dependency: [try], data = [none]
msg = historyReference.getHttpMessage();
// depends on control dependency: [try], data = [none]
} catch (HttpMalformedHeaderException | DatabaseException e) {
log.warn("Failed to read message with ID [" + messageId + "], cause: " + e.getMessage());
return false;
}
// depends on control dependency: [catch], data = [none]
try {
// Ensure the temporary nodes, added automatically to Sites tree, have a response.
// The scanners might base the logic/attacks on the state of the response (e.g. status code).
if (msg.getResponseHeader().isEmpty()) {
msg = msg.cloneRequest();
// depends on control dependency: [if], data = [none]
if (!obtainResponse(historyReference, msg)) {
return false;
// depends on control dependency: [if], data = [none]
}
}
if (log.isDebugEnabled()) {
log.debug("scanSingleNode node plugin=" + plugin.getName() + " node=" + historyReference.getURI().toString());
// depends on control dependency: [if], data = [none]
}
test = plugin.getClass().getDeclaredConstructor().newInstance();
// depends on control dependency: [try], data = [none]
test.setConfig(plugin.getConfig());
// depends on control dependency: [try], data = [none]
if (this.ruleConfigParam != null) {
// Set the configuration rules
for (RuleConfig rc : this.ruleConfigParam.getAllRuleConfigs()) {
test.getConfig().setProperty(rc.getKey(), rc.getValue());
// depends on control dependency: [for], data = [rc]
}
}
test.setDelayInMs(plugin.getDelayInMs());
// depends on control dependency: [try], data = [none]
test.setDefaultAlertThreshold(plugin.getAlertThreshold());
// depends on control dependency: [try], data = [none]
test.setDefaultAttackStrength(plugin.getAttackStrength());
// depends on control dependency: [try], data = [none]
test.setTechSet(getTechSet());
// depends on control dependency: [try], data = [none]
test.init(msg, this);
// depends on control dependency: [try], data = [none]
notifyHostProgress(plugin.getName() + ": " + msg.getRequestHeader().getURI().toString());
// depends on control dependency: [try], data = [none]
} catch (Exception e) {
log.error(e.getMessage() + " " + historyReference.getURI().toString(), e);
return false;
}
// depends on control dependency: [catch], data = [none]
Thread thread;
do {
if (this.isStop()) {
return false;
// depends on control dependency: [if], data = [none]
}
thread = threadPool.getFreeThreadAndRun(test);
if (thread == null) {
Util.sleep(200);
// depends on control dependency: [if], data = [none]
}
} while (thread == null);
mapPluginStats.get(plugin.getId()).incProgress();
return true;
} }
|
public class class_name {
    /**
     * Sets the access token while holding the credential lock.
     *
     * @param accessToken the new access token; may be {@code null}
     * @return this credential, for chaining
     */
    public StoredCredential setAccessToken(String accessToken) {
        lock.lock();
        try {
            this.accessToken = accessToken;
        } finally {
            // Always release, even if the assignment were to fail.
            lock.unlock();
        }
        return this;
    } }
|
public class class_name {
// Lock-guarded token setter returning this for chaining. The trailing
// comment is a machine-generated dependency annotation.
public StoredCredential setAccessToken(String accessToken) {
lock.lock();
try {
this.accessToken = accessToken; // depends on control dependency: [try], data = [none]
} finally {
lock.unlock();
}
return this;
} }
|
public class class_name {
@Override
public void initModule() throws ModuleInitializationException {
int level = getRequiredInt("level", 0, 1);
if (level == 0) {
logger.info("Disabled in configuration ( level={} ), exiting", level);
return;
}
boolean syncUpdates = getBoolean("syncUpdates", false);
try {
TriplestoreConnector connector = null;
String ds = getParameter("datastore");
if (ds != null){
logger.info("Configuring triplestore connector from fcfg datastore configuration {}", ds);
connector =
getConnector(getServer()
.getDatastoreConfig(ds));
} else {
logger.info("Configuring triplestore from bean {}", TriplestoreConnector.class.getName());
connector =
getServer().getBean(TriplestoreConnector.class.getName(),TriplestoreConnector.class);
}
TripleGenerator generator =
getServer().getBean(TripleGenerator.class.getName(), TripleGenerator.class);
_ri = new ResourceIndexImpl(connector,
generator,
level,
syncUpdates);
setAliasMap(getAliases());
} catch (Exception e) {
throw new ModuleInitializationException("Error initializing RI",
getRole(),
e);
}
} }
|
public class class_name {
// Resource Index module init: level 0 disables; connector from datastore
// config or bean. The trailing "depends on control dependency" comments are
// machine-generated dependency annotations, not developer documentation.
@Override
public void initModule() throws ModuleInitializationException {
int level = getRequiredInt("level", 0, 1);
if (level == 0) {
logger.info("Disabled in configuration ( level={} ), exiting", level);
return;
}
boolean syncUpdates = getBoolean("syncUpdates", false);
try {
TriplestoreConnector connector = null;
String ds = getParameter("datastore");
if (ds != null){
logger.info("Configuring triplestore connector from fcfg datastore configuration {}", ds); // depends on control dependency: [if], data = [none]
connector =
getConnector(getServer()
.getDatastoreConfig(ds)); // depends on control dependency: [if], data = [none]
} else {
logger.info("Configuring triplestore from bean {}", TriplestoreConnector.class.getName()); // depends on control dependency: [if], data = [none]
connector =
getServer().getBean(TriplestoreConnector.class.getName(),TriplestoreConnector.class); // depends on control dependency: [if], data = [none]
}
TripleGenerator generator =
getServer().getBean(TripleGenerator.class.getName(), TripleGenerator.class);
_ri = new ResourceIndexImpl(connector,
generator,
level,
syncUpdates);
setAliasMap(getAliases());
} catch (Exception e) {
throw new ModuleInitializationException("Error initializing RI",
getRole(),
e);
}
} }
|
public class class_name {
    /**
     * Appends the given prefixes to the existing-allowed-prefixes list,
     * lazily creating the backing list sized to the varargs length.
     *
     * @param existingAllowedPrefixesToDirectConnectGateway prefixes to append
     * @return this proposal, for method chaining
     */
    public DirectConnectGatewayAssociationProposal withExistingAllowedPrefixesToDirectConnectGateway(
            RouteFilterPrefix... existingAllowedPrefixesToDirectConnectGateway) {
        if (this.existingAllowedPrefixesToDirectConnectGateway == null) {
            setExistingAllowedPrefixesToDirectConnectGateway(new com.amazonaws.internal.SdkInternalList<RouteFilterPrefix>(
                    existingAllowedPrefixesToDirectConnectGateway.length));
        }
        for (int i = 0; i < existingAllowedPrefixesToDirectConnectGateway.length; i++) {
            this.existingAllowedPrefixesToDirectConnectGateway.add(existingAllowedPrefixesToDirectConnectGateway[i]);
        }
        return this;
    } }
|
public class class_name {
// Varargs appender with lazy list creation. The trailing "depends on control
// dependency" comments are machine-generated dependency annotations.
public DirectConnectGatewayAssociationProposal withExistingAllowedPrefixesToDirectConnectGateway(
RouteFilterPrefix... existingAllowedPrefixesToDirectConnectGateway) {
if (this.existingAllowedPrefixesToDirectConnectGateway == null) {
setExistingAllowedPrefixesToDirectConnectGateway(new com.amazonaws.internal.SdkInternalList<RouteFilterPrefix>(
existingAllowedPrefixesToDirectConnectGateway.length)); // depends on control dependency: [if], data = [none]
}
for (RouteFilterPrefix ele : existingAllowedPrefixesToDirectConnectGateway) {
this.existingAllowedPrefixesToDirectConnectGateway.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} }
|
public class class_name {
    /**
     * Appends the given configuration settings, lazily creating the backing
     * list sized to the varargs length on first use.
     *
     * @param configurationSettings descriptions to append
     * @return this result object, for method chaining
     */
    public DescribeConfigurationSettingsResult withConfigurationSettings(ConfigurationSettingsDescription... configurationSettings) {
        if (this.configurationSettings == null) {
            setConfigurationSettings(new com.amazonaws.internal.SdkInternalList<ConfigurationSettingsDescription>(configurationSettings.length));
        }
        // Bulk-append the varargs; equivalent to adding each element in turn.
        java.util.Collections.addAll(this.configurationSettings, configurationSettings);
        return this;
    } }
|
public class class_name {
// Varargs appender with lazy list creation. The trailing "depends on control
// dependency" comments are machine-generated dependency annotations.
public DescribeConfigurationSettingsResult withConfigurationSettings(ConfigurationSettingsDescription... configurationSettings) {
if (this.configurationSettings == null) {
setConfigurationSettings(new com.amazonaws.internal.SdkInternalList<ConfigurationSettingsDescription>(configurationSettings.length)); // depends on control dependency: [if], data = [none]
}
for (ConfigurationSettingsDescription ele : configurationSettings) {
this.configurationSettings.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} }
|
public class class_name {
    /**
     * Exports the history table as CSV text, newest first. Each row carries
     * five quoted fields: the four raw columns, the timestamp re-formatted as
     * a locale date-time (kept in the legacy fifth position), then the last
     * column. Database errors are logged and yield whatever was built so far.
     *
     * @return CSV content of the history table
     */
    CharSequence buildHistory() {
        StringBuilder historyText = new StringBuilder(1000);
        SQLiteOpenHelper helper = new DBHelper(activity);
        try (SQLiteDatabase db = helper.getReadableDatabase();
             Cursor cursor = db.query(DBHelper.TABLE_NAME,
                                      COLUMNS,
                                      null, null, null, null,
                                      DBHelper.TIMESTAMP_COL + " DESC")) {
            DateFormat format = DateFormat.getDateTimeInstance(DateFormat.MEDIUM, DateFormat.MEDIUM);
            while (cursor.moveToNext()) {
                // First four raw columns, each quoted and comma-terminated.
                for (int col = 0; col < 4; col++) {
                    historyText.append('"').append(massageHistoryField(cursor.getString(col))).append("\",");
                }
                // Add timestamp again, formatted
                long timestamp = cursor.getLong(3);
                historyText.append('"').append(massageHistoryField(format.format(timestamp))).append("\",");
                // Above we're preserving the old ordering of columns which had formatted data in position 5
                historyText.append('"').append(massageHistoryField(cursor.getString(4))).append("\"\r\n");
            }
        } catch (SQLException sqle) {
            Log.w(TAG, sqle);
        }
        return historyText;
    } }
|
public class class_name {
// Exports the history table as CSV (newest first). The trailing "depends on
// control dependency" comments are machine-generated dependency annotations.
CharSequence buildHistory() {
StringBuilder historyText = new StringBuilder(1000);
SQLiteOpenHelper helper = new DBHelper(activity);
try (SQLiteDatabase db = helper.getReadableDatabase();
Cursor cursor = db.query(DBHelper.TABLE_NAME,
COLUMNS,
null, null, null, null,
DBHelper.TIMESTAMP_COL + " DESC")) {
DateFormat format = DateFormat.getDateTimeInstance(DateFormat.MEDIUM, DateFormat.MEDIUM);
while (cursor.moveToNext()) {
historyText.append('"').append(massageHistoryField(cursor.getString(0))).append("\","); // depends on control dependency: [while], data = [none]
historyText.append('"').append(massageHistoryField(cursor.getString(1))).append("\","); // depends on control dependency: [while], data = [none]
historyText.append('"').append(massageHistoryField(cursor.getString(2))).append("\","); // depends on control dependency: [while], data = [none]
historyText.append('"').append(massageHistoryField(cursor.getString(3))).append("\","); // depends on control dependency: [while], data = [none]
// Add timestamp again, formatted
long timestamp = cursor.getLong(3);
historyText.append('"').append(massageHistoryField(format.format(timestamp))).append("\","); // depends on control dependency: [while], data = [none]
// Above we're preserving the old ordering of columns which had formatted data in position 5
historyText.append('"').append(massageHistoryField(cursor.getString(4))).append("\"\r\n"); // depends on control dependency: [while], data = [none]
}
} catch (SQLException sqle) {
Log.w(TAG, sqle);
}
return historyText;
} }
|
public class class_name {
    /**
     * Checks whether the given plural attribute is a non-null collection
     * attribute whose element type is bindable to {@code paramClass}.
     *
     * @param pluralAttribute attribute to test; may be {@code null}
     * @param paramClass      expected element type
     * @return {@code true} only when all three checks pass
     */
    private <E> boolean onCheckCollectionAttribute(PluralAttribute<? super X, ?, ?> pluralAttribute, Class<E> paramClass)
    {
        // Short-circuit && preserves the original nested-if evaluation order.
        return pluralAttribute != null
                && isCollectionAttribute(pluralAttribute)
                && isBindable(pluralAttribute, paramClass);
    } }
|
public class class_name {
    // Returns true only when the attribute is non-null, is a collection
    // attribute, and is bindable to the supplied element class; false in
    // every other case (including a null attribute).
    private <E> boolean onCheckCollectionAttribute(PluralAttribute<? super X, ?, ?> pluralAttribute, Class<E> paramClass)
    {
        if (pluralAttribute != null)
        {
            if (isCollectionAttribute(pluralAttribute) && isBindable(pluralAttribute, paramClass))
            {
                return true; // depends on control dependency: [if], data = [none]
            }
        }
        return false;
    } }
|
public class class_name {
    /**
     * Wraps a {@code NodeSequence} so that its first {@code skip} rows are
     * discarded. A null sequence yields an empty sequence; a non-positive
     * skip or an already-empty sequence is returned unchanged. The skipping
     * is done lazily, batch by batch, inside the returned wrapper.
     */
    public static NodeSequence skip( final NodeSequence sequence,
                                     final int skip ) {
        if (sequence == null) return emptySequence(0);
        if (skip <= 0 || sequence.isEmpty()) return sequence;
        return new NodeSequence() {
            // Rows still to be discarded; decremented as batches are consumed.
            private int rowsToSkip = skip;
            @Override
            public long getRowCount() {
                long count = sequence.getRowCount();
                // count < skip also covers the "unknown" (-1) case, so the
                // wrapper reports unknown whenever it cannot do the subtraction.
                if (count < skip) return -1;
                return count == skip ? 0 : count - skip;
            }
            @Override
            public int width() {
                return sequence.width();
            }
            @Override
            public boolean isEmpty() {
                // NOTE(review): always reports non-empty, even if skipping will
                // consume every remaining row -- callers apparently rely on
                // nextBatch() to discover exhaustion; confirm.
                return false;
            }
            @Override
            public Batch nextBatch() {
                Batch next = sequence.nextBatch();
                while (next != null) {
                    if (rowsToSkip <= 0) return next;
                    long size = next.rowCount();
                    if (size >= 0) {
                        // The size of this batch is known ...
                        if (size == 0) {
                            // but it is empty, so just skip this batch altogether ...
                            next = sequence.nextBatch();
                            continue;
                        }
                        if (size <= rowsToSkip) {
                            // The entire batch is smaller than the number of rows we're skipping, so skip the whole batch ...
                            rowsToSkip -= size;
                            next = sequence.nextBatch();
                            continue;
                        }
                        // Otherwise, we have to skip the first `rowsToSkip` rows in the batch ...
                        for (int i = 0; i != rowsToSkip; ++i) {
                            if (!next.hasNext()) return null;
                            next.nextRow();
                            --size;
                        }
                        rowsToSkip = 0;
                        // Re-wrap so the batch advertises its reduced row count.
                        return new AlternateSizeBatch(next, size);
                    }
                    // Otherwise the size of the batch is not known, so we need to skip the rows individually ...
                    while (rowsToSkip > 0 && next.hasNext()) {
                        next.nextRow();
                        --rowsToSkip;
                    }
                    if (next.hasNext()) return next;
                    // Otherwise, we've used up all of this batch so just continue to the next ...
                    next = sequence.nextBatch();
                }
                return next;
            }
            @Override
            public void close() {
                sequence.close();
            }
            @Override
            public String toString() {
                return "(skip " + skip + " " + sequence + " )";
            }
        };
    } }
|
public class class_name {
    /**
     * Wraps a {@code NodeSequence} so that its first {@code skip} rows are
     * discarded lazily, batch by batch. A null sequence yields an empty
     * sequence; a non-positive skip or an already-empty sequence is returned
     * unchanged.
     */
    public static NodeSequence skip( final NodeSequence sequence,
                                     final int skip ) {
        if (sequence == null) return emptySequence(0);
        if (skip <= 0 || sequence.isEmpty()) return sequence;
        return new NodeSequence() {
            private int rowsToSkip = skip;
            @Override
            public long getRowCount() {
                long count = sequence.getRowCount();
                // count < skip also covers the "unknown" (-1) case.
                if (count < skip) return -1;
                return count == skip ? 0 : count - skip;
            }
            @Override
            public int width() {
                return sequence.width();
            }
            @Override
            public boolean isEmpty() {
                return false;
            }
            @Override
            public Batch nextBatch() {
                Batch next = sequence.nextBatch();
                while (next != null) {
                    if (rowsToSkip <= 0) return next;
                    long size = next.rowCount();
                    if (size >= 0) {
                        // The size of this batch is known ...
                        if (size == 0) {
                            // but it is empty, so just skip this batch altogether ...
                            next = sequence.nextBatch(); // depends on control dependency: [if], data = [none]
                            continue;
                        }
                        if (size <= rowsToSkip) {
                            // The entire batch is smaller than the number of rows we're skipping, so skip the whole batch ...
                            rowsToSkip -= size; // depends on control dependency: [if], data = [none]
                            next = sequence.nextBatch(); // depends on control dependency: [if], data = [none]
                            continue;
                        }
                        // Otherwise, we have to skip the first `rowsToSkip` rows in the batch ...
                        for (int i = 0; i != rowsToSkip; ++i) {
                            if (!next.hasNext()) return null;
                            next.nextRow(); // depends on control dependency: [for], data = [none]
                            --size; // depends on control dependency: [for], data = [none]
                        }
                        rowsToSkip = 0; // depends on control dependency: [if], data = [none]
                        return new AlternateSizeBatch(next, size); // depends on control dependency: [if], data = [none]
                    }
                    // Otherwise the size of the batch is not known, so we need to skip the rows individually ...
                    while (rowsToSkip > 0 && next.hasNext()) {
                        next.nextRow(); // depends on control dependency: [while], data = [none]
                        --rowsToSkip; // depends on control dependency: [while], data = [none]
                    }
                    if (next.hasNext()) return next;
                    // Otherwise, we've used up all of this batch so just continue to the next ...
                    next = sequence.nextBatch(); // depends on control dependency: [while], data = [none]
                }
                return next;
            }
            @Override
            public void close() {
                sequence.close();
            }
            @Override
            public String toString() {
                return "(skip " + skip + " " + sequence + " )";
            }
        };
    } }
|
public class class_name {
    /**
     * Fetches the last CPDAvailabilityEstimate (per the given ordering) that
     * matches the commerce availability estimate ID, or {@code null} when no
     * matching row exists.
     *
     * @param commerceAvailabilityEstimateId the ID to match
     * @param orderByComparator the ordering to apply; the final element of the
     *        ordered result set is returned
     * @return the last matching estimate, or {@code null}
     */
    @Override
    public CPDAvailabilityEstimate fetchByCommerceAvailabilityEstimateId_Last(
        long commerceAvailabilityEstimateId,
        OrderByComparator<CPDAvailabilityEstimate> orderByComparator) {
        int count = countByCommerceAvailabilityEstimateId(commerceAvailabilityEstimateId);
        if (count == 0) {
            return null;
        }
        // Ask only for the final row of the ordered result set.
        List<CPDAvailabilityEstimate> matches =
            findByCommerceAvailabilityEstimateId(
                commerceAvailabilityEstimateId, count - 1, count,
                orderByComparator);
        return matches.isEmpty() ? null : matches.get(0);
    } }
|
public class class_name {
    // Fetches the last CPDAvailabilityEstimate (per the given ordering) for
    // the commerce availability estimate ID, or null when nothing matches.
    // Implemented by counting the matches and querying only the final row.
    @Override
    public CPDAvailabilityEstimate fetchByCommerceAvailabilityEstimateId_Last(
        long commerceAvailabilityEstimateId,
        OrderByComparator<CPDAvailabilityEstimate> orderByComparator) {
        int count = countByCommerceAvailabilityEstimateId(commerceAvailabilityEstimateId);
        if (count == 0) {
            return null; // depends on control dependency: [if], data = [none]
        }
        List<CPDAvailabilityEstimate> list = findByCommerceAvailabilityEstimateId(commerceAvailabilityEstimateId,
                count - 1, count, orderByComparator);
        if (!list.isEmpty()) {
            return list.get(0); // depends on control dependency: [if], data = [none]
        }
        return null;
    } }
|
public class class_name {
    /**
     * Returns the destination address of the wrapped session, emitting
     * entry/exit trace records when entry tracing is enabled.
     */
    public SIDestinationAddress getDestinationAddress() {
        final String methodName = "getDestinationAddress";
        if (TRACE.isEntryEnabled()) {
            SibTr.entry(this, TRACE, methodName);
        }
        final SIDestinationAddress result =
                _delegateSession.getDestinationAddress();
        if (TRACE.isEntryEnabled()) {
            // NOTE(review): the exit trace logs _parentConnection rather than
            // the returned address -- looks intentional but worth confirming.
            SibTr.exit(this, TRACE, methodName, _parentConnection);
        }
        return result;
    } }
|
public class class_name {
    // Returns the wrapped session's destination address, with entry/exit
    // trace records emitted when entry tracing is enabled.
    public SIDestinationAddress getDestinationAddress() {
        final String methodName = "getDestinationAddress";
        if (TRACE.isEntryEnabled()) {
            SibTr.entry(this, TRACE, methodName); // depends on control dependency: [if], data = [none]
        }
        final SIDestinationAddress address = _delegateSession
                .getDestinationAddress();
        if (TRACE.isEntryEnabled()) {
            SibTr.exit(this, TRACE, methodName, _parentConnection); // depends on control dependency: [if], data = [none]
        }
        return address;
    } }
|
public class class_name {
    /**
     * Decrements the serializable-children counter when the removed child
     * counts as serializable: the child itself implements Serializable and
     * its proxy peer (if any) does too. The counter never goes below zero.
     */
    private void removeSerializable(BCSChild bcsc)
    {
        if (serializable <= 0) {
            return;
        }
        if (!(bcsc.child instanceof Serializable)) {
            return;
        }
        if (bcsc.proxyPeer != null && !(bcsc.proxyPeer instanceof Serializable)) {
            return;
        }
        serializable--;
    } }
|
public class class_name {
    // Decrements the serializable-children counter (never below zero) when
    // the removed child is Serializable and its proxy peer, if present, is
    // Serializable too.
    private void removeSerializable(BCSChild bcsc)
    {
        if (serializable > 0 && bcsc.child instanceof Serializable && (bcsc.proxyPeer == null || bcsc.proxyPeer instanceof Serializable))
        {
            serializable--; // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Enters a symbol into this scope. Grows the hash table when it is
     * two-thirds full, links the new entry at the head of both its hash
     * bucket and the element chain, and notifies registered listeners.
     * Must not be called on a shared scope.
     */
    public void enter(Symbol sym) {
        Assert.check(shared == 0);
        // Resize before insertion once the load factor reaches 2/3.
        if (nelems * 3 >= hashMask * 2) {
            dble();
        }
        int bucket = getIndex(sym.name);
        Entry previous = table[bucket];
        if (previous == null) {
            // First occupant of this bucket: terminate the chain with the
            // sentinel and count the newly used slot.
            previous = sentinel;
            nelems++;
        }
        Entry entry = new Entry(sym, previous, elems, this);
        table[bucket] = entry;
        elems = entry;
        // notify listeners
        listeners.symbolAdded(sym, this);
    } }
|
public class class_name {
    // Enters a symbol into this scope: grows the table at 2/3 load, links the
    // new entry at the head of its hash bucket and of the element chain, then
    // notifies listeners. Must not be called on a shared scope.
    public void enter(Symbol sym) {
        Assert.check(shared == 0);
        if (nelems * 3 >= hashMask * 2)
            dble();
        int hash = getIndex(sym.name);
        Entry old = table[hash];
        if (old == null) {
            // First occupant of this bucket: chain ends at the sentinel and
            // the slot now counts toward the load factor.
            old = sentinel; // depends on control dependency: [if], data = [none]
            nelems++; // depends on control dependency: [if], data = [none]
        }
        Entry e = new Entry(sym, old, elems, this);
        table[hash] = e;
        elems = e;
        //notify listeners
        listeners.symbolAdded(sym, this);
    } }
|
public class class_name {
    /**
     * Appends the given finding IDs to this request, lazily creating the
     * backing list on first use.
     *
     * @param findingIds the IDs to add
     * @return this request, for call chaining
     */
    public UnarchiveFindingsRequest withFindingIds(String... findingIds) {
        if (this.findingIds == null) {
            // Presize to the varargs length to avoid growth on the first add.
            setFindingIds(new java.util.ArrayList<String>(findingIds.length));
        }
        java.util.Collections.addAll(this.findingIds, findingIds);
        return this;
    } }
|
public class class_name {
    // Appends the given finding IDs to this request (creating the backing
    // list on first use) and returns this for call chaining.
    public UnarchiveFindingsRequest withFindingIds(String... findingIds) {
        if (this.findingIds == null) {
            setFindingIds(new java.util.ArrayList<String>(findingIds.length)); // depends on control dependency: [if], data = [none]
        }
        for (String ele : findingIds) {
            this.findingIds.add(ele); // depends on control dependency: [for], data = [ele]
        }
        return this;
    } }
|
public class class_name {
    /**
     * Reads up to {@code nLen} chars into {@code aBuf} at {@code nOfs},
     * refilling the internal buffer as needed and honouring the
     * "skip next LF" flag used for CR/LF line-ending handling. Large reads
     * with no mark in effect and no pending LF skip bypass the internal
     * buffer entirely. Returns the number of chars copied, or -1 at end of
     * stream.
     */
    private int _internalRead (final char [] aBuf, final int nOfs, final int nLen) throws IOException
    {
        if (m_nNextCharIndex >= m_nChars)
        {
            /*
             * If the requested length is at least as large as the buffer, and if
             * there is no mark/reset activity, and if line feeds are not being
             * skipped, do not bother to copy the characters into the local buffer. In
             * this way buffered streams will cascade harmlessly.
             */
            if (nLen >= m_aBuf.length && m_nMarkedChar <= UNMARKED && !m_bSkipLF)
                return m_aReader.read (aBuf, nOfs, nLen);
            _fill ();
        }
        if (m_nNextCharIndex >= m_nChars)
            return -1;
        if (m_bSkipLF)
        {
            // A CR was seen previously: consume a directly-following '\n' so
            // CRLF counts as a single line break.
            m_bSkipLF = false;
            if (m_aBuf[m_nNextCharIndex] == '\n')
            {
                m_nNextCharIndex++;
                if (m_nNextCharIndex >= m_nChars)
                    _fill ();
                if (m_nNextCharIndex >= m_nChars)
                    return -1;
            }
        }
        // Copy as many buffered chars as requested (or as remain).
        final int nBytesRead = Math.min (nLen, m_nChars - m_nNextCharIndex);
        System.arraycopy (m_aBuf, m_nNextCharIndex, aBuf, nOfs, nBytesRead);
        m_nNextCharIndex += nBytesRead;
        return nBytesRead;
    } }
|
public class class_name {
    /**
     * Reads up to {@code nLen} chars into {@code aBuf} at {@code nOfs},
     * refilling the internal buffer as needed and honouring the
     * "skip next LF" flag used for CR/LF handling. Large unmarked reads
     * bypass the internal buffer. Returns the number of chars copied, or -1
     * at end of stream.
     */
    private int _internalRead (final char [] aBuf, final int nOfs, final int nLen) throws IOException
    {
        if (m_nNextCharIndex >= m_nChars)
        {
            /*
             * If the requested length is at least as large as the buffer, and if
             * there is no mark/reset activity, and if line feeds are not being
             * skipped, do not bother to copy the characters into the local buffer. In
             * this way buffered streams will cascade harmlessly.
             */
            if (nLen >= m_aBuf.length && m_nMarkedChar <= UNMARKED && !m_bSkipLF)
                return m_aReader.read (aBuf, nOfs, nLen);
            _fill ();
        }
        if (m_nNextCharIndex >= m_nChars)
            return -1;
        if (m_bSkipLF)
        {
            m_bSkipLF = false;
            if (m_aBuf[m_nNextCharIndex] == '\n')
            {
                m_nNextCharIndex++; // depends on control dependency: [if], data = [none]
                if (m_nNextCharIndex >= m_nChars)
                    _fill ();
                if (m_nNextCharIndex >= m_nChars)
                    return -1;
            }
        }
        final int nBytesRead = Math.min (nLen, m_nChars - m_nNextCharIndex);
        System.arraycopy (m_aBuf, m_nNextCharIndex, aBuf, nOfs, nBytesRead);
        m_nNextCharIndex += nBytesRead;
        return nBytesRead;
    } }
|
public class class_name {
    /**
     * Resolves the available locale suffixes for a message bundle path.
     * The path may carry a namespace suffix starting with '(' or a filter
     * suffix starting with '['; the bundle name is everything before the
     * first such marker (or the whole path when neither is present or the
     * marker is at position 0).
     */
    public static List<String> getAvailableLocaleSuffixesForBundle(
            String messageBundlePath, String fileSuffix,
            GrailsServletContextResourceReader rsReader) {
        int idxNameSpace = messageBundlePath.indexOf("(");
        int idxFilter = messageBundlePath.indexOf("[");
        // Pick the earliest present marker; -1 when neither occurs.
        int idx;
        if (idxNameSpace == -1) {
            idx = idxFilter;
        } else if (idxFilter == -1) {
            idx = idxNameSpace;
        } else {
            idx = Math.min(idxNameSpace, idxFilter);
        }
        String messageBundle =
                (idx > 0) ? messageBundlePath.substring(0, idx) : messageBundlePath;
        return getAvailableLocaleSuffixes(messageBundle, fileSuffix, rsReader);
    } }
|
public class class_name {
    // Resolves the available locale suffixes for a message bundle path. The
    // path may carry a namespace suffix starting with '(' or a filter suffix
    // starting with '['; the bundle name is everything before the earliest
    // such marker (or the whole path when neither is present, or the marker
    // is at position 0).
    public static List<String> getAvailableLocaleSuffixesForBundle(
            String messageBundlePath, String fileSuffix,
            GrailsServletContextResourceReader rsReader) {
        int idxNameSpace = messageBundlePath.indexOf("(");
        int idxFilter = messageBundlePath.indexOf("[");
        int idx = -1;
        if (idxNameSpace != -1 && idxFilter != -1) {
            idx = Math.min(idxNameSpace, idxFilter); // depends on control dependency: [if], data = [(idxNameSpace]
        } else if (idxNameSpace != -1 && idxFilter == -1) {
            idx = idxNameSpace; // depends on control dependency: [if], data = [none]
        } else if (idxNameSpace == -1 && idxFilter != -1) {
            idx = idxFilter; // depends on control dependency: [if], data = [none]
        }
        String messageBundle = null;
        if (idx > 0) {
            messageBundle = messageBundlePath.substring(0, idx); // depends on control dependency: [if], data = [none]
        } else {
            messageBundle = messageBundlePath; // depends on control dependency: [if], data = [none]
        }
        return getAvailableLocaleSuffixes(messageBundle, fileSuffix, rsReader);
    } }
|
public class class_name {
    /**
     * Returns every cache key known for the given class, or an empty list if
     * the persister for that class cannot be created (the failure is logged,
     * not propagated).
     */
    @Override
    public <T> List<Object> getAllCacheKeys(final Class<T> clazz) {
        List<Object> keys;
        try {
            keys = getObjectPersister(clazz).getAllCacheKeys();
        } catch (CacheCreationException e) {
            Ln.e(e);
            keys = Collections.emptyList();
        }
        return keys;
    } }
|
public class class_name {
    // Returns every cache key known for the given class, or an empty list if
    // the persister for the class cannot be created (failure logged, not
    // propagated).
    @Override
    public <T> List<Object> getAllCacheKeys(final Class<T> clazz) {
        try {
            return getObjectPersister(clazz).getAllCacheKeys(); // depends on control dependency: [try], data = [none]
        } catch (CacheCreationException e) {
            Ln.e(e);
            return Collections.emptyList();
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Records response-time stats for a failed request and updates the
     * server's failure counters according to the retry handler's
     * circuit-tripping classification of the throwable. Does nothing when
     * {@code stats} is null; any error while recording is logged and never
     * propagated.
     */
    protected void noteError(ServerStats stats, ClientRequest request, Throwable e, long responseTime) {
        if (stats == null) {
            return;
        }
        try {
            recordStats(stats, responseTime);
            RetryHandler handler = getRetryHandler();
            if (handler == null || e == null) {
                return;
            }
            if (handler.isCircuitTrippingException(e)) {
                stats.incrementSuccessiveConnectionFailureCount();
                stats.addToFailureCount();
            } else {
                stats.clearSuccessiveConnectionFailureCount();
            }
        } catch (Exception ex) {
            logger.error("Error noting stats for client {}", clientName, ex);
        }
    } }
|
public class class_name {
    // Records response-time stats for a failed request and updates the
    // server's failure counters per the retry handler's circuit-tripping
    // classification. No-op when stats is null; errors while recording are
    // logged, never propagated.
    protected void noteError(ServerStats stats, ClientRequest request, Throwable e, long responseTime) {
        if (stats == null) {
            return; // depends on control dependency: [if], data = [none]
        }
        try {
            recordStats(stats, responseTime); // depends on control dependency: [try], data = [none]
            RetryHandler errorHandler = getRetryHandler();
            if (errorHandler != null && e != null) {
                if (errorHandler.isCircuitTrippingException(e)) {
                    stats.incrementSuccessiveConnectionFailureCount(); // depends on control dependency: [if], data = [none]
                    stats.addToFailureCount(); // depends on control dependency: [if], data = [none]
                } else {
                    stats.clearSuccessiveConnectionFailureCount(); // depends on control dependency: [if], data = [none]
                }
            }
        } catch (Exception ex) {
            logger.error("Error noting stats for client {}", clientName, ex);
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Scans the deployment's composite annotation index for a single
     * {@code @ProcessApplication} class and returns (or synthesizes) the
     * matching component description: a web component wrapping a
     * ServletProcessApplication, or an existing session-bean component.
     * Also attaches {@code @PostDeploy} / {@code @PreUndeploy} metadata to
     * the deployment unit. Returns null when the deployment carries no
     * annotation index or no process application; throws on multiple PAs,
     * multiple lifecycle methods, or an invalid PA class.
     */
    protected ComponentDescription detectExistingComponent(DeploymentUnit deploymentUnit) throws DeploymentUnitProcessingException {
        final EEModuleDescription eeModuleDescription = deploymentUnit.getAttachment(Attachments.EE_MODULE_DESCRIPTION);
        final EEApplicationClasses eeApplicationClasses = deploymentUnit.getAttachment(Attachments.EE_APPLICATION_CLASSES_DESCRIPTION);
        final CompositeIndex compositeIndex = deploymentUnit.getAttachment(org.jboss.as.server.deployment.Attachments.COMPOSITE_ANNOTATION_INDEX);
        final WarMetaData warMetaData = deploymentUnit.getAttachment(WarMetaData.ATTACHMENT_KEY);
        // extract deployment metadata
        List<AnnotationInstance> processApplicationAnnotations = null;
        List<AnnotationInstance> postDeployAnnnotations = null;
        List<AnnotationInstance> preUndeployAnnnotations = null;
        Set<ClassInfo> servletProcessApplications = null;
        if(compositeIndex != null) {
            processApplicationAnnotations = compositeIndex.getAnnotations(DotName.createSimple(ProcessApplication.class.getName()));
            postDeployAnnnotations = compositeIndex.getAnnotations(DotName.createSimple(PostDeploy.class.getName()));
            preUndeployAnnnotations = compositeIndex.getAnnotations(DotName.createSimple(PreUndeploy.class.getName()));
            servletProcessApplications = compositeIndex.getAllKnownSubclasses(DotName.createSimple(ServletProcessApplication.class.getName()));
        } else {
            // No annotation index attached: nothing to detect.
            return null;
        }
        if(processApplicationAnnotations.isEmpty()) {
            // no pa found, this is not a process application deployment.
            return null;
        } else if(processApplicationAnnotations.size() > 1) {
            // found multiple PAs -> unsupported.
            throw new DeploymentUnitProcessingException("Detected multiple classes annotated with @" + ProcessApplication.class.getSimpleName()
                    + ". A deployment must only provide a single @" + ProcessApplication.class.getSimpleName()
                    + " class.");
        } else {
            // found single PA
            AnnotationInstance annotationInstance = processApplicationAnnotations.get(0);
            ClassInfo paClassInfo = (ClassInfo) annotationInstance.target();
            String paClassName = paClassInfo.name().toString();
            ComponentDescription paComponent = null;
            // it can either be a Servlet Process Application or a Singleton Session Bean Component or
            if(servletProcessApplications.contains(paClassInfo)) {
                // Servlet Process Applications can only be deployed inside Web Applications
                if(warMetaData == null) {
                    throw new DeploymentUnitProcessingException("@ProcessApplication class is a ServletProcessApplication but deployment is not a Web Application.");
                }
                // check whether it's already a servlet context listener:
                JBossWebMetaData mergedJBossWebMetaData = warMetaData.getMergedJBossWebMetaData();
                List<ListenerMetaData> listeners = mergedJBossWebMetaData.getListeners();
                if(listeners == null) {
                    listeners = new ArrayList<ListenerMetaData>();
                    mergedJBossWebMetaData.setListeners(listeners);
                }
                boolean isListener = false;
                for (ListenerMetaData listenerMetaData : listeners) {
                    if(listenerMetaData.getListenerClass().equals(paClassInfo.name().toString())) {
                        isListener = true;
                    }
                }
                if(!isListener) {
                    // register as Servlet Context Listener
                    ListenerMetaData listener = new ListenerMetaData();
                    listener.setListenerClass(paClassName);
                    listeners.add(listener);
                    // synthesize WebComponent
                    WebComponentDescription paWebComponent = new WebComponentDescription(paClassName,
                            paClassName,
                            eeModuleDescription,
                            deploymentUnit.getServiceName(),
                            eeApplicationClasses);
                    eeModuleDescription.addComponent(paWebComponent);
                    deploymentUnit.addToAttachmentList(WebComponentDescription.WEB_COMPONENTS, paWebComponent.getStartServiceName());
                    paComponent = paWebComponent;
                } else {
                    // lookup the existing component
                    paComponent = eeModuleDescription.getComponentsByClassName(paClassName).get(0);
                }
                // deactivate sci
            } else {
                // if its not a ServletProcessApplication it must be a session bean component
                List<ComponentDescription> componentsByClassName = eeModuleDescription.getComponentsByClassName(paClassName);
                if (!componentsByClassName.isEmpty() && (componentsByClassName.get(0) instanceof SessionBeanComponentDescription)) {
                    paComponent = componentsByClassName.get(0);
                } else {
                    throw new DeploymentUnitProcessingException("Class " + paClassName + " is annotated with @" + ProcessApplication.class.getSimpleName()
                            + " but is neither a ServletProcessApplication nor an EJB Session Bean Component.");
                }
            }
            // attach additional metadata to the deployment unit
            if(!postDeployAnnnotations.isEmpty()) {
                if(postDeployAnnnotations.size()==1) {
                    ProcessApplicationAttachments.attachPostDeployDescription(deploymentUnit, postDeployAnnnotations.get(0));
                } else {
                    throw new DeploymentUnitProcessingException("There can only be a single method annotated with @PostDeploy. Found ["+postDeployAnnnotations+"]");
                }
            }
            if(!preUndeployAnnnotations.isEmpty()) {
                if(preUndeployAnnnotations.size()==1) {
                    ProcessApplicationAttachments.attachPreUndeployDescription(deploymentUnit, preUndeployAnnnotations.get(0));
                } else {
                    throw new DeploymentUnitProcessingException("There can only be a single method annotated with @PreUndeploy. Found ["+preUndeployAnnnotations+"]");
                }
            }
            return paComponent;
        }
    } }
|
public class class_name {
    /**
     * Scans the deployment's composite annotation index for a single
     * {@code @ProcessApplication} class and returns (or synthesizes) the
     * matching component description: a web component wrapping a
     * ServletProcessApplication, or an existing session-bean component.
     * Also attaches {@code @PostDeploy} / {@code @PreUndeploy} metadata to
     * the deployment unit. Returns null when the deployment carries no
     * annotation index or no process application; throws on multiple PAs,
     * multiple lifecycle methods, or an invalid PA class.
     */
    protected ComponentDescription detectExistingComponent(DeploymentUnit deploymentUnit) throws DeploymentUnitProcessingException {
        final EEModuleDescription eeModuleDescription = deploymentUnit.getAttachment(Attachments.EE_MODULE_DESCRIPTION);
        final EEApplicationClasses eeApplicationClasses = deploymentUnit.getAttachment(Attachments.EE_APPLICATION_CLASSES_DESCRIPTION);
        final CompositeIndex compositeIndex = deploymentUnit.getAttachment(org.jboss.as.server.deployment.Attachments.COMPOSITE_ANNOTATION_INDEX);
        final WarMetaData warMetaData = deploymentUnit.getAttachment(WarMetaData.ATTACHMENT_KEY);
        // extract deployment metadata
        List<AnnotationInstance> processApplicationAnnotations = null;
        List<AnnotationInstance> postDeployAnnnotations = null;
        List<AnnotationInstance> preUndeployAnnnotations = null;
        Set<ClassInfo> servletProcessApplications = null;
        if(compositeIndex != null) {
            processApplicationAnnotations = compositeIndex.getAnnotations(DotName.createSimple(ProcessApplication.class.getName()));
            postDeployAnnnotations = compositeIndex.getAnnotations(DotName.createSimple(PostDeploy.class.getName()));
            preUndeployAnnnotations = compositeIndex.getAnnotations(DotName.createSimple(PreUndeploy.class.getName()));
            servletProcessApplications = compositeIndex.getAllKnownSubclasses(DotName.createSimple(ServletProcessApplication.class.getName()));
        } else {
            return null;
        }
        if(processApplicationAnnotations.isEmpty()) {
            // no pa found, this is not a process application deployment.
            return null;
        } else if(processApplicationAnnotations.size() > 1) {
            // found multiple PAs -> unsupported.
            throw new DeploymentUnitProcessingException("Detected multiple classes annotated with @" + ProcessApplication.class.getSimpleName()
                    + ". A deployment must only provide a single @" + ProcessApplication.class.getSimpleName()
                    + " class.");
        } else {
            // found single PA
            AnnotationInstance annotationInstance = processApplicationAnnotations.get(0);
            ClassInfo paClassInfo = (ClassInfo) annotationInstance.target();
            String paClassName = paClassInfo.name().toString();
            ComponentDescription paComponent = null;
            // it can either be a Servlet Process Application or a Singleton Session Bean Component or
            if(servletProcessApplications.contains(paClassInfo)) {
                // Servlet Process Applications can only be deployed inside Web Applications
                if(warMetaData == null) {
                    throw new DeploymentUnitProcessingException("@ProcessApplication class is a ServletProcessApplication but deployment is not a Web Application.");
                }
                // check whether it's already a servlet context listener:
                JBossWebMetaData mergedJBossWebMetaData = warMetaData.getMergedJBossWebMetaData();
                List<ListenerMetaData> listeners = mergedJBossWebMetaData.getListeners();
                if(listeners == null) {
                    listeners = new ArrayList<ListenerMetaData>();
                    mergedJBossWebMetaData.setListeners(listeners);
                }
                boolean isListener = false;
                for (ListenerMetaData listenerMetaData : listeners) {
                    if(listenerMetaData.getListenerClass().equals(paClassInfo.name().toString())) {
                        isListener = true; // depends on control dependency: [if], data = [none]
                    }
                }
                if(!isListener) {
                    // register as Servlet Context Listener
                    ListenerMetaData listener = new ListenerMetaData();
                    listener.setListenerClass(paClassName);
                    listeners.add(listener);
                    // synthesize WebComponent
                    WebComponentDescription paWebComponent = new WebComponentDescription(paClassName,
                            paClassName,
                            eeModuleDescription,
                            deploymentUnit.getServiceName(),
                            eeApplicationClasses);
                    eeModuleDescription.addComponent(paWebComponent);
                    deploymentUnit.addToAttachmentList(WebComponentDescription.WEB_COMPONENTS, paWebComponent.getStartServiceName());
                    paComponent = paWebComponent;
                } else {
                    // lookup the existing component
                    paComponent = eeModuleDescription.getComponentsByClassName(paClassName).get(0);
                }
                // deactivate sci
            } else {
                // if its not a ServletProcessApplication it must be a session bean component
                List<ComponentDescription> componentsByClassName = eeModuleDescription.getComponentsByClassName(paClassName);
                if (!componentsByClassName.isEmpty() && (componentsByClassName.get(0) instanceof SessionBeanComponentDescription)) {
                    paComponent = componentsByClassName.get(0);
                } else {
                    throw new DeploymentUnitProcessingException("Class " + paClassName + " is annotated with @" + ProcessApplication.class.getSimpleName()
                            + " but is neither a ServletProcessApplication nor an EJB Session Bean Component.");
                }
            }
            // attach additional metadata to the deployment unit
            if(!postDeployAnnnotations.isEmpty()) {
                if(postDeployAnnnotations.size()==1) {
                    ProcessApplicationAttachments.attachPostDeployDescription(deploymentUnit, postDeployAnnnotations.get(0));
                } else {
                    throw new DeploymentUnitProcessingException("There can only be a single method annotated with @PostDeploy. Found ["+postDeployAnnnotations+"]");
                }
            }
            if(!preUndeployAnnnotations.isEmpty()) {
                if(preUndeployAnnnotations.size()==1) {
                    ProcessApplicationAttachments.attachPreUndeployDescription(deploymentUnit, preUndeployAnnnotations.get(0)); // depends on control dependency: [if], data = [none]
                } else {
                    throw new DeploymentUnitProcessingException("There can only be a single method annotated with @PreUndeploy. Found ["+preUndeployAnnnotations+"]");
                }
            }
            return paComponent;
        }
    } }
|
public class class_name {
    /**
     * Marshals a CreateJobOutput into the supplied protocol marshaller.
     *
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(CreateJobOutput createJobOutput, ProtocolMarshaller protocolMarshaller) {
        if (createJobOutput == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        final CreateJobOutput src = createJobOutput;
        try {
            // Field order mirrors the wire format and must not change.
            protocolMarshaller.marshall(src.getKey(), KEY_BINDING);
            protocolMarshaller.marshall(src.getThumbnailPattern(), THUMBNAILPATTERN_BINDING);
            protocolMarshaller.marshall(src.getThumbnailEncryption(), THUMBNAILENCRYPTION_BINDING);
            protocolMarshaller.marshall(src.getRotate(), ROTATE_BINDING);
            protocolMarshaller.marshall(src.getPresetId(), PRESETID_BINDING);
            protocolMarshaller.marshall(src.getSegmentDuration(), SEGMENTDURATION_BINDING);
            protocolMarshaller.marshall(src.getWatermarks(), WATERMARKS_BINDING);
            protocolMarshaller.marshall(src.getAlbumArt(), ALBUMART_BINDING);
            protocolMarshaller.marshall(src.getComposition(), COMPOSITION_BINDING);
            protocolMarshaller.marshall(src.getCaptions(), CAPTIONS_BINDING);
            protocolMarshaller.marshall(src.getEncryption(), ENCRYPTION_BINDING);
        } catch (Exception cause) {
            throw new SdkClientException("Unable to marshall request to JSON: " + cause.getMessage(), cause);
        }
    } }
|
public class class_name {
    // Marshals each CreateJobOutput field, in wire order, into the protocol
    // marshaller. Throws SdkClientException on a null argument or when any
    // field fails to marshall.
    public void marshall(CreateJobOutput createJobOutput, ProtocolMarshaller protocolMarshaller) {
        if (createJobOutput == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(createJobOutput.getKey(), KEY_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(createJobOutput.getThumbnailPattern(), THUMBNAILPATTERN_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(createJobOutput.getThumbnailEncryption(), THUMBNAILENCRYPTION_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(createJobOutput.getRotate(), ROTATE_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(createJobOutput.getPresetId(), PRESETID_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(createJobOutput.getSegmentDuration(), SEGMENTDURATION_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(createJobOutput.getWatermarks(), WATERMARKS_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(createJobOutput.getAlbumArt(), ALBUMART_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(createJobOutput.getComposition(), COMPOSITION_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(createJobOutput.getCaptions(), CAPTIONS_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(createJobOutput.getEncryption(), ENCRYPTION_BINDING); // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Marshals a KeyRange into the supplied protocol marshaller.
     *
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(KeyRange keyRange, ProtocolMarshaller protocolMarshaller) {
        if (keyRange == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        final KeyRange src = keyRange;
        try {
            protocolMarshaller.marshall(src.getBeginMarker(), BEGINMARKER_BINDING);
            protocolMarshaller.marshall(src.getEndMarker(), ENDMARKER_BINDING);
        } catch (Exception cause) {
            throw new SdkClientException("Unable to marshall request to JSON: " + cause.getMessage(), cause);
        }
    } }
|
public class class_name {
    // Marshals the begin/end markers of a KeyRange into the protocol
    // marshaller. Throws SdkClientException on a null argument or when
    // marshalling fails.
    public void marshall(KeyRange keyRange, ProtocolMarshaller protocolMarshaller) {
        if (keyRange == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(keyRange.getBeginMarker(), BEGINMARKER_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(keyRange.getEndMarker(), ENDMARKER_BINDING); // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
public static <T> void onDiscard(@Nullable T element, Context context) {
Consumer<Object> hook = context.getOrDefault(Hooks.KEY_ON_DISCARD, null);
if (element != null && hook != null) {
try {
hook.accept(element);
}
catch (Throwable t) {
log.warn("Error in discard hook", t);
}
}
} }
|
public class class_name {
    // Invokes the context's discard hook (if registered) on the element,
    // logging and swallowing anything the hook throws. A null element or
    // absent hook is a no-op.
    public static <T> void onDiscard(@Nullable T element, Context context) {
        Consumer<Object> hook = context.getOrDefault(Hooks.KEY_ON_DISCARD, null);
        if (element != null && hook != null) {
            try {
                hook.accept(element); // depends on control dependency: [try], data = [none]
            }
            catch (Throwable t) {
                log.warn("Error in discard hook", t);
            } // depends on control dependency: [catch], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Gets the named X.509 certificate-info attribute. The attribute name may
     * carry a suffix selecting a sub-attribute of the component (e.g.
     * "subject" vs. "subject.dname"); with no suffix the whole component
     * object is returned. Throws CertificateParsingException for an
     * unrecognized name; returns null for a recognized attribute with no
     * value.
     */
    public Object get(String name)
    throws CertificateException, IOException {
        X509AttributeName attrName = new X509AttributeName(name);
        int attr = attributeMap(attrName.getPrefix());
        if (attr == 0) {
            throw new CertificateParsingException(
                          "Attribute name not recognized: " + name);
        }
        String suffix = attrName.getSuffix();
        switch (attr) { // frequently used attributes first
        case (ATTR_EXTENSIONS):
            if (suffix == null) {
                return(extensions);
            } else {
                if (extensions == null) {
                    return null;
                } else {
                    return(extensions.get(suffix));
                }
            }
        case (ATTR_SUBJECT):
            if (suffix == null) {
                return(subject);
            } else {
                return(getX500Name(suffix, false));
            }
        case (ATTR_ISSUER):
            if (suffix == null) {
                return(issuer);
            } else {
                return(getX500Name(suffix, true));
            }
        case (ATTR_KEY):
            if (suffix == null) {
                return(pubKey);
            } else {
                return(pubKey.get(suffix));
            }
        case (ATTR_ALGORITHM):
            if (suffix == null) {
                return(algId);
            } else {
                return(algId.get(suffix));
            }
        case (ATTR_VALIDITY):
            if (suffix == null) {
                return(interval);
            } else {
                return(interval.get(suffix));
            }
        case (ATTR_VERSION):
            if (suffix == null) {
                return(version);
            } else {
                return(version.get(suffix));
            }
        case (ATTR_SERIAL):
            if (suffix == null) {
                return(serialNum);
            } else {
                return(serialNum.get(suffix));
            }
        // Unique IDs have no sub-attributes; the suffix is ignored.
        case (ATTR_ISSUER_ID):
            return(issuerUniqueId);
        case (ATTR_SUBJECT_ID):
            return(subjectUniqueId);
        }
        return null;
    } }
|
public class class_name {
public Object get(String name)
throws CertificateException, IOException {
X509AttributeName attrName = new X509AttributeName(name);
int attr = attributeMap(attrName.getPrefix());
if (attr == 0) {
throw new CertificateParsingException(
"Attribute name not recognized: " + name);
}
String suffix = attrName.getSuffix();
switch (attr) { // frequently used attributes first
case (ATTR_EXTENSIONS):
if (suffix == null) {
return(extensions); // depends on control dependency: [if], data = [none]
} else {
if (extensions == null) {
return null; // depends on control dependency: [if], data = [none]
} else {
return(extensions.get(suffix)); // depends on control dependency: [if], data = [(extensions]
}
}
case (ATTR_SUBJECT):
if (suffix == null) {
return(subject); // depends on control dependency: [if], data = [none]
} else {
return(getX500Name(suffix, false)); // depends on control dependency: [if], data = [(suffix]
}
case (ATTR_ISSUER):
if (suffix == null) {
return(issuer); // depends on control dependency: [if], data = [none]
} else {
return(getX500Name(suffix, true)); // depends on control dependency: [if], data = [(suffix]
}
case (ATTR_KEY):
if (suffix == null) {
return(pubKey); // depends on control dependency: [if], data = [none]
} else {
return(pubKey.get(suffix)); // depends on control dependency: [if], data = [(suffix]
}
case (ATTR_ALGORITHM):
if (suffix == null) {
return(algId); // depends on control dependency: [if], data = [none]
} else {
return(algId.get(suffix)); // depends on control dependency: [if], data = [(suffix]
}
case (ATTR_VALIDITY):
if (suffix == null) {
return(interval); // depends on control dependency: [if], data = [none]
} else {
return(interval.get(suffix)); // depends on control dependency: [if], data = [(suffix]
}
case (ATTR_VERSION):
if (suffix == null) {
return(version); // depends on control dependency: [if], data = [none]
} else {
return(version.get(suffix)); // depends on control dependency: [if], data = [(suffix]
}
case (ATTR_SERIAL):
if (suffix == null) {
return(serialNum); // depends on control dependency: [if], data = [none]
} else {
return(serialNum.get(suffix)); // depends on control dependency: [if], data = [(suffix]
}
case (ATTR_ISSUER_ID):
return(issuerUniqueId);
case (ATTR_SUBJECT_ID):
return(subjectUniqueId);
}
return null;
} }
|
public class class_name {
    /**
     * ANTLR-generated lexer rule for the INSERT_REALTIME token: matches the
     * literal 'INSERT_REALTIME' or 'insert_realtime' (exact casings only —
     * mixed case is rejected). Generated from druidG.g; do not hand-edit the
     * matching logic.
     *
     * @throws RecognitionException if the input matches neither alternative
     */
    public final void mINSERT_REALTIME() throws RecognitionException {
        try {
            int _type = INSERT_REALTIME;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // druidG.g:583:17: ( ( 'INSERT_REALTIME' | 'insert_realtime' ) )
            // druidG.g:583:18: ( 'INSERT_REALTIME' | 'insert_realtime' )
            {
            // druidG.g:583:18: ( 'INSERT_REALTIME' | 'insert_realtime' )
            int alt3=2;
            int LA3_0 = input.LA(1);
            // One character of lookahead chooses between the two casings.
            if ( (LA3_0=='I') ) {
                alt3=1;
            }
            else if ( (LA3_0=='i') ) {
                alt3=2;
            }
            else {
                NoViableAltException nvae =
                    new NoViableAltException("", 3, 0, input);
                throw nvae;
            }
            switch (alt3) {
                case 1 :
                    // druidG.g:583:19: 'INSERT_REALTIME'
                    {
                    match("INSERT_REALTIME");
                    }
                    break;
                case 2 :
                    // druidG.g:583:37: 'insert_realtime'
                    {
                    match("insert_realtime");
                    }
                    break;
            }
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
            // do for sure before leaving
        }
    } }
|
public class class_name {
public final void mINSERT_REALTIME() throws RecognitionException {
try {
int _type = INSERT_REALTIME;
int _channel = DEFAULT_TOKEN_CHANNEL;
// druidG.g:583:17: ( ( 'INSERT_REALTIME' | 'insert_realtime' ) )
// druidG.g:583:18: ( 'INSERT_REALTIME' | 'insert_realtime' )
{
// druidG.g:583:18: ( 'INSERT_REALTIME' | 'insert_realtime' )
int alt3=2;
int LA3_0 = input.LA(1);
if ( (LA3_0=='I') ) {
alt3=1; // depends on control dependency: [if], data = [none]
}
else if ( (LA3_0=='i') ) {
alt3=2; // depends on control dependency: [if], data = [none]
}
else {
NoViableAltException nvae =
new NoViableAltException("", 3, 0, input);
throw nvae;
}
switch (alt3) {
case 1 :
// druidG.g:583:19: 'INSERT_REALTIME'
{
match("INSERT_REALTIME");
}
break;
case 2 :
// druidG.g:583:37: 'insert_realtime'
{
match("insert_realtime");
}
break;
}
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
} }
|
public class class_name {
    /**
     * Appends the supplied text entries to this request's text list, lazily
     * creating the backing list on first use, and returns this request so
     * calls can be chained.
     * NOTE(review): assumes callers never pass a null array — TODO confirm.
     *
     * @param textList the entries to append
     * @return this request, for chaining
     */
    public BatchDetectKeyPhrasesRequest withTextList(String... textList) {
        if (this.textList == null) {
            // Presize to the incoming count; the list grows on later calls.
            setTextList(new java.util.ArrayList<String>(textList.length));
        }
        java.util.Collections.addAll(this.textList, textList);
        return this;
    } }
|
public class class_name {
public BatchDetectKeyPhrasesRequest withTextList(String... textList) {
if (this.textList == null) {
setTextList(new java.util.ArrayList<String>(textList.length)); // depends on control dependency: [if], data = [none]
}
for (String ele : textList) {
this.textList.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} }
|
public class class_name {
    /**
     * Computes a chart's pixel height from the current browser window height:
     * a fixed percentage of the window, clamped to a minimum height.
     *
     * @param fullPage true for a full-page chart, false for a half-page chart
     * @return the scaled window height, never below the matching minimum
     */
    // NOTE(review): HEIGHT_PERCETAGE_HALF_PAGE and NINIMUM_CHART_HEIGHT_HALF_PAGE
    // look misspelled ("PERCENTAGE"/"MINIMUM"); they are declared elsewhere, so
    // the rename must happen at the declaration site — references kept as-is.
    private static int getChartWindowHeight(final boolean fullPage) {
        if (fullPage) {
            return Math.max((int) (Page.getCurrent().getBrowserWindowHeight() * HEIGHT_PERCENTAGE_FULL_PAGE) ,MINIMUM_CHART_HEIGHT_FULL_PAGE);
        } else {
            return Math.max((int) (Page.getCurrent().getBrowserWindowHeight() * HEIGHT_PERCETAGE_HALF_PAGE),NINIMUM_CHART_HEIGHT_HALF_PAGE);
        }
    } }
|
public class class_name {
private static int getChartWindowHeight(final boolean fullPage) {
if (fullPage) {
return Math.max((int) (Page.getCurrent().getBrowserWindowHeight() * HEIGHT_PERCENTAGE_FULL_PAGE) ,MINIMUM_CHART_HEIGHT_FULL_PAGE); // depends on control dependency: [if], data = [none]
} else {
return Math.max((int) (Page.getCurrent().getBrowserWindowHeight() * HEIGHT_PERCETAGE_HALF_PAGE),NINIMUM_CHART_HEIGHT_HALF_PAGE); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
    /**
     * Appends the given router name to the router-record attachment of the
     * current RPC context, using '>' as the separator. Does nothing when
     * attachments are disabled.
     *
     * @param routerName the router name to record
     */
    protected void recordRouterWay(String routerName) {
        if (!RpcInternalContext.isAttachmentEnable()) {
            return;
        }
        RpcInternalContext ctx = RpcInternalContext.getContext();
        String previous = (String) ctx.getAttachment(RpcConstants.INTERNAL_KEY_ROUTER_RECORD);
        String updated = (previous == null) ? routerName : previous + ">" + routerName;
        ctx.setAttachment(RpcConstants.INTERNAL_KEY_ROUTER_RECORD, updated);
    } }
|
public class class_name {
protected void recordRouterWay(String routerName) {
if (RpcInternalContext.isAttachmentEnable()) {
RpcInternalContext context = RpcInternalContext.getContext();
String record = (String) context.getAttachment(RpcConstants.INTERNAL_KEY_ROUTER_RECORD);
record = record == null ? routerName : record + ">" + routerName; // depends on control dependency: [if], data = [none]
context.setAttachment(RpcConstants.INTERNAL_KEY_ROUTER_RECORD, record); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
    /**
     * Marshalls a ModelSummary into the request wire format by writing each
     * field through its pre-built binding.
     *
     * @param modelSummary the value to marshall; must not be null
     * @param protocolMarshaller the protocol-level marshaller to write into
     * @throws SdkClientException if modelSummary is null or marshalling fails
     */
    public void marshall(ModelSummary modelSummary, ProtocolMarshaller protocolMarshaller) {
        if (modelSummary == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(modelSummary.getModelName(), MODELNAME_BINDING);
            protocolMarshaller.marshall(modelSummary.getModelArn(), MODELARN_BINDING);
            protocolMarshaller.marshall(modelSummary.getCreationTime(), CREATIONTIME_BINDING);
        } catch (Exception e) {
            // Wrap any failure with context; the original cause is preserved.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    } }
|
public class class_name {
public void marshall(ModelSummary modelSummary, ProtocolMarshaller protocolMarshaller) {
if (modelSummary == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(modelSummary.getModelName(), MODELNAME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(modelSummary.getModelArn(), MODELARN_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(modelSummary.getCreationTime(), CREATIONTIME_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
    /**
     * Servlet-context shutdown callback: interrupts the shared Publisher
     * worker (if any) so it can stop with the webapp. Synchronizes on
     * Publisher.class because the worker reference is class-level shared
     * state — presumably the same lock guards its assignment; confirm at the
     * declaration site.
     *
     * @param sce the context event (unused)
     */
    public void contextDestroyed(ServletContextEvent sce)
    {
        synchronized (Publisher.class)
        {
            if (Publisher.worker != null)
            {
                Publisher.worker.interrupt();
            }
        }
    } }
|
public class class_name {
public void contextDestroyed(ServletContextEvent sce)
{
synchronized (Publisher.class)
{
if (Publisher.worker != null)
{
Publisher.worker.interrupt(); // depends on control dependency: [if], data = [none]
}
}
} }
|
public class class_name {
    /**
     * Builds a URI string from a target path plus an optional query string
     * and an optional fragment anchor.
     *
     * @param target the base URI/path, appended verbatim
     * @param query  the query string without the leading '?', or null to omit
     * @param anchor the fragment without the leading '#', or null to omit
     * @return target, then "?query" when query != null, then "#anchor" when
     *         anchor != null
     */
    private String computeUri(String target, String query, String anchor) {
        // StringBuilder instead of StringBuffer: the buffer is method-local
        // and single-threaded, so StringBuffer's synchronization is pure
        // overhead here.
        StringBuilder uri = new StringBuilder(64);
        uri.append(target);
        if (query != null) {
            uri.append('?').append(query);
        }
        if (anchor != null) {
            uri.append('#').append(anchor);
        }
        return uri.toString();
    } }
|
public class class_name {
private String computeUri(String target, String query, String anchor) {
StringBuffer uri = new StringBuffer(64);
uri.append(target);
if (query != null) {
uri.append('?'); // depends on control dependency: [if], data = [none]
uri.append(query); // depends on control dependency: [if], data = [(query]
}
if (anchor != null) {
uri.append('#'); // depends on control dependency: [if], data = [none]
uri.append(anchor); // depends on control dependency: [if], data = [(anchor]
}
return uri.toString();
} }
|
public class class_name {
    /**
     * Reshapes the storage of a matrix variable, but only when the variable
     * is a temporary; non-temporary matrices are left untouched.
     *
     * @param mat     the matrix variable to (maybe) reshape
     * @param numRows target row count
     * @param numCols target column count
     */
    protected void resize( VariableMatrix mat , int numRows , int numCols ) {
        if( !mat.isTemp() ) {
            return;
        }
        mat.matrix.reshape(numRows,numCols);
    } }
|
public class class_name {
protected void resize( VariableMatrix mat , int numRows , int numCols ) {
if( mat.isTemp() ) {
mat.matrix.reshape(numRows,numCols); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
    /**
     * Recursively builds a CalendarHierarchy tree mirroring the given
     * configuration: each sub-configuration becomes a child hierarchy node.
     *
     * @param aConfig the configuration node to convert; must not be null
     * @param aParent the parent hierarchy node, or null for the root
     * @return the hierarchy node for aConfig with all children attached
     */
    @Nonnull
    private static CalendarHierarchy _createConfigurationHierarchy (@Nonnull final Configuration aConfig,
                                                                    @Nullable final CalendarHierarchy aParent)
    {
      // The hierarchy id doubles as a country id when it resolves to one;
      // otherwise eCountry is null.
      final ECountry eCountry = ECountry.getFromIDOrNull (aConfig.getHierarchy ());
      final CalendarHierarchy aHierarchy = new CalendarHierarchy (aParent, aConfig.getHierarchy (), eCountry);
      for (final Configuration aSubConfig : aConfig.getSubConfigurations ())
      {
        final CalendarHierarchy aSubHierarchy = _createConfigurationHierarchy (aSubConfig, aHierarchy);
        aHierarchy.addChild (aSubHierarchy);
      }
      return aHierarchy;
    } }
|
public class class_name {
@Nonnull
private static CalendarHierarchy _createConfigurationHierarchy (@Nonnull final Configuration aConfig,
@Nullable final CalendarHierarchy aParent)
{
final ECountry eCountry = ECountry.getFromIDOrNull (aConfig.getHierarchy ());
final CalendarHierarchy aHierarchy = new CalendarHierarchy (aParent, aConfig.getHierarchy (), eCountry);
for (final Configuration aSubConfig : aConfig.getSubConfigurations ())
{
final CalendarHierarchy aSubHierarchy = _createConfigurationHierarchy (aSubConfig, aHierarchy);
aHierarchy.addChild (aSubHierarchy); // depends on control dependency: [for], data = [none]
}
return aHierarchy;
} }
|
public class class_name {
    /**
     * Resolves a scaling-quality name to the value of the matching constant
     * declared on java.awt.Image, looked up reflectively by upper-cased name.
     * Falls back to defaultScaleQuality when the name is empty or unknown.
     *
     * @param pQualityStr the quality name (case-insensitive), may be null/empty
     * @return the Image quality constant, or defaultScaleQuality on failure
     */
    protected int getQuality(String pQualityStr) {
        if (!StringUtil.isEmpty(pQualityStr)) {
            try {
                // Get quality constant from Image using reflection.
                // Class<?> avoids the raw-type warning of the previous raw Class.
                Class<?> cl = Image.class;
                // Locale.ROOT keeps the upper-casing locale-independent; the
                // default locale (e.g. Turkish) would mangle 'i' and break the
                // field lookup.
                Field field = cl.getField(pQualityStr.toUpperCase(java.util.Locale.ROOT));
                return field.getInt(null);
            }
            catch (IllegalAccessException | NoSuchFieldException e) {
                // Both reflection failure modes are handled identically.
                log("Unable to get quality.", e);
            }
        }
        return defaultScaleQuality;
    } }
|
public class class_name {
protected int getQuality(String pQualityStr) {
if (!StringUtil.isEmpty(pQualityStr)) {
try {
// Get quality constant from Image using reflection
Class cl = Image.class;
Field field = cl.getField(pQualityStr.toUpperCase());
return field.getInt(null);
// depends on control dependency: [try], data = [none]
}
catch (IllegalAccessException ia) {
log("Unable to get quality.", ia);
}
// depends on control dependency: [catch], data = [none]
catch (NoSuchFieldException nsf) {
log("Unable to get quality.", nsf);
}
// depends on control dependency: [catch], data = [none]
}
return defaultScaleQuality;
} }
|
public class class_name {
    /**
     * Stores the raw status line (e.g. "HTTP/1.1 200 OK") and derives the
     * numeric response code from its second space-separated token.
     *
     * @param statusLine the full status line; must not be null
     * @return this response, for call chaining
     */
    // NOTE(review): Integer.parseInt throws NumberFormatException when the
    // second token is not numeric — presumably only well-formed status lines
    // reach this setter; confirm against the producing parser.
    public final Response setStatusLine(final String statusLine) {
        this.statusLine = statusLine;
        final String[] statusPieces = statusLine.split(" ");
        if (statusPieces.length > 1) {
            responseCode = Integer.parseInt(statusPieces[1]);
        }
        // With no second token the previous responseCode is left unchanged.
        return this;
    } }
|
public class class_name {
public final Response setStatusLine(final String statusLine) {
this.statusLine = statusLine;
final String[] statusPieces = statusLine.split(" ");
if (statusPieces.length > 1) {
responseCode = Integer.parseInt(statusPieces[1]); // depends on control dependency: [if], data = [none]
}
return this;
} }
|
public class class_name {
    /**
     * Loads every preference entry whose key starts with pPrefix into pMap,
     * with the prefix stripped from the keys. The map is cleared first, so
     * afterwards it mirrors exactly the prefixed subset of pPrefs.
     *
     * @param pPrefs  the preferences to read from
     * @param pMap    the destination map; cleared then refilled (no-op if null)
     * @param pPrefix the key prefix to filter on (no-op if null)
     */
    private static void load(final SharedPreferences pPrefs,
                             final Map<String, String> pMap, final String pPrefix) {
        //potential fix for #1079 https://github.com/osmdroid/osmdroid/issues/1079
        if (pPrefix==null || pMap==null) return;
        pMap.clear();
        for (final String key : pPrefs.getAll().keySet()) {
            if (key!=null && key.startsWith(pPrefix)) {
                pMap.put(key.substring(pPrefix.length()), pPrefs.getString(key, null));
            }
        }
    } }
|
public class class_name {
private static void load(final SharedPreferences pPrefs,
final Map<String, String> pMap, final String pPrefix) {
//potential fix for #1079 https://github.com/osmdroid/osmdroid/issues/1079
if (pPrefix==null || pMap==null) return;
pMap.clear();
for (final String key : pPrefs.getAll().keySet()) {
if (key!=null && key.startsWith(pPrefix)) {
pMap.put(key.substring(pPrefix.length()), pPrefs.getString(key, null)); // depends on control dependency: [if], data = [(key]
}
}
} }
|
public class class_name {
    /**
     * Updates the Boundary/Boundary cell of the relation matrix for an
     * area/area pair while visiting the given half-edge.
     * NOTE(review): the matrix values presumably encode intersection
     * dimensions (1 = edge overlap, 0 = point contact) in DE-9IM style —
     * confirm against MatrixPredicate's documentation.
     *
     * @param half_edge the half-edge currently being processed
     * @param id_a parentage bit mask of geometry A
     * @param id_b parentage bit mask of geometry B
     */
    private void boundaryAreaBoundaryArea_(int half_edge, int id_a, int id_b) {
        // The strongest value is already recorded; nothing can improve it.
        if (m_matrix[MatrixPredicate.BoundaryBoundary] == 1)
            return;
        int parentage = m_topo_graph.getHalfEdgeParentage(half_edge);
        // Edge carried by both geometries: record and stop.
        if ((parentage & id_a) != 0 && (parentage & id_b) != 0) {
            m_matrix[MatrixPredicate.BoundaryBoundary] = 1;
            return;
        }
        if (m_matrix[MatrixPredicate.BoundaryBoundary] != 0) {
            // Only consider clusters not yet marked visited via the twin's
            // predecessor edge.
            if (m_topo_graph.getHalfEdgeUserIndex(m_topo_graph
                    .getHalfEdgePrev(m_topo_graph.getHalfEdgeTwin(half_edge)),
                    m_visited_index) != 1) {
                int cluster = m_topo_graph.getHalfEdgeTo(half_edge);
                int clusterParentage = m_topo_graph
                        .getClusterParentage(cluster);
                // End-point cluster shared by both geometries.
                if ((clusterParentage & id_a) != 0
                        && (clusterParentage & id_b) != 0) {
                    m_matrix[MatrixPredicate.BoundaryBoundary] = 0;
                }
            }
        }
    } }
|
public class class_name {
private void boundaryAreaBoundaryArea_(int half_edge, int id_a, int id_b) {
if (m_matrix[MatrixPredicate.BoundaryBoundary] == 1)
return;
int parentage = m_topo_graph.getHalfEdgeParentage(half_edge);
if ((parentage & id_a) != 0 && (parentage & id_b) != 0) {
m_matrix[MatrixPredicate.BoundaryBoundary] = 1; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
if (m_matrix[MatrixPredicate.BoundaryBoundary] != 0) {
if (m_topo_graph.getHalfEdgeUserIndex(m_topo_graph
.getHalfEdgePrev(m_topo_graph.getHalfEdgeTwin(half_edge)),
m_visited_index) != 1) {
int cluster = m_topo_graph.getHalfEdgeTo(half_edge);
int clusterParentage = m_topo_graph
.getClusterParentage(cluster);
if ((clusterParentage & id_a) != 0
&& (clusterParentage & id_b) != 0) {
m_matrix[MatrixPredicate.BoundaryBoundary] = 0; // depends on control dependency: [if], data = [0]
}
}
}
} }
|
public class class_name {
    /**
     * Enumerates resources with the given name from, in order, the thread
     * context class loader, this class's loader, and the calling class's
     * loader. With aggregate == false, later loaders are consulted only when
     * nothing has been found yet; with aggregate == true all loaders
     * contribute. If still empty and the name is relative, retries once with
     * a leading '/'.
     *
     * @param resourceName the resource path to look up
     * @param callingClass class whose loader is used as the final fallback
     * @param aggregate whether to merge results from all loaders
     * @return an iterator over the matching URLs (possibly empty)
     * @throws IOException if a class loader fails while enumerating
     */
    public static Iterator<URL> getResources(String resourceName, Class<?> callingClass, boolean aggregate)
            throws IOException {
        AggregateIterator<URL> iterator = new AggregateIterator<URL>();
        iterator.addEnumeration(Thread.currentThread().getContextClassLoader().getResources(resourceName));
        if (!iterator.hasNext() || aggregate) {
            iterator.addEnumeration(ClassLoaderUtil.class.getClassLoader().getResources(resourceName));
        }
        if (!iterator.hasNext() || aggregate) {
            ClassLoader cl = callingClass.getClassLoader();
            // Bootstrap-loaded classes report a null class loader.
            if (cl != null) {
                iterator.addEnumeration(cl.getResources(resourceName));
            }
        }
        // Last resort: retry with an absolute name.
        if (!iterator.hasNext() && (resourceName != null)
                && ((resourceName.length() == 0) || (resourceName.charAt(0) != '/'))) {
            return getResources('/' + resourceName, callingClass, aggregate);
        }
        return iterator;
    } }
|
public class class_name {
public static Iterator<URL> getResources(String resourceName, Class<?> callingClass, boolean aggregate)
throws IOException {
AggregateIterator<URL> iterator = new AggregateIterator<URL>();
iterator.addEnumeration(Thread.currentThread().getContextClassLoader().getResources(resourceName));
if (!iterator.hasNext() || aggregate) {
iterator.addEnumeration(ClassLoaderUtil.class.getClassLoader().getResources(resourceName));
}
if (!iterator.hasNext() || aggregate) {
ClassLoader cl = callingClass.getClassLoader();
if (cl != null) {
iterator.addEnumeration(cl.getResources(resourceName));
// depends on control dependency: [if], data = [(cl]
}
}
if (!iterator.hasNext() && (resourceName != null)
&& ((resourceName.length() == 0) || (resourceName.charAt(0) != '/'))) {
return getResources('/' + resourceName, callingClass, aggregate);
}
return iterator;
} }
|
public class class_name {
    /**
     * Encodes this primitive's payload: exactly one of cliRestrictionOption
     * or overrideCategory must be set, and that field's numeric code is
     * written as ASN.1 integer data.
     *
     * @param asnOs the ASN.1 output stream to write into
     * @throws MAPException if neither or both fields are set, or on I/O error
     */
    public void encodeData(AsnOutputStream asnOs) throws MAPException {
        if (this.cliRestrictionOption == null && this.overrideCategory == null)
            throw new MAPException("Error while encoding " + _PrimitiveName
                    + ": missing cliRestrictionOption and overrideCategory.");
        if (this.cliRestrictionOption != null && this.overrideCategory != null)
            throw new MAPException("Error while encoding " + _PrimitiveName
                    + ": both cliRestrictionOption and overrideCategory are defined.");
        try {
            // After the two guards above, exactly one branch is possible.
            if (this.cliRestrictionOption != null) {
                asnOs.writeIntegerData(this.cliRestrictionOption.getCode());
            } else {
                asnOs.writeIntegerData(this.overrideCategory.getCode());
            }
        } catch (IOException e) {
            throw new MAPException("IOException when encoding " + _PrimitiveName + ": " + e.getMessage(), e);
        }
    } }
|
public class class_name {
public void encodeData(AsnOutputStream asnOs) throws MAPException {
if (this.cliRestrictionOption == null && this.overrideCategory == null)
throw new MAPException("Error while encoding " + _PrimitiveName
+ ": missing cliRestrictionOption and overrideCategory.");
if (this.cliRestrictionOption != null && this.overrideCategory != null)
throw new MAPException("Error while encoding " + _PrimitiveName
+ ": both cliRestrictionOption and overrideCategory are defined.");
try {
if (this.cliRestrictionOption != null) {
asnOs.writeIntegerData(this.cliRestrictionOption.getCode()); // depends on control dependency: [if], data = [(this.cliRestrictionOption]
} else {
asnOs.writeIntegerData(this.overrideCategory.getCode()); // depends on control dependency: [if], data = [none]
}
} catch (IOException e) {
throw new MAPException("IOException when encoding " + _PrimitiveName + ": " + e.getMessage(), e);
}
} }
|
public class class_name {
    /**
     * Tells whether the current thread is processing an HTTP request, i.e.
     * whether Spring's RequestContextHolder exposes servlet request
     * attributes with a non-null request.
     *
     * @return true when request attributes and a request are available
     */
    protected boolean isWeb() {
        // Outside a request-bound thread the holder yields null attributes.
        final ServletRequestAttributes attributes =
                (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
        return attributes != null && attributes.getRequest() != null;
    } }
|
public class class_name {
protected boolean isWeb() {
ServletRequestAttributes attributes = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
if (attributes == null) {
return false; // depends on control dependency: [if], data = [none]
}
HttpServletRequest request = attributes.getRequest();
if (request == null) {
return false; // depends on control dependency: [if], data = [none]
}
return true;
} }
|
public class class_name {
    /**
     * Returns n rank values evenly spaced over [0.0, 1.0]: the first is 0.0,
     * the last is exactly 1.0 (for n &gt; 1), and the i-th is i / (n - 1).
     * For n == 1 the single value is 0.0.
     *
     * @param n the number of ranks to produce; must be &gt; 0
     * @return an array of n evenly spaced ranks
     */
    public static double[] getEvenlySpacedRanks(final int n) {
        if (n <= 0) {
            throw new SketchesArgumentException("n must be > 0");
        }
        final double[] ranks = new double[n];
        final int last = n - 1;
        for (int i = 0; i <= last; i++) {
            ranks[i] = (last == 0) ? 0.0 : (double) i / last;
        }
        if (n > 1) {
            // Pin the endpoint to exactly 1.0, guarding against any
            // floating-point residue from the division.
            ranks[last] = 1.0;
        }
        return ranks;
    } }
|
public class class_name {
public static double[] getEvenlySpacedRanks(final int n) {
if (n <= 0) {
throw new SketchesArgumentException("n must be > 0");
}
final double[] fractions = new double[n];
fractions[0] = 0.0;
for (int i = 1; i < n; i++) {
fractions[i] = (double) i / (n - 1); // depends on control dependency: [for], data = [i]
}
if (n > 1) {
fractions[n - 1] = 1.0; // depends on control dependency: [if], data = [none]
}
return fractions;
} }
|
public class class_name {
    /**
     * Draws the marker icon on the canvas at (pX, pY) with the given
     * rotation, honoring the anchor fractions (mAnchorU/mAnchorV) and the
     * alpha setting. Also updates mDisplayed with whether the (rotated)
     * marker intersects the canvas clip bounds; when it does not, nothing is
     * drawn.
     *
     * @param pCanvas      the canvas to draw on
     * @param pX           anchor x position in canvas coordinates
     * @param pY           anchor y position in canvas coordinates
     * @param pOrientation rotation in degrees around (pX, pY); 0 means none
     */
    protected void drawAt(final Canvas pCanvas, final int pX, final int pY, final float pOrientation) {
        final int markerWidth = mIcon.getIntrinsicWidth();
        final int markerHeight = mIcon.getIntrinsicHeight();
        // Shift so the anchor point (a fraction of the icon size) lands on (pX, pY).
        final int offsetX = pX - Math.round(markerWidth * mAnchorU);
        final int offsetY = pY - Math.round(markerHeight * mAnchorV);
        mRect.set(offsetX, offsetY, offsetX + markerWidth, offsetY + markerHeight);
        RectL.getBounds(mRect, pX, pY, pOrientation, mOrientedMarkerRect);
        mDisplayed = Rect.intersects(mOrientedMarkerRect, pCanvas.getClipBounds());
        if (!mDisplayed) { // optimization 1: (much faster, depending on the proportions) don't try to display if the Marker is not visible
            return;
        }
        if (mAlpha == 0) {
            // Fully transparent: nothing to draw.
            return;
        }
        if (pOrientation != 0) { // optimization 2: don't manipulate the Canvas if not needed (about 25% faster) - step 1/2
            pCanvas.save();
            pCanvas.rotate(pOrientation, pX, pY);
        }
        if (mIcon instanceof BitmapDrawable) { // optimization 3: (about 15% faster)
            // Fast path: draw the bitmap directly, reusing a cached Paint
            // only when alpha blending is actually required.
            final Paint paint;
            if (mAlpha == 1) {
                paint = null;
            } else {
                if (mPaint == null) {
                    mPaint = new Paint();
                }
                mPaint.setAlpha((int)(mAlpha * 255));
                paint = mPaint;
            }
            pCanvas.drawBitmap(((BitmapDrawable) mIcon).getBitmap(), offsetX, offsetY, paint);
        } else {
            // Generic drawable path: bounds and alpha applied to the drawable.
            mIcon.setAlpha((int)(mAlpha*255));
            mIcon.setBounds(mRect);
            mIcon.draw(pCanvas);
        }
        if (pOrientation != 0) { // optimization 2: step 2/2
            pCanvas.restore();
        }
    } }
|
public class class_name {
protected void drawAt(final Canvas pCanvas, final int pX, final int pY, final float pOrientation) {
final int markerWidth = mIcon.getIntrinsicWidth();
final int markerHeight = mIcon.getIntrinsicHeight();
final int offsetX = pX - Math.round(markerWidth * mAnchorU);
final int offsetY = pY - Math.round(markerHeight * mAnchorV);
mRect.set(offsetX, offsetY, offsetX + markerWidth, offsetY + markerHeight);
RectL.getBounds(mRect, pX, pY, pOrientation, mOrientedMarkerRect);
mDisplayed = Rect.intersects(mOrientedMarkerRect, pCanvas.getClipBounds());
if (!mDisplayed) { // optimization 1: (much faster, depending on the proportions) don't try to display if the Marker is not visible
return; // depends on control dependency: [if], data = [none]
}
if (mAlpha == 0) {
return; // depends on control dependency: [if], data = [none]
}
if (pOrientation != 0) { // optimization 2: don't manipulate the Canvas if not needed (about 25% faster) - step 1/2
pCanvas.save(); // depends on control dependency: [if], data = [none]
pCanvas.rotate(pOrientation, pX, pY); // depends on control dependency: [if], data = [(pOrientation]
}
if (mIcon instanceof BitmapDrawable) { // optimization 3: (about 15% faster)
final Paint paint;
if (mAlpha == 1) {
paint = null; // depends on control dependency: [if], data = [none]
} else {
if (mPaint == null) {
mPaint = new Paint(); // depends on control dependency: [if], data = [none]
}
mPaint.setAlpha((int)(mAlpha * 255)); // depends on control dependency: [if], data = [(mAlpha]
paint = mPaint; // depends on control dependency: [if], data = [none]
}
pCanvas.drawBitmap(((BitmapDrawable) mIcon).getBitmap(), offsetX, offsetY, paint); // depends on control dependency: [if], data = [none]
} else {
mIcon.setAlpha((int)(mAlpha*255)); // depends on control dependency: [if], data = [none]
mIcon.setBounds(mRect); // depends on control dependency: [if], data = [none]
mIcon.draw(pCanvas); // depends on control dependency: [if], data = [none]
}
if (pOrientation != 0) { // optimization 2: step 2/2
pCanvas.restore(); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
    /**
     * Handles incoming Z-Wave VERSION command-class messages.
     * VERSION_REPORT payloads are only logged; VERSION_COMMAND_CLASS_REPORT
     * payloads store the reported version on the matching command class
     * (capped at the maximum this binding supports) and advance the node
     * stage once every command class has a version. The GET commands are not
     * implemented on the receiving side.
     *
     * @param serialMessage the received serial message
     * @param offset offset of the command byte within the payload
     * @param endpoint the addressed endpoint (unused here)
     */
    @Override
    public void handleApplicationCommandRequest(SerialMessage serialMessage,
            int offset, int endpoint) {
        logger.trace("Handle Message Version Request");
        logger.debug(String.format("Received Version Request for Node ID = %d", this.getNode().getNodeId()));
        int command = serialMessage.getMessagePayloadByte(offset);
        switch (command) {
            case VERSION_GET:
            case VERSION_COMMAND_CLASS_GET:
                logger.warn(String.format("Command 0x%02X not implemented.", command));
                return;
            case VERSION_REPORT:
                logger.debug("Process Version Report");
                // Payload layout: library, protocol, protocol-sub,
                // application, application-sub version bytes.
                int libraryType = serialMessage.getMessagePayloadByte(offset + 1);
                int protocolVersion = serialMessage.getMessagePayloadByte(offset + 2);
                int protocolSubVersion = serialMessage.getMessagePayloadByte(offset + 3);
                int applicationVersion = serialMessage.getMessagePayloadByte(offset + 4);
                int applicationSubVersion = serialMessage.getMessagePayloadByte(offset + 5);
                logger.debug(String.format("Node %d Library Type = 0x%02x", this.getNode().getNodeId(), libraryType));
                logger.debug(String.format("Node %d Protocol Version = 0x%02x", this.getNode().getNodeId(), protocolVersion));
                logger.debug(String.format("Node %d Protocol Sub Version = 0x%02x", this.getNode().getNodeId(), protocolSubVersion));
                logger.debug(String.format("Node %d Application Version = 0x%02x", this.getNode().getNodeId(), applicationVersion));
                logger.debug(String.format("Node %d Application Sub Version = 0x%02x", this.getNode().getNodeId(), applicationSubVersion));
                // Nothing to do with this info, not exactly useful.
                break;
            case VERSION_COMMAND_CLASS_REPORT:
                logger.debug("Process Version Command Class Report");
                int commandClassCode = serialMessage.getMessagePayloadByte(offset + 1);
                int commandClassVersion = serialMessage.getMessagePayloadByte(offset + 2);
                CommandClass commandClass = CommandClass.getCommandClass(commandClassCode);
                if (commandClass == null) {
                    logger.error(String.format("Unsupported command class 0x%02x", commandClassCode));
                    return;
                }
                logger.debug(String.format("Node %d Requested Command Class = %s (0x%02x)", this.getNode().getNodeId(), commandClass.getLabel() , commandClassCode));
                logger.debug(String.format("Node %d Version = %d", this.getNode().getNodeId(), commandClassVersion));
                // The version is set on the command class for this node. By updating the version, extra functionality is unlocked in the command class.
                // The messages are backwards compatible, so it's not a problem that there is a slight delay when the command class version is queried on the
                // node.
                ZWaveCommandClass zwaveCommandClass = this.getNode().getCommandClass(commandClass);
                if (zwaveCommandClass == null) {
                    logger.error(String.format("Unsupported command class %s (0x%02x)", commandClass.getLabel(), commandClassCode));
                    return;
                }
                // Cap the stored version at what the binding implements.
                if (commandClassVersion > zwaveCommandClass.getMaxVersion()) {
                    zwaveCommandClass.setVersion( zwaveCommandClass.getMaxVersion() );
                    logger.debug(String.format("Node %d Version = %d, version set to maximum supported by the binding. Enabling extra functionality.", this.getNode().getNodeId(), zwaveCommandClass.getMaxVersion()));
                } else {
                    zwaveCommandClass.setVersion( commandClassVersion );
                    logger.debug(String.format("Node %d Version = %d, version set. Enabling extra functionality.", this.getNode().getNodeId(), commandClassVersion));
                }
                for (ZWaveCommandClass zCC : this.getNode().getCommandClasses()) {
                    // wait for all nodes to get/set version information before advancing to the next stage.
                    if (zCC.getVersion() == 0)
                        return;
                }
                // advance node stage;
                this.getNode().advanceNodeStage();
                break;
            default:
                logger.warn(String.format("Unsupported Command 0x%02X for command class %s (0x%02X).",
                    command,
                    this.getCommandClass().getLabel(),
                    this.getCommandClass().getKey()));
        }
    } }
|
public class class_name {
@Override
public void handleApplicationCommandRequest(SerialMessage serialMessage,
int offset, int endpoint) {
logger.trace("Handle Message Version Request");
logger.debug(String.format("Received Version Request for Node ID = %d", this.getNode().getNodeId()));
int command = serialMessage.getMessagePayloadByte(offset);
switch (command) {
case VERSION_GET:
case VERSION_COMMAND_CLASS_GET:
logger.warn(String.format("Command 0x%02X not implemented.", command));
return;
case VERSION_REPORT:
logger.debug("Process Version Report");
int libraryType = serialMessage.getMessagePayloadByte(offset + 1);
int protocolVersion = serialMessage.getMessagePayloadByte(offset + 2);
int protocolSubVersion = serialMessage.getMessagePayloadByte(offset + 3);
int applicationVersion = serialMessage.getMessagePayloadByte(offset + 4);
int applicationSubVersion = serialMessage.getMessagePayloadByte(offset + 5);
logger.debug(String.format("Node %d Library Type = 0x%02x", this.getNode().getNodeId(), libraryType));
logger.debug(String.format("Node %d Protocol Version = 0x%02x", this.getNode().getNodeId(), protocolVersion));
logger.debug(String.format("Node %d Protocol Sub Version = 0x%02x", this.getNode().getNodeId(), protocolSubVersion));
logger.debug(String.format("Node %d Application Version = 0x%02x", this.getNode().getNodeId(), applicationVersion));
logger.debug(String.format("Node %d Application Sub Version = 0x%02x", this.getNode().getNodeId(), applicationSubVersion));
// Nothing to do with this info, not exactly useful.
break;
case VERSION_COMMAND_CLASS_REPORT:
logger.debug("Process Version Command Class Report");
int commandClassCode = serialMessage.getMessagePayloadByte(offset + 1);
int commandClassVersion = serialMessage.getMessagePayloadByte(offset + 2);
CommandClass commandClass = CommandClass.getCommandClass(commandClassCode);
if (commandClass == null) {
logger.error(String.format("Unsupported command class 0x%02x", commandClassCode));
// depends on control dependency: [if], data = [none]
return;
// depends on control dependency: [if], data = [none]
}
logger.debug(String.format("Node %d Requested Command Class = %s (0x%02x)", this.getNode().getNodeId(), commandClass.getLabel() , commandClassCode));
logger.debug(String.format("Node %d Version = %d", this.getNode().getNodeId(), commandClassVersion));
// The version is set on the command class for this node. By updating the version, extra functionality is unlocked in the command class.
// The messages are backwards compatible, so it's not a problem that there is a slight delay when the command class version is queried on the
// node.
ZWaveCommandClass zwaveCommandClass = this.getNode().getCommandClass(commandClass);
if (zwaveCommandClass == null) {
logger.error(String.format("Unsupported command class %s (0x%02x)", commandClass.getLabel(), commandClassCode));
// depends on control dependency: [if], data = [none]
return;
// depends on control dependency: [if], data = [none]
}
if (commandClassVersion > zwaveCommandClass.getMaxVersion()) {
zwaveCommandClass.setVersion( zwaveCommandClass.getMaxVersion() );
// depends on control dependency: [if], data = [none]
logger.debug(String.format("Node %d Version = %d, version set to maximum supported by the binding. Enabling extra functionality.", this.getNode().getNodeId(), zwaveCommandClass.getMaxVersion()));
// depends on control dependency: [if], data = [zwaveCommandClass.getMaxVersion())]
} else {
zwaveCommandClass.setVersion( commandClassVersion );
// depends on control dependency: [if], data = [none]
logger.debug(String.format("Node %d Version = %d, version set. Enabling extra functionality.", this.getNode().getNodeId(), commandClassVersion));
// depends on control dependency: [if], data = [none]
}
for (ZWaveCommandClass zCC : this.getNode().getCommandClasses()) {
// wait for all nodes to get/set version information before advancing to the next stage.
if (zCC.getVersion() == 0)
return;
}
// advance node stage;
this.getNode().advanceNodeStage();
break;
default:
logger.warn(String.format("Unsupported Command 0x%02X for command class %s (0x%02X).",
command,
this.getCommandClass().getLabel(),
this.getCommandClass().getKey()));
}
} }
|
public class class_name {
    /**
     * Reacts to a new profile image: scales it to 200x200 and forwards the
     * scaled bitmap to the content view. A null bitmap is ignored.
     *
     * @param bitmap the new image, may be null
     */
    public void onImageChange(Bitmap bitmap) {
        if (bitmap == null) {
            return;
        }
        content.onProfilePictureChange(bitmapUtils.getScaledBitmap(bitmap, 200, 200));
    } }
|
public class class_name {
public void onImageChange(Bitmap bitmap) {
if(bitmap != null) {
Bitmap scaled = bitmapUtils.getScaledBitmap(bitmap, 200, 200);
content.onProfilePictureChange(scaled); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
Symbol ambiguityError(Symbol m1, Symbol m2) {
if (((m1.flags() | m2.flags()) & CLASH) != 0) {
return (m1.flags() & CLASH) == 0 ? m1 : m2;
} else {
return new AmbiguityError(m1, m2);
}
} }
|
public class class_name {
Symbol ambiguityError(Symbol m1, Symbol m2) {
if (((m1.flags() | m2.flags()) & CLASH) != 0) {
return (m1.flags() & CLASH) == 0 ? m1 : m2; // depends on control dependency: [if], data = [none]
} else {
return new AmbiguityError(m1, m2); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
public CmsXmlContainerPage getCacheContainerPage(String key, boolean online) {
try {
m_lock.readLock().lock();
CmsXmlContainerPage retValue;
if (online) {
retValue = m_containerPagesOnline.get(key);
if (LOG.isDebugEnabled()) {
if (retValue == null) {
LOG.debug(
Messages.get().getBundle().key(
Messages.LOG_DEBUG_CACHE_MISSED_ONLINE_1,
new Object[] {key}));
} else {
LOG.debug(
Messages.get().getBundle().key(
Messages.LOG_DEBUG_CACHE_MATCHED_ONLINE_2,
new Object[] {key, retValue}));
}
}
} else {
retValue = m_containerPagesOffline.get(key);
if (LOG.isDebugEnabled()) {
if (retValue == null) {
LOG.debug(
Messages.get().getBundle().key(
Messages.LOG_DEBUG_CACHE_MISSED_OFFLINE_1,
new Object[] {key}));
} else {
LOG.debug(
Messages.get().getBundle().key(
Messages.LOG_DEBUG_CACHE_MATCHED_OFFLINE_2,
new Object[] {key, retValue}));
}
}
}
if (retValue != null) {
//System.out.println("got cached page: " + retValue.getFile().getRootPath());
}
return retValue;
} finally {
m_lock.readLock().unlock();
}
} }
|
public class class_name {
public CmsXmlContainerPage getCacheContainerPage(String key, boolean online) {
try {
m_lock.readLock().lock(); // depends on control dependency: [try], data = [none]
CmsXmlContainerPage retValue;
if (online) {
retValue = m_containerPagesOnline.get(key); // depends on control dependency: [if], data = [none]
if (LOG.isDebugEnabled()) {
if (retValue == null) {
LOG.debug(
Messages.get().getBundle().key(
Messages.LOG_DEBUG_CACHE_MISSED_ONLINE_1,
new Object[] {key})); // depends on control dependency: [if], data = [none]
} else {
LOG.debug(
Messages.get().getBundle().key(
Messages.LOG_DEBUG_CACHE_MATCHED_ONLINE_2,
new Object[] {key, retValue})); // depends on control dependency: [if], data = [none]
}
}
} else {
retValue = m_containerPagesOffline.get(key); // depends on control dependency: [if], data = [none]
if (LOG.isDebugEnabled()) {
if (retValue == null) {
LOG.debug(
Messages.get().getBundle().key(
Messages.LOG_DEBUG_CACHE_MISSED_OFFLINE_1,
new Object[] {key})); // depends on control dependency: [if], data = [none]
} else {
LOG.debug(
Messages.get().getBundle().key(
Messages.LOG_DEBUG_CACHE_MATCHED_OFFLINE_2,
new Object[] {key, retValue})); // depends on control dependency: [if], data = [none]
}
}
}
if (retValue != null) {
//System.out.println("got cached page: " + retValue.getFile().getRootPath());
}
return retValue; // depends on control dependency: [try], data = [none]
} finally {
m_lock.readLock().unlock();
}
} }
|
public class class_name {
public void execute(
TransformerImpl transformer)
throws TransformerException
{
XPathContext xctxt = transformer.getXPathContext();
try
{
int sourceNode = xctxt.getCurrentNode();
xctxt.pushCurrentNode(sourceNode);
DTM dtm = xctxt.getDTM(sourceNode);
short nodeType = dtm.getNodeType(sourceNode);
if ((DTM.DOCUMENT_NODE != nodeType) && (DTM.DOCUMENT_FRAGMENT_NODE != nodeType))
{
SerializationHandler rthandler = transformer.getSerializationHandler();
// TODO: Process the use-attribute-sets stuff
ClonerToResultTree.cloneToResultTree(sourceNode, nodeType, dtm,
rthandler, false);
if (DTM.ELEMENT_NODE == nodeType)
{
super.execute(transformer);
SerializerUtils.processNSDecls(rthandler, sourceNode, nodeType, dtm);
transformer.executeChildTemplates(this, true);
String ns = dtm.getNamespaceURI(sourceNode);
String localName = dtm.getLocalName(sourceNode);
transformer.getResultTreeHandler().endElement(ns, localName,
dtm.getNodeName(sourceNode));
}
}
else
{
super.execute(transformer);
transformer.executeChildTemplates(this, true);
}
}
catch(org.xml.sax.SAXException se)
{
throw new TransformerException(se);
}
finally
{
xctxt.popCurrentNode();
}
} }
|
public class class_name {
public void execute(
TransformerImpl transformer)
throws TransformerException
{
XPathContext xctxt = transformer.getXPathContext();
try
{
int sourceNode = xctxt.getCurrentNode();
xctxt.pushCurrentNode(sourceNode);
DTM dtm = xctxt.getDTM(sourceNode);
short nodeType = dtm.getNodeType(sourceNode);
if ((DTM.DOCUMENT_NODE != nodeType) && (DTM.DOCUMENT_FRAGMENT_NODE != nodeType))
{
SerializationHandler rthandler = transformer.getSerializationHandler();
// TODO: Process the use-attribute-sets stuff
ClonerToResultTree.cloneToResultTree(sourceNode, nodeType, dtm,
rthandler, false); // depends on control dependency: [if], data = [none]
if (DTM.ELEMENT_NODE == nodeType)
{
super.execute(transformer); // depends on control dependency: [if], data = [none]
SerializerUtils.processNSDecls(rthandler, sourceNode, nodeType, dtm); // depends on control dependency: [if], data = [none]
transformer.executeChildTemplates(this, true); // depends on control dependency: [if], data = [none]
String ns = dtm.getNamespaceURI(sourceNode);
String localName = dtm.getLocalName(sourceNode);
transformer.getResultTreeHandler().endElement(ns, localName,
dtm.getNodeName(sourceNode)); // depends on control dependency: [if], data = [none]
}
}
else
{
super.execute(transformer); // depends on control dependency: [if], data = [none]
transformer.executeChildTemplates(this, true); // depends on control dependency: [if], data = [none]
}
}
catch(org.xml.sax.SAXException se)
{
throw new TransformerException(se);
}
finally
{
xctxt.popCurrentNode();
}
} }
|
public class class_name {
public void computeType(XListLiteral literal, ITypeComputationState state) {
JvmGenericType listType = findDeclaredType(List.class, state);
if (listType == null) {
handleCollectionTypeNotAvailable(literal, state, List.class);
return;
}
for(ITypeExpectation expectation: state.getExpectations()) {
computeType(literal, listType, expectation, state);
}
} }
|
public class class_name {
public void computeType(XListLiteral literal, ITypeComputationState state) {
JvmGenericType listType = findDeclaredType(List.class, state);
if (listType == null) {
handleCollectionTypeNotAvailable(literal, state, List.class); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
for(ITypeExpectation expectation: state.getExpectations()) {
computeType(literal, listType, expectation, state); // depends on control dependency: [for], data = [expectation]
}
} }
|
public class class_name {
public static boolean init(Object initableObj) {
if (initableObj instanceof Initable) {
((Initable) initableObj).init();
return true;
}
return false;
} }
|
public class class_name {
public static boolean init(Object initableObj) {
if (initableObj instanceof Initable) {
((Initable) initableObj).init(); // depends on control dependency: [if], data = [none]
return true; // depends on control dependency: [if], data = [none]
}
return false;
} }
|
public class class_name {
public final HttpClient wiretap(boolean enable) {
if (enable) {
return tcpConfiguration(tcpClient -> tcpClient.bootstrap(b -> BootstrapHandlers.updateLogSupport(b, LOGGING_HANDLER)));
}
else {
return tcpConfiguration(tcpClient -> tcpClient.bootstrap(
b -> BootstrapHandlers.removeConfiguration(b, NettyPipeline.LoggingHandler)));
}
} }
|
public class class_name {
public final HttpClient wiretap(boolean enable) {
if (enable) {
return tcpConfiguration(tcpClient -> tcpClient.bootstrap(b -> BootstrapHandlers.updateLogSupport(b, LOGGING_HANDLER))); // depends on control dependency: [if], data = [none]
}
else {
return tcpConfiguration(tcpClient -> tcpClient.bootstrap(
b -> BootstrapHandlers.removeConfiguration(b, NettyPipeline.LoggingHandler))); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
@Pure
public static boolean getPreferredRoadInternDrawing() {
final Preferences prefs = Preferences.userNodeForPackage(RoadNetworkLayerConstants.class);
if (prefs != null) {
return prefs.getBoolean("ROAD_INTERN_DRAWING", DEFAULT_ROAD_INTERN_DRAWING); //$NON-NLS-1$
}
return DEFAULT_ROAD_INTERN_DRAWING;
} }
|
public class class_name {
@Pure
public static boolean getPreferredRoadInternDrawing() {
final Preferences prefs = Preferences.userNodeForPackage(RoadNetworkLayerConstants.class);
if (prefs != null) {
return prefs.getBoolean("ROAD_INTERN_DRAWING", DEFAULT_ROAD_INTERN_DRAWING); //$NON-NLS-1$ // depends on control dependency: [if], data = [none]
}
return DEFAULT_ROAD_INTERN_DRAWING;
} }
|
public class class_name {
protected final List<String> getSessionPermissions() {
if (sessionTracker != null) {
Session currentSession = sessionTracker.getSession();
return (currentSession != null) ? currentSession.getPermissions() : null;
}
return null;
} }
|
public class class_name {
protected final List<String> getSessionPermissions() {
if (sessionTracker != null) {
Session currentSession = sessionTracker.getSession();
return (currentSession != null) ? currentSession.getPermissions() : null; // depends on control dependency: [if], data = [null)]
}
return null;
} }
|
public class class_name {
public static String getServerScheme() {
String ret = getLatkeProperty("serverScheme");
if (null == ret) {
final RequestContext requestContext = REQUEST_CONTEXT.get();
if (null != requestContext) {
ret = requestContext.getRequest().getScheme();
} else {
ret = "http";
}
}
return ret;
} }
|
public class class_name {
public static String getServerScheme() {
String ret = getLatkeProperty("serverScheme");
if (null == ret) {
final RequestContext requestContext = REQUEST_CONTEXT.get();
if (null != requestContext) {
ret = requestContext.getRequest().getScheme(); // depends on control dependency: [if], data = [none]
} else {
ret = "http"; // depends on control dependency: [if], data = [none]
}
}
return ret;
} }
|
public class class_name {
public void commit() {
if (checkSession()) {
return;
}
if (log.isDebugEnabled()) {
log.debug(String.format("Committing transactional %s@%x",
sqlSession.getClass().getSimpleName(),
sqlSession.hashCode()));
}
sqlSession.commit();
} }
|
public class class_name {
public void commit() {
if (checkSession()) {
return; // depends on control dependency: [if], data = [none]
}
if (log.isDebugEnabled()) {
log.debug(String.format("Committing transactional %s@%x",
sqlSession.getClass().getSimpleName(),
sqlSession.hashCode())); // depends on control dependency: [if], data = [none]
}
sqlSession.commit();
} }
|
public class class_name {
public String setPropertyIfNull(String propname, String value)
{
String property = super.getProperty(propname);
if (property == null)
{
super.setProperty(propname, value);
return value;
}
else
{
return property;
}
} }
|
public class class_name {
public String setPropertyIfNull(String propname, String value)
{
String property = super.getProperty(propname);
if (property == null)
{
super.setProperty(propname, value); // depends on control dependency: [if], data = [none]
return value; // depends on control dependency: [if], data = [none]
}
else
{
return property; // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
protected void addComment(Element element, Content contentTree) {
List<? extends DocTree> tags;
Content span = HtmlTree.SPAN(HtmlStyle.deprecatedLabel, getDeprecatedPhrase(element));
HtmlTree div = new HtmlTree(HtmlTag.DIV);
div.addStyle(HtmlStyle.block);
if (utils.isDeprecated(element)) {
div.addContent(span);
tags = utils.getBlockTags(element, DocTree.Kind.DEPRECATED);
if (!tags.isEmpty())
addInlineDeprecatedComment(element, tags.get(0), div);
contentTree.addContent(div);
} else {
TypeElement encl = utils.getEnclosingTypeElement(element);
while (encl != null) {
if (utils.isDeprecated(encl)) {
div.addContent(span);
contentTree.addContent(div);
break;
}
encl = utils.getEnclosingTypeElement(encl);
}
addSummaryComment(element, contentTree);
}
} }
|
public class class_name {
protected void addComment(Element element, Content contentTree) {
List<? extends DocTree> tags;
Content span = HtmlTree.SPAN(HtmlStyle.deprecatedLabel, getDeprecatedPhrase(element));
HtmlTree div = new HtmlTree(HtmlTag.DIV);
div.addStyle(HtmlStyle.block);
if (utils.isDeprecated(element)) {
div.addContent(span); // depends on control dependency: [if], data = [none]
tags = utils.getBlockTags(element, DocTree.Kind.DEPRECATED); // depends on control dependency: [if], data = [none]
if (!tags.isEmpty())
addInlineDeprecatedComment(element, tags.get(0), div);
contentTree.addContent(div); // depends on control dependency: [if], data = [none]
} else {
TypeElement encl = utils.getEnclosingTypeElement(element);
while (encl != null) {
if (utils.isDeprecated(encl)) {
div.addContent(span); // depends on control dependency: [if], data = [none]
contentTree.addContent(div); // depends on control dependency: [if], data = [none]
break;
}
encl = utils.getEnclosingTypeElement(encl); // depends on control dependency: [while], data = [(encl]
}
addSummaryComment(element, contentTree); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
public void marshall(Shipment shipment, ProtocolMarshaller protocolMarshaller) {
if (shipment == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(shipment.getStatus(), STATUS_BINDING);
protocolMarshaller.marshall(shipment.getTrackingNumber(), TRACKINGNUMBER_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} }
|
public class class_name {
public void marshall(Shipment shipment, ProtocolMarshaller protocolMarshaller) {
if (shipment == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(shipment.getStatus(), STATUS_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(shipment.getTrackingNumber(), TRACKINGNUMBER_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
protected Optional<String> getDatabaseName(Path path) {
if (!this.props.contains(HIVE_DATABASE_NAME) && !this.props.contains(HIVE_DATABASE_REGEX)) {
return Optional.<String> absent();
}
return Optional.<String> of(
this.dbNamePrefix + getDatabaseOrTableName(path, HIVE_DATABASE_NAME, HIVE_DATABASE_REGEX, this.dbNamePattern)
+ this.dbNameSuffix);
} }
|
public class class_name {
protected Optional<String> getDatabaseName(Path path) {
if (!this.props.contains(HIVE_DATABASE_NAME) && !this.props.contains(HIVE_DATABASE_REGEX)) {
return Optional.<String> absent(); // depends on control dependency: [if], data = [none]
}
return Optional.<String> of(
this.dbNamePrefix + getDatabaseOrTableName(path, HIVE_DATABASE_NAME, HIVE_DATABASE_REGEX, this.dbNamePattern)
+ this.dbNameSuffix);
} }
|
public class class_name {
public void setComplianceLevel( String complianceLevel )
{
if ( AjcHelper.isValidComplianceLevel( complianceLevel ) )
{
ajcOptions.add( "-source" );
ajcOptions.add( complianceLevel );
}
} }
|
public class class_name {
public void setComplianceLevel( String complianceLevel )
{
if ( AjcHelper.isValidComplianceLevel( complianceLevel ) )
{
ajcOptions.add( "-source" ); // depends on control dependency: [if], data = [none]
ajcOptions.add( complianceLevel ); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
public String getProperty(Key key) {
final String envVariable = getPropertyFromEnvironment(key);
if (envVariable != null) {
return envVariable;
}
if (key.getDefaultValue() == null) {
return properties.getProperty(key.getPropertyName());
} else {
return properties.getProperty(key.getPropertyName(), String.valueOf(key.getDefaultValue()));
}
} }
|
public class class_name {
public String getProperty(Key key) {
final String envVariable = getPropertyFromEnvironment(key);
if (envVariable != null) {
return envVariable; // depends on control dependency: [if], data = [none]
}
if (key.getDefaultValue() == null) {
return properties.getProperty(key.getPropertyName()); // depends on control dependency: [if], data = [none]
} else {
return properties.getProperty(key.getPropertyName(), String.valueOf(key.getDefaultValue())); // depends on control dependency: [if], data = [(key.getDefaultValue()]
}
} }
|
public class class_name {
@RequestMapping(path = "/{productId}", method=RequestMethod.GET)
public ResponseEntity<Product> getCustomer(@PathVariable("productId") Long productId){
Product product = productService.getProduct(productId);
if(product != null){
return new ResponseEntity<Product>(product, HttpStatus.OK);
}
return new ResponseEntity<Product>(HttpStatus.NOT_FOUND);
} }
|
public class class_name {
@RequestMapping(path = "/{productId}", method=RequestMethod.GET)
public ResponseEntity<Product> getCustomer(@PathVariable("productId") Long productId){
Product product = productService.getProduct(productId);
if(product != null){
return new ResponseEntity<Product>(product, HttpStatus.OK); // depends on control dependency: [if], data = [(product]
}
return new ResponseEntity<Product>(HttpStatus.NOT_FOUND);
} }
|
public class class_name {
@Override
@FFDCIgnore({ EmptyRegistryException.class, NoSuchRegistryException.class })
public void write() throws IOException {
JSONObject payload = new JSONObject();
for (String registryName : Constants.REGISTRY_NAMES_LIST) {
try {
payload.put(registryName, getMetricsAsJson(registryName));
} catch (NoSuchRegistryException e) { // Ignore
} catch (EmptyRegistryException e) { // Ignore
}
}
serialize(payload);
} }
|
public class class_name {
@Override
@FFDCIgnore({ EmptyRegistryException.class, NoSuchRegistryException.class })
public void write() throws IOException {
JSONObject payload = new JSONObject();
for (String registryName : Constants.REGISTRY_NAMES_LIST) {
try {
payload.put(registryName, getMetricsAsJson(registryName)); // depends on control dependency: [try], data = [none]
} catch (NoSuchRegistryException e) { // Ignore
} catch (EmptyRegistryException e) { // Ignore // depends on control dependency: [catch], data = [none]
} // depends on control dependency: [catch], data = [none]
}
serialize(payload);
} }
|
public class class_name {
public static SoyValue plus(SoyValue operand0, SoyValue operand1) {
if (operand0 instanceof IntegerData && operand1 instanceof IntegerData) {
return IntegerData.forValue(operand0.longValue() + operand1.longValue());
} else if (operand0 instanceof NumberData && operand1 instanceof NumberData) {
return FloatData.forValue(operand0.numberValue() + operand1.numberValue());
} else {
// String concatenation is the fallback for other types (like in JS). Use the implemented
// coerceToString() for the type.
return StringData.forValue(operand0.coerceToString() + operand1.coerceToString());
}
} }
|
public class class_name {
public static SoyValue plus(SoyValue operand0, SoyValue operand1) {
if (operand0 instanceof IntegerData && operand1 instanceof IntegerData) {
return IntegerData.forValue(operand0.longValue() + operand1.longValue()); // depends on control dependency: [if], data = [none]
} else if (operand0 instanceof NumberData && operand1 instanceof NumberData) {
return FloatData.forValue(operand0.numberValue() + operand1.numberValue()); // depends on control dependency: [if], data = [none]
} else {
// String concatenation is the fallback for other types (like in JS). Use the implemented
// coerceToString() for the type.
return StringData.forValue(operand0.coerceToString() + operand1.coerceToString()); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
public DataPoint set(DataPoint another) {
if (type == Type.NONE) {
_cloneFrom(another);
} else {
if (another.type != Type.NONE) {
set(another.value());
}
}
return this;
} }
|
public class class_name {
public DataPoint set(DataPoint another) {
if (type == Type.NONE) {
_cloneFrom(another); // depends on control dependency: [if], data = [none]
} else {
if (another.type != Type.NONE) {
set(another.value()); // depends on control dependency: [if], data = [none]
}
}
return this;
} }
|
public class class_name {
@Override
public void notLeader() {
LOG.warn("Server instance with server id {} is removed as leader", serverId);
serviceState.becomingPassive();
for (ActiveStateChangeHandler handler: activeStateChangeHandlers) {
try {
handler.instanceIsPassive();
} catch (AtlasException e) {
LOG.error("Error while reacting to passive state.", e);
}
}
serviceState.setPassive();
} }
|
public class class_name {
@Override
public void notLeader() {
LOG.warn("Server instance with server id {} is removed as leader", serverId);
serviceState.becomingPassive();
for (ActiveStateChangeHandler handler: activeStateChangeHandlers) {
try {
handler.instanceIsPassive(); // depends on control dependency: [try], data = [none]
} catch (AtlasException e) {
LOG.error("Error while reacting to passive state.", e);
} // depends on control dependency: [catch], data = [none]
}
serviceState.setPassive();
} }
|
public class class_name {
public void listenBucketNotification(String bucketName, String prefix, String suffix, String[] events,
BucketEventListener eventCallback)
throws InvalidBucketNameException, NoSuchAlgorithmException, InsufficientDataException, IOException,
InvalidKeyException, NoResponseException, XmlPullParserException, ErrorResponseException,
InternalException {
Multimap<String,String> queryParamMap = HashMultimap.create();
queryParamMap.put("prefix", prefix);
queryParamMap.put("suffix", suffix);
for (String event: events) {
queryParamMap.put("events", event);
}
String bodyContent = "";
Scanner scanner = null;
HttpResponse response = null;
ObjectMapper mapper = new ObjectMapper();
try {
response = executeReq(Method.GET, getRegion(bucketName),
bucketName, "", null, queryParamMap, null, 0);
scanner = new Scanner(response.body().charStream());
scanner.useDelimiter("\n");
while (scanner.hasNext()) {
bodyContent = scanner.next().trim();
if (bodyContent.equals("")) {
continue;
}
NotificationInfo ni = mapper.readValue(bodyContent, NotificationInfo.class);
eventCallback.updateEvent(ni);
}
} catch (RuntimeException e) {
throw e;
} catch (Exception e) {
throw e;
} finally {
if (response != null) {
response.body().close();
}
if (scanner != null) {
scanner.close();
}
}
} }
|
public class class_name {
public void listenBucketNotification(String bucketName, String prefix, String suffix, String[] events,
BucketEventListener eventCallback)
throws InvalidBucketNameException, NoSuchAlgorithmException, InsufficientDataException, IOException,
InvalidKeyException, NoResponseException, XmlPullParserException, ErrorResponseException,
InternalException {
Multimap<String,String> queryParamMap = HashMultimap.create();
queryParamMap.put("prefix", prefix);
queryParamMap.put("suffix", suffix);
for (String event: events) {
queryParamMap.put("events", event);
}
String bodyContent = "";
Scanner scanner = null;
HttpResponse response = null;
ObjectMapper mapper = new ObjectMapper();
try {
response = executeReq(Method.GET, getRegion(bucketName),
bucketName, "", null, queryParamMap, null, 0);
scanner = new Scanner(response.body().charStream());
scanner.useDelimiter("\n");
while (scanner.hasNext()) {
bodyContent = scanner.next().trim();
if (bodyContent.equals("")) {
continue;
}
NotificationInfo ni = mapper.readValue(bodyContent, NotificationInfo.class);
eventCallback.updateEvent(ni);
}
} catch (RuntimeException e) {
throw e;
} catch (Exception e) {
throw e;
} finally {
if (response != null) {
response.body().close(); // depends on control dependency: [if], data = [none]
}
if (scanner != null) {
scanner.close(); // depends on control dependency: [if], data = [none]
}
}
} }
|
public class class_name {
public String getSQL() {
StringBuffer sb = new StringBuffer(64);
switch (opType) {
case OpTypes.VALUE :
if (valueData == null) {
return Tokens.T_NULL;
}
return dataType.convertToSQLString(valueData);
case OpTypes.ROW :
sb.append('(');
for (int i = 0; i < nodes.length; i++) {
sb.append(nodes[i].getSQL());
if (i < nodes.length - 1) {
sb.append(',');
}
}
sb.append(')');
return sb.toString();
//
case OpTypes.TABLE :
for (int i = 0; i < nodes.length; i++) {
sb.append(nodes[i].getSQL());
if (i < nodes.length - 1) {
sb.append(',');
}
}
return sb.toString();
}
switch (opType) {
case OpTypes.ROW_SUBQUERY :
case OpTypes.TABLE_SUBQUERY :
/*
buf.append('(');
buf.append(subSelect.getSQL());
buf.append(')');
*/
break;
default :
throw Error.runtimeError(ErrorCode.U_S0500, "Expression");
}
return sb.toString();
} }
|
public class class_name {
public String getSQL() {
StringBuffer sb = new StringBuffer(64);
switch (opType) {
case OpTypes.VALUE :
if (valueData == null) {
return Tokens.T_NULL; // depends on control dependency: [if], data = [none]
}
return dataType.convertToSQLString(valueData);
case OpTypes.ROW :
sb.append('(');
for (int i = 0; i < nodes.length; i++) {
sb.append(nodes[i].getSQL()); // depends on control dependency: [for], data = [i]
if (i < nodes.length - 1) {
sb.append(','); // depends on control dependency: [if], data = [none]
}
}
sb.append(')');
return sb.toString();
//
case OpTypes.TABLE :
for (int i = 0; i < nodes.length; i++) {
sb.append(nodes[i].getSQL()); // depends on control dependency: [for], data = [i]
if (i < nodes.length - 1) {
sb.append(','); // depends on control dependency: [if], data = [none]
}
}
return sb.toString();
}
switch (opType) {
case OpTypes.ROW_SUBQUERY :
case OpTypes.TABLE_SUBQUERY :
/*
buf.append('(');
buf.append(subSelect.getSQL());
buf.append(')');
*/
break;
default :
throw Error.runtimeError(ErrorCode.U_S0500, "Expression");
}
return sb.toString();
} }
|
public class class_name {
public List<LoginConfigType<WebAppType<T>>> getAllLoginConfig()
{
List<LoginConfigType<WebAppType<T>>> list = new ArrayList<LoginConfigType<WebAppType<T>>>();
List<Node> nodeList = childNode.get("login-config");
for(Node node: nodeList)
{
LoginConfigType<WebAppType<T>> type = new LoginConfigTypeImpl<WebAppType<T>>(this, "login-config", childNode, node);
list.add(type);
}
return list;
} }
|
public class class_name {
public List<LoginConfigType<WebAppType<T>>> getAllLoginConfig()
{
List<LoginConfigType<WebAppType<T>>> list = new ArrayList<LoginConfigType<WebAppType<T>>>();
List<Node> nodeList = childNode.get("login-config");
for(Node node: nodeList)
{
LoginConfigType<WebAppType<T>> type = new LoginConfigTypeImpl<WebAppType<T>>(this, "login-config", childNode, node);
list.add(type); // depends on control dependency: [for], data = [none]
}
return list;
} }
|
public class class_name {
public static void putLibrary(String key, String path, Object value) {
if (key.startsWith(DicLibrary.DEFAULT)) {
DicLibrary.put(key, path, (Forest) value);
} else if (key.startsWith(StopLibrary.DEFAULT)) {
StopLibrary.put(key, path, (StopRecognition) value);
} else if (key.startsWith(SynonymsLibrary.DEFAULT)) {
SynonymsLibrary.put(key, path, (SmartForest) value);
} else if (key.startsWith(AmbiguityLibrary.DEFAULT)) {
AmbiguityLibrary.put(key, path, (Forest) value);
} else if (key.startsWith(CrfLibrary.DEFAULT)) {
CrfLibrary.put(key, path, (SplitWord) value);
} else {
throw new LibraryException(key + " type err must start with dic,stop,ambiguity,synonyms");
}
ENV.put(key, path);
} }
|
public class class_name {
public static void putLibrary(String key, String path, Object value) {
if (key.startsWith(DicLibrary.DEFAULT)) {
DicLibrary.put(key, path, (Forest) value); // depends on control dependency: [if], data = [none]
} else if (key.startsWith(StopLibrary.DEFAULT)) {
StopLibrary.put(key, path, (StopRecognition) value); // depends on control dependency: [if], data = [none]
} else if (key.startsWith(SynonymsLibrary.DEFAULT)) {
SynonymsLibrary.put(key, path, (SmartForest) value); // depends on control dependency: [if], data = [none]
} else if (key.startsWith(AmbiguityLibrary.DEFAULT)) {
AmbiguityLibrary.put(key, path, (Forest) value); // depends on control dependency: [if], data = [none]
} else if (key.startsWith(CrfLibrary.DEFAULT)) {
CrfLibrary.put(key, path, (SplitWord) value); // depends on control dependency: [if], data = [none]
} else {
throw new LibraryException(key + " type err must start with dic,stop,ambiguity,synonyms");
}
ENV.put(key, path);
} }
|
public class class_name {
public Integer getNumberOfTokens(String field, int docId) {
if (fieldReferences.containsKey(field)) {
IndexDoc doc = getDoc(field, docId);
if (doc != null) {
return doc.size;
}
}
return null;
} }
|
public class class_name {
public Integer getNumberOfTokens(String field, int docId) {
if (fieldReferences.containsKey(field)) {
IndexDoc doc = getDoc(field, docId);
if (doc != null) {
return doc.size; // depends on control dependency: [if], data = [none]
}
}
return null;
} }
|
public class class_name {
public static Counter isDeniedID(String identity) {
if(deniedID!=null) {
return deniedID.get(identity);
}
return null;
} }
|
public class class_name {
public static Counter isDeniedID(String identity) {
if(deniedID!=null) {
return deniedID.get(identity); // depends on control dependency: [if], data = [none]
}
return null;
} }
|
public class class_name {
private Model doModel(Type type, Model model) {
Map<String, Property> properties = model.getProperties();
if (properties != null) {
BeanDescription desc = Json.mapper().getSerializationConfig()
.introspect(Json.mapper().constructType(type));
for (BeanPropertyDefinition beanProperty : desc.findProperties()) {
Property property = properties.get(beanProperty.getName());
if (property != null) {
property.setRequired(beanProperty.isRequired());
}
}
}
return model;
} }
|
public class class_name {
private Model doModel(Type type, Model model) {
Map<String, Property> properties = model.getProperties();
if (properties != null) {
BeanDescription desc = Json.mapper().getSerializationConfig()
.introspect(Json.mapper().constructType(type));
for (BeanPropertyDefinition beanProperty : desc.findProperties()) {
Property property = properties.get(beanProperty.getName());
if (property != null) {
property.setRequired(beanProperty.isRequired()); // depends on control dependency: [if], data = [none]
}
}
}
return model;
} }
|
public class class_name {
public DataForm getDataFormToSend() {
if (isSubmitType()) {
// Create a new DataForm that contains only the answered fields
DataForm dataFormToSend = new DataForm(getType());
for (FormField field : getFields()) {
if (!field.getValues().isEmpty()) {
dataFormToSend.addField(field);
}
}
return dataFormToSend;
}
return dataForm;
} }
|
public class class_name {
public DataForm getDataFormToSend() {
if (isSubmitType()) {
// Create a new DataForm that contains only the answered fields
DataForm dataFormToSend = new DataForm(getType());
for (FormField field : getFields()) {
if (!field.getValues().isEmpty()) {
dataFormToSend.addField(field); // depends on control dependency: [if], data = [none]
}
}
return dataFormToSend; // depends on control dependency: [if], data = [none]
}
return dataForm;
} }
|
public class class_name {
static @Nonnull List<ThreadLock> getSynchronizers(final ThreadInfo threadInfo) {
final LockInfo[] synchronizers = threadInfo.getLockedSynchronizers();
final List<ThreadLock> locks = new ArrayList<ThreadLock>(synchronizers.length);
for (LockInfo info: synchronizers) {
locks.add(getSynchronizer(info));
}
return locks;
} }
|
public class class_name {
    /** Maps each locked synchronizer of the given thread to a {@code ThreadLock}. */
    static @Nonnull List<ThreadLock> getSynchronizers(final ThreadInfo threadInfo) {
        final LockInfo[] synchronizers = threadInfo.getLockedSynchronizers();
        final List<ThreadLock> locks = new ArrayList<ThreadLock>(synchronizers.length);
        for (LockInfo info: synchronizers) {
            locks.add(getSynchronizer(info)); // depends on control dependency: [for], data = [info]
        }
        return locks;
    } }
|
public class class_name {
    /**
     * Reads the whole request entity into memory and re-installs a replayable
     * copy on the request so later filters/resources can read it again.
     *
     * @throws ContainerException if reading the entity stream fails
     */
    private byte[] safelyGetContent(HttpRequestContext request) {
        ContainerRequest containerRequest = (ContainerRequest) request;
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try {
            ReaderWriter.writeTo(containerRequest.getEntityInputStream(), buffer);
        } catch (IOException ex) {
            throw new ContainerException(ex);
        }
        byte[] body = buffer.toByteArray();
        // Swap the consumed stream for an in-memory copy of the bytes.
        containerRequest.setEntityInputStream(new ByteArrayInputStream(body));
        return body;
    } }
|
public class class_name {
    /**
     * Reads the whole request entity into memory, resets the entity stream to
     * a replayable copy, and returns the raw bytes; wraps I/O failures in a
     * ContainerException.
     */
    private byte[] safelyGetContent(HttpRequestContext request) {
        ContainerRequest containerRequest = (ContainerRequest) request;
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        InputStream in = containerRequest.getEntityInputStream();
        try {
            ReaderWriter.writeTo(in, out); // depends on control dependency: [try], data = [none]
            byte[] content = out.toByteArray();
            // Reset the input stream so that it can be read again by another filter or resource
            containerRequest.setEntityInputStream(new ByteArrayInputStream(content)); // depends on control dependency: [try], data = [none]
            return content; // depends on control dependency: [try], data = [none]
        } catch (IOException ex) {
            throw new ContainerException(ex);
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Collects every sticky event whose registered id equals {@code eventId}
     * and whose runtime type is assignable to {@code eventType}. Iteration is
     * done under the sticky-map lock.
     */
    public List<Object> getStickyEvents(Class<?> eventType, String eventId) {
        final List<Object> matches = new ArrayList<>();
        synchronized (stickyEventMap) {
            for (Map.Entry<Object, String> candidate : stickyEventMap.entrySet()) {
                final Object event = candidate.getKey();
                if (N.equals(candidate.getValue(), eventId)
                        && eventType.isAssignableFrom(event.getClass())) {
                    matches.add(event);
                }
            }
        }
        return matches;
    } }
|
public class class_name {
    /**
     * Returns all sticky events matching the given id whose type is assignable
     * to {@code eventType}; iterates under the sticky-map lock.
     */
    public List<Object> getStickyEvents(Class<?> eventType, String eventId) {
        final List<Object> result = new ArrayList<>();
        synchronized (stickyEventMap) {
            for (Map.Entry<Object, String> entry : stickyEventMap.entrySet()) {
                if (N.equals(entry.getValue(), eventId) && eventType.isAssignableFrom(entry.getKey().getClass())) {
                    result.add(entry.getKey()); // depends on control dependency: [if], data = [none]
                }
            }
        }
        return result;
    } }
|
public class class_name {
    /**
     * Schedules the "report a problem" alert to appear after the configured
     * delay; does nothing while the view is disabled.
     */
    public void showReportProblem() {
        if (!isEnabled) {
            return;
        }
        new Handler().postDelayed(new Runnable() {
            @Override
            public void run() {
                show(getContext().getString(R.string.report_problem),
                        NavigationConstants.ALERT_VIEW_PROBLEM_DURATION, true);
            }
        }, THREE_SECOND_DELAY_IN_MILLIS);
    } }
|
public class class_name {
    /**
     * Schedules the "report a problem" alert after a short delay; no-op while
     * the view is disabled.
     */
    public void showReportProblem() {
        if (!isEnabled) {
            return; // depends on control dependency: [if], data = [none]
        }
        final Handler handler = new Handler();
        handler.postDelayed(new Runnable() {
            @Override
            public void run() {
                show(getContext().getString(R.string.report_problem),
                        NavigationConstants.ALERT_VIEW_PROBLEM_DURATION, true);
            }
        }, THREE_SECOND_DELAY_IN_MILLIS);
    } }
|
public class class_name {
    /**
     * Returns the serializer cached for {@code type}, lazily creating and
     * caching one via the registry's factory on first use. Returns {@code null}
     * when no serializer is cached and no factory exists for the type.
     */
    @SuppressWarnings("unchecked")
    private <T> TypeSerializer<T> getSerializer(Class<T> type) {
        TypeSerializer<T> cached = (TypeSerializer<T>) serializers.get(type);
        if (cached != null) {
            return cached;
        }
        TypeSerializerFactory factory = registry.factory(type);
        if (factory == null) {
            return null;
        }
        TypeSerializer<T> created = (TypeSerializer<T>) factory.createSerializer(type);
        serializers.put(type, created);
        return created;
    } }
|
public class class_name {
    /**
     * Returns the cached serializer for {@code type}, creating and caching one
     * via the registry factory on demand; null if no factory exists.
     */
    @SuppressWarnings("unchecked")
    private <T> TypeSerializer<T> getSerializer(Class<T> type) {
        TypeSerializer<T> serializer = (TypeSerializer<T>) serializers.get(type);
        if (serializer == null) {
            TypeSerializerFactory factory = registry.factory(type);
            if (factory != null) {
                serializer = (TypeSerializer<T>) factory.createSerializer(type); // depends on control dependency: [if], data = [none]
                serializers.put(type, serializer); // depends on control dependency: [if], data = [none]
            }
        }
        return serializer;
    } }
|
public class class_name {
    /**
     * Derives this transfer's final state from its sub-transfers: any Failed
     * sub-transfer makes the whole transfer Failed immediately; otherwise any
     * Canceled one makes it Canceled; otherwise it is Completed.
     */
    public void collateFinalState() {
        boolean anyCanceled = false;
        for (T transfer : subTransfers) {
            if (transfer.getState() == TransferState.Failed) {
                setState(TransferState.Failed);
                return;
            }
            if (transfer.getState() == TransferState.Canceled) {
                anyCanceled = true;
            }
        }
        setState(anyCanceled ? TransferState.Canceled : TransferState.Completed);
    } }
|
public class class_name {
    /**
     * Collapses sub-transfer states into a final state: Failed wins outright,
     * then Canceled, otherwise Completed.
     */
    public void collateFinalState() {
        boolean seenCanceled = false;
        for ( T download : subTransfers ) {
            if ( download.getState() == TransferState.Failed ) {
                setState(TransferState.Failed);
                // depends on control dependency: [if], data = [none]
                return;
                // depends on control dependency: [if], data = [none]
            } else if ( download.getState() == TransferState.Canceled ) {
                seenCanceled = true;
                // depends on control dependency: [if], data = [none]
            }
        }
        if ( seenCanceled )
            setState(TransferState.Canceled);
        else
            setState(TransferState.Completed);
    } }
|
public class class_name {
    /**
     * Creates an outbound SSLEngine bound to the peer host/port (so the SSL
     * session id can be re-used — PK46069) and applies the outbound link
     * configuration before returning it.
     */
    public static SSLEngine getOutboundSSLEngine(SSLContext context,
            SSLLinkConfig config, String host, int port, SSLConnectionLink connLink) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.entry(tc, "getOutboundSSLEngine, host=" + host + ", port=" + port);
        }
        final SSLEngine outbound = context.createSSLEngine(host, port);
        configureEngine(outbound, FlowType.OUTBOUND, config, connLink);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.exit(tc, "getOutboundSSLEngine, hc=" + outbound.hashCode());
        }
        return outbound;
    } }
|
public class class_name {
    /**
     * Creates and configures an outbound SSLEngine bound to host/port so the
     * SSL session id can be re-used (PK46069).
     */
    public static SSLEngine getOutboundSSLEngine(SSLContext context,
            SSLLinkConfig config, String host, int port, SSLConnectionLink connLink) {
        // PK46069 - use engine that allows session id re-use
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.entry(tc, "getOutboundSSLEngine, host=" + host + ", port=" + port); // depends on control dependency: [if], data = [none]
        }
        SSLEngine engine = context.createSSLEngine(host, port);
        configureEngine(engine, FlowType.OUTBOUND, config, connLink);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.exit(tc, "getOutboundSSLEngine, hc=" + engine.hashCode()); // depends on control dependency: [if], data = [none]
        }
        return engine;
    } }
|
public class class_name {
    /**
     * Bulk-adds DNS TXT records. Each input resource's domain, String and ttl
     * fields are copied into a fresh request object, then a single bulk add
     * request is issued.
     *
     * @return the bulk responses, or null when no resources were supplied
     */
    public static base_responses add(nitro_service client, dnstxtrec resources[]) throws Exception {
        if (resources == null || resources.length == 0) {
            return null;
        }
        dnstxtrec requests[] = new dnstxtrec[resources.length];
        for (int idx = 0; idx < resources.length; idx++) {
            dnstxtrec copy = new dnstxtrec();
            copy.domain = resources[idx].domain;
            copy.String = resources[idx].String;
            copy.ttl = resources[idx].ttl;
            requests[idx] = copy;
        }
        return add_bulk_request(client, requests);
    } }
|
public class class_name {
    /**
     * Bulk-adds DNS TXT records by copying each resource's fields into a fresh
     * request object; returns null when no resources were supplied.
     */
    public static base_responses add(nitro_service client, dnstxtrec resources[]) throws Exception {
        base_responses result = null;
        if (resources != null && resources.length > 0) {
            dnstxtrec addresources[] = new dnstxtrec[resources.length];
            for (int i=0;i<resources.length;i++){
                addresources[i] = new dnstxtrec(); // depends on control dependency: [for], data = [i]
                addresources[i].domain = resources[i].domain; // depends on control dependency: [for], data = [i]
                addresources[i].String = resources[i].String; // depends on control dependency: [for], data = [i]
                addresources[i].ttl = resources[i].ttl; // depends on control dependency: [for], data = [i]
            }
            result = add_bulk_request(client, addresources);
        }
        return result;
    } }
|
public class class_name {
    /**
     * Sets the checked state when the wrapped view is a CompoundButton;
     * otherwise does nothing. Returns this builder for chaining.
     */
    public T checked(boolean checked) {
        if (view instanceof CompoundButton) {
            ((CompoundButton) view).setChecked(checked);
        }
        return self();
    } }
|
public class class_name {
    /**
     * Sets the checked state when the wrapped view is a CompoundButton; no-op
     * otherwise. Returns this builder for chaining.
     */
    public T checked(boolean checked) {
        if (view instanceof CompoundButton) {
            CompoundButton cb = (CompoundButton) view;
            cb.setChecked(checked); // depends on control dependency: [if], data = [none]
        }
        return self();
    } }
|
public class class_name {
    /**
     * Starts the Thrift-over-HTTP CLI service: builds a bounded worker pool,
     * configures a Jetty server (optionally with SSL from the keystore
     * settings), registers the Thrift servlet on the configured HTTP path, and
     * blocks in join() until the server stops. Any startup failure is logged
     * as fatal and exits the JVM with -1.
     */
    @Override
    public void run() {
      try {
        // Server thread pool
        // Start with minWorkerThreads, expand till maxWorkerThreads and reject subsequent requests
        String threadPoolName = "HiveServer2-HttpHandler-Pool";
        ThreadPoolExecutor executorService = new ThreadPoolExecutor(minWorkerThreads, maxWorkerThreads,
            workerKeepAliveTime, TimeUnit.SECONDS, new SynchronousQueue<Runnable>(),
            new ThreadFactoryWithGarbageCleanup(threadPoolName));
        ExecutorThreadPool threadPool = new ExecutorThreadPool(executorService);
        // HTTP Server
        httpServer = new org.eclipse.jetty.server.Server(threadPool);
        // Connector configs
        ConnectionFactory[] connectionFactories;
        boolean useSsl = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_USE_SSL);
        String schemeName = useSsl ? "https" : "http";
        // Change connector if SSL is used
        if (useSsl) {
          String keyStorePath = hiveConf.getVar(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH).trim();
          String keyStorePassword = ShimLoader.getHadoopShims().getPassword(hiveConf,
              HiveConf.ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname);
          if (keyStorePath.isEmpty()) {
            throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname
                + " Not configured for SSL connection");
          }
          SslContextFactory sslContextFactory = new SslContextFactory();
          // Blacklisted protocols (e.g. SSLv3) are excluded before the keystore is wired in.
          String[] excludedProtocols = hiveConf.getVar(ConfVars.HIVE_SSL_PROTOCOL_BLACKLIST).split(",");
          LOG.info("HTTP Server SSL: adding excluded protocols: " + Arrays.toString(excludedProtocols));
          sslContextFactory.addExcludeProtocols(excludedProtocols);
          LOG.info("HTTP Server SSL: SslContextFactory.getExcludeProtocols = " +
              Arrays.toString(sslContextFactory.getExcludeProtocols()));
          sslContextFactory.setKeyStorePath(keyStorePath);
          sslContextFactory.setKeyStorePassword(keyStorePassword);
          connectionFactories = AbstractConnectionFactory.getFactories(
              sslContextFactory, new HttpConnectionFactory());
        } else {
          connectionFactories = new ConnectionFactory[] { new HttpConnectionFactory() };
        }
        ServerConnector connector = new ServerConnector(
            httpServer,
            null,
            // Call this full constructor to set this, which forces daemon threads:
            new ScheduledExecutorScheduler("HiveServer2-HttpHandler-JettyScheduler", true),
            null,
            -1,
            -1,
            connectionFactories);
        connector.setPort(portNum);
        // Linux:yes, Windows:no
        connector.setReuseAddress(!Shell.WINDOWS);
        int maxIdleTime = (int) hiveConf.getTimeVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_MAX_IDLE_TIME,
            TimeUnit.MILLISECONDS);
        connector.setIdleTimeout(maxIdleTime);
        httpServer.addConnector(connector);
        // Thrift configs
        hiveAuthFactory = new HiveAuthFactory(hiveConf);
        TProcessor processor = new TCLIService.Processor<Iface>(this);
        TProtocolFactory protocolFactory = new TBinaryProtocol.Factory();
        // Set during the init phase of HiveServer2 if auth mode is kerberos
        // UGI for the hive/_HOST (kerberos) principal
        UserGroupInformation serviceUGI = cliService.getServiceUGI();
        // UGI for the http/_HOST (SPNego) principal
        UserGroupInformation httpUGI = cliService.getHttpUGI();
        String authType = hiveConf.getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION);
        TServlet thriftHttpServlet = new ThriftHttpServlet(processor, protocolFactory, authType,
            serviceUGI, httpUGI);
        // Context handler
        final ServletContextHandler context = new ServletContextHandler(
            ServletContextHandler.SESSIONS);
        context.setContextPath("/");
        String httpPath = getHttpPath(hiveConf
            .getVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH));
        httpServer.setHandler(context);
        context.addServlet(new ServletHolder(thriftHttpServlet), httpPath);
        // TODO: check defaults: maxTimeout, keepalive, maxBodySize, bodyRecieveDuration, etc.
        // Finally, start the server
        httpServer.start();
        String msg = "Started " + ThriftHttpCLIService.class.getSimpleName() + " in " + schemeName
            + " mode on port " + connector.getLocalPort()+ " path=" + httpPath + " with " + minWorkerThreads + "..."
            + maxWorkerThreads + " worker threads";
        LOG.info(msg);
        // Block the service thread until the Jetty server shuts down.
        httpServer.join();
      } catch (Throwable t) {
        LOG.fatal(
            "Error starting HiveServer2: could not start "
                + ThriftHttpCLIService.class.getSimpleName(), t);
        System.exit(-1);
      }
    } }
|
public class class_name {
    /**
     * Starts the Thrift-over-HTTP CLI service: builds a bounded worker pool,
     * configures a Jetty server (optionally with SSL), registers the Thrift
     * servlet on the configured HTTP path, and blocks in join() until the
     * server stops. Startup failure is fatal and exits the JVM with -1.
     */
    @Override
    public void run() {
      try {
        // Server thread pool
        // Start with minWorkerThreads, expand till maxWorkerThreads and reject subsequent requests
        String threadPoolName = "HiveServer2-HttpHandler-Pool";
        ThreadPoolExecutor executorService = new ThreadPoolExecutor(minWorkerThreads, maxWorkerThreads,
            workerKeepAliveTime, TimeUnit.SECONDS, new SynchronousQueue<Runnable>(),
            new ThreadFactoryWithGarbageCleanup(threadPoolName));
        ExecutorThreadPool threadPool = new ExecutorThreadPool(executorService);
        // HTTP Server
        httpServer = new org.eclipse.jetty.server.Server(threadPool); // depends on control dependency: [try], data = [none]
        // Connector configs
        ConnectionFactory[] connectionFactories;
        boolean useSsl = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_USE_SSL);
        String schemeName = useSsl ? "https" : "http";
        // Change connector if SSL is used
        if (useSsl) {
          String keyStorePath = hiveConf.getVar(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH).trim();
          String keyStorePassword = ShimLoader.getHadoopShims().getPassword(hiveConf,
              HiveConf.ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname);
          if (keyStorePath.isEmpty()) {
            throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname
                + " Not configured for SSL connection");
          }
          SslContextFactory sslContextFactory = new SslContextFactory();
          String[] excludedProtocols = hiveConf.getVar(ConfVars.HIVE_SSL_PROTOCOL_BLACKLIST).split(",");
          LOG.info("HTTP Server SSL: adding excluded protocols: " + Arrays.toString(excludedProtocols)); // depends on control dependency: [if], data = [none]
          sslContextFactory.addExcludeProtocols(excludedProtocols); // depends on control dependency: [if], data = [none]
          LOG.info("HTTP Server SSL: SslContextFactory.getExcludeProtocols = " +
              Arrays.toString(sslContextFactory.getExcludeProtocols())); // depends on control dependency: [if], data = [none]
          sslContextFactory.setKeyStorePath(keyStorePath); // depends on control dependency: [if], data = [none]
          sslContextFactory.setKeyStorePassword(keyStorePassword); // depends on control dependency: [if], data = [none]
          connectionFactories = AbstractConnectionFactory.getFactories(
              sslContextFactory, new HttpConnectionFactory()); // depends on control dependency: [if], data = [none]
        } else {
          connectionFactories = new ConnectionFactory[] { new HttpConnectionFactory() }; // depends on control dependency: [if], data = [none]
        }
        ServerConnector connector = new ServerConnector(
            httpServer,
            null,
            // Call this full constructor to set this, which forces daemon threads:
            new ScheduledExecutorScheduler("HiveServer2-HttpHandler-JettyScheduler", true),
            null,
            -1,
            -1,
            connectionFactories);
        connector.setPort(portNum); // depends on control dependency: [try], data = [none]
        // Linux:yes, Windows:no
        connector.setReuseAddress(!Shell.WINDOWS); // depends on control dependency: [try], data = [none]
        int maxIdleTime = (int) hiveConf.getTimeVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_MAX_IDLE_TIME,
            TimeUnit.MILLISECONDS);
        connector.setIdleTimeout(maxIdleTime); // depends on control dependency: [try], data = [none]
        httpServer.addConnector(connector); // depends on control dependency: [try], data = [none]
        // Thrift configs
        hiveAuthFactory = new HiveAuthFactory(hiveConf); // depends on control dependency: [try], data = [none]
        TProcessor processor = new TCLIService.Processor<Iface>(this);
        TProtocolFactory protocolFactory = new TBinaryProtocol.Factory();
        // Set during the init phase of HiveServer2 if auth mode is kerberos
        // UGI for the hive/_HOST (kerberos) principal
        UserGroupInformation serviceUGI = cliService.getServiceUGI();
        // UGI for the http/_HOST (SPNego) principal
        UserGroupInformation httpUGI = cliService.getHttpUGI();
        String authType = hiveConf.getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION);
        TServlet thriftHttpServlet = new ThriftHttpServlet(processor, protocolFactory, authType,
            serviceUGI, httpUGI);
        // Context handler
        final ServletContextHandler context = new ServletContextHandler(
            ServletContextHandler.SESSIONS);
        context.setContextPath("/"); // depends on control dependency: [try], data = [none]
        String httpPath = getHttpPath(hiveConf
            .getVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH));
        httpServer.setHandler(context); // depends on control dependency: [try], data = [none]
        context.addServlet(new ServletHolder(thriftHttpServlet), httpPath); // depends on control dependency: [try], data = [none]
        // TODO: check defaults: maxTimeout, keepalive, maxBodySize, bodyRecieveDuration, etc.
        // Finally, start the server
        httpServer.start(); // depends on control dependency: [try], data = [none]
        String msg = "Started " + ThriftHttpCLIService.class.getSimpleName() + " in " + schemeName
            + " mode on port " + connector.getLocalPort()+ " path=" + httpPath + " with " + minWorkerThreads + "..."
            + maxWorkerThreads + " worker threads"; // depends on control dependency: [try], data = [none]
        LOG.info(msg); // depends on control dependency: [try], data = [none]
        httpServer.join(); // depends on control dependency: [try], data = [none]
      } catch (Throwable t) {
        LOG.fatal(
            "Error starting HiveServer2: could not start "
                + ThriftHttpCLIService.class.getSimpleName(), t);
        System.exit(-1);
      } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Returns the fixed date of January 1 of the year containing
     * {@code fixedDate}, for a year that straddles the Gregorian cutover.
     * When the cutover removes January 1 from the (Gregorian) year, the
     * cutover date itself is treated as the first day of the year.
     */
    private long getFixedDateJan1(BaseCalendar.Date date, long fixedDate) {
        assert date.getNormalizedYear() == gregorianCutoverYear ||
            date.getNormalizedYear() == gregorianCutoverYearJulian;
        if (gregorianCutoverYear != gregorianCutoverYearJulian
                && fixedDate >= gregorianCutoverDate) {
            // Dates before the cutover date don't exist in the same
            // (Gregorian) year, so no January 1 exists in the year; the year
            // effectively begins at the cutover date.
            return gregorianCutoverDate;
        }
        // January 1 of the normalized year exists on the Julian calendar.
        return getJulianCalendarSystem().getFixedDate(
                date.getNormalizedYear(), BaseCalendar.JANUARY, 1, null);
    } }
|
public class class_name {
    /**
     * Returns the fixed date of January 1 for a Gregorian-cutover year; when
     * the cutover removes January 1, the cutover date itself is returned.
     */
    private long getFixedDateJan1(BaseCalendar.Date date, long fixedDate) {
        assert date.getNormalizedYear() == gregorianCutoverYear ||
            date.getNormalizedYear() == gregorianCutoverYearJulian;
        if (gregorianCutoverYear != gregorianCutoverYearJulian) {
            if (fixedDate >= gregorianCutoverDate) {
                // Dates before the cutover date don't exist
                // in the same (Gregorian) year. So, no
                // January 1 exists in the year. Use the
                // cutover date as the first day of the year.
                return gregorianCutoverDate; // depends on control dependency: [if], data = [none]
            }
        }
        // January 1 of the normalized year should exist.
        BaseCalendar juliancal = getJulianCalendarSystem();
        return juliancal.getFixedDate(date.getNormalizedYear(), BaseCalendar.JANUARY, 1, null);
    } }
|
public class class_name {
    /**
     * Marshalls each field of the given ServerReplicationParameters to the
     * protocol marshaller using the static field bindings.
     *
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(ServerReplicationParameters serverReplicationParameters, ProtocolMarshaller protocolMarshaller) {
        if (serverReplicationParameters == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(serverReplicationParameters.getSeedTime(), SEEDTIME_BINDING);
            protocolMarshaller.marshall(serverReplicationParameters.getFrequency(), FREQUENCY_BINDING);
            protocolMarshaller.marshall(serverReplicationParameters.getRunOnce(), RUNONCE_BINDING);
            protocolMarshaller.marshall(serverReplicationParameters.getLicenseType(), LICENSETYPE_BINDING);
            protocolMarshaller.marshall(serverReplicationParameters.getNumberOfRecentAmisToKeep(), NUMBEROFRECENTAMISTOKEEP_BINDING);
            protocolMarshaller.marshall(serverReplicationParameters.getEncrypted(), ENCRYPTED_BINDING);
            protocolMarshaller.marshall(serverReplicationParameters.getKmsKeyId(), KMSKEYID_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    } }
|
public class class_name {
    /**
     * Marshalls each field of the given ServerReplicationParameters via the
     * static field bindings; wraps failures in SdkClientException.
     */
    public void marshall(ServerReplicationParameters serverReplicationParameters, ProtocolMarshaller protocolMarshaller) {
        if (serverReplicationParameters == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(serverReplicationParameters.getSeedTime(), SEEDTIME_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(serverReplicationParameters.getFrequency(), FREQUENCY_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(serverReplicationParameters.getRunOnce(), RUNONCE_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(serverReplicationParameters.getLicenseType(), LICENSETYPE_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(serverReplicationParameters.getNumberOfRecentAmisToKeep(), NUMBEROFRECENTAMISTOKEEP_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(serverReplicationParameters.getEncrypted(), ENCRYPTED_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(serverReplicationParameters.getKmsKeyId(), KMSKEYID_BINDING); // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Removes every CPRuleUserSegmentRel associated with the given CP rule id.
     */
    @Override
    public void removeByCPRuleId(long CPRuleId) {
        for (CPRuleUserSegmentRel rel
                : findByCPRuleId(CPRuleId, QueryUtil.ALL_POS, QueryUtil.ALL_POS, null)) {
            remove(rel);
        }
    } }
|
public class class_name {
    /** Removes all CPRuleUserSegmentRel rows matching the given CP rule id. */
    @Override
    public void removeByCPRuleId(long CPRuleId) {
        for (CPRuleUserSegmentRel cpRuleUserSegmentRel : findByCPRuleId(
                CPRuleId, QueryUtil.ALL_POS, QueryUtil.ALL_POS, null)) {
            remove(cpRuleUserSegmentRel); // depends on control dependency: [for], data = [cpRuleUserSegmentRel]
        }
    } }
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.