| code (string, lengths 130–281k) | code_dependency (string, lengths 182–306k) |
|---|---|
public class class_name {
/**
 * Emits an invokedynamic instruction that delegates to the original method.
 * For static methods the original descriptor is used as-is; for instance
 * methods the receiver type is prepended to the descriptor and {@code this}
 * is loaded first, so the instance travels as the first dynamic argument.
 */
private void generateInvokeDynamic(MutableClass mutableClass, MethodNode originalMethod,
String originalMethodName, RobolectricGeneratorAdapter generator) {
// Method handle pointing at the original (pre-instrumentation) method.
Handle original =
new Handle(getTag(originalMethod), mutableClass.classType.getInternalName(),
originalMethod.name, originalMethod.desc);
if (generator.isStatic()) {
generator.loadArgs();
generator.invokeDynamic(originalMethodName, originalMethod.desc, BOOTSTRAP_STATIC, original);
} else {
// Instance call: descriptor gains the declaring class as an extra first parameter.
String desc = "(" + mutableClass.classType.getDescriptor() + originalMethod.desc.substring(1);
generator.loadThis();
generator.loadArgs();
generator.invokeDynamic(originalMethodName, desc, BOOTSTRAP, original);
}
generator.returnValue();
} }
|
public class class_name {
// NOTE: annotated variant of generateInvokeDynamic — the trailing
// "depends on control dependency" markers are dataset labels, not
// functional comments; the executable code is unchanged.
private void generateInvokeDynamic(MutableClass mutableClass, MethodNode originalMethod,
String originalMethodName, RobolectricGeneratorAdapter generator) {
Handle original =
new Handle(getTag(originalMethod), mutableClass.classType.getInternalName(),
originalMethod.name, originalMethod.desc);
if (generator.isStatic()) {
generator.loadArgs(); // depends on control dependency: [if], data = [none]
generator.invokeDynamic(originalMethodName, originalMethod.desc, BOOTSTRAP_STATIC, original); // depends on control dependency: [if], data = [none]
} else {
String desc = "(" + mutableClass.classType.getDescriptor() + originalMethod.desc.substring(1);
generator.loadThis();
generator.loadArgs();
generator.invokeDynamic(originalMethodName, desc, BOOTSTRAP, original);
}
generator.returnValue();
} }
|
public class class_name {
/**
 * Returns the cached listing for the given bucket and (optional) object name
 * prefix, or null when there is no parent entry or the cached entry has
 * expired. Expired entries are cleaned up as a side effect of the lookup.
 */
public synchronized List<GoogleCloudStorageItemInfo> getList(
    String bucket, @Nullable String objectNamePrefix) {
  final PrefixKey lookupKey = new PrefixKey(bucket, objectNamePrefix);
  final Entry<PrefixKey, CacheValue<Object>> parent = getParentEntry(prefixMap, lookupKey);
  if (parent == null) {
    return null;
  }
  if (!isExpired(parent.getValue())) {
    return listItems(lookupKey);
  }
  // Stale entry: drop the cached lists before reporting a miss.
  cleanupLists(lookupKey);
  return null;
} }
|
public class class_name {
// NOTE: annotated variant of getList — the trailing "depends on control
// dependency" markers are dataset labels; the executable code is unchanged.
public synchronized List<GoogleCloudStorageItemInfo> getList(
String bucket, @Nullable String objectNamePrefix) {
PrefixKey key = new PrefixKey(bucket, objectNamePrefix);
Entry<PrefixKey, CacheValue<Object>> entry = getParentEntry(prefixMap, key);
if (entry == null) {
return null; // depends on control dependency: [if], data = [none]
}
if (isExpired(entry.getValue())) {
cleanupLists(key); // depends on control dependency: [if], data = [none]
return null; // depends on control dependency: [if], data = [none]
}
return listItems(key);
} }
|
public class class_name {
/**
 * Appends one outline (table-of-contents) entry to the builder, followed by
 * a newline. In HTML mode an indented {@code <li><a href="#id">...</a></li>}
 * element is emitted; otherwise a quoted, tab-indented text entry is built
 * from {@code getOutlineEntryFormat()}. The section number is included only
 * when auto section numbering is enabled.
 */
protected void addOutlineEntry(StringBuilder outline, int level, String sectionNumber, String title,
String sectionId, boolean htmlOutput) {
if (htmlOutput) {
indent(outline, level - 1, " "); //$NON-NLS-1$
outline.append("<li><a href=\"#"); //$NON-NLS-1$
outline.append(sectionId);
outline.append("\">"); //$NON-NLS-1$
if (isAutoSectionNumbering() && !Strings.isEmpty(sectionNumber)) {
outline.append(sectionNumber).append(". "); //$NON-NLS-1$
}
outline.append(title);
outline.append("</a></li>"); //$NON-NLS-1$
} else {
final String prefix = "*"; //$NON-NLS-1$
final String entry;
outline.append("> "); //$NON-NLS-1$
indent(outline, level - 1, "\t"); //$NON-NLS-1$
// Text mode: the entry format decides how prefix/number/title/id are laid out.
if (isAutoSectionNumbering()) {
entry = MessageFormat.format(getOutlineEntryFormat(), prefix,
Strings.emptyIfNull(sectionNumber), title, sectionId);
} else {
entry = MessageFormat.format(getOutlineEntryFormat(), prefix, title, sectionId);
}
outline.append(entry);
}
outline.append("\n"); //$NON-NLS-1$
} }
|
public class class_name {
// NOTE: annotated variant of addOutlineEntry — the trailing "depends on
// control dependency" markers are dataset labels; code is unchanged.
protected void addOutlineEntry(StringBuilder outline, int level, String sectionNumber, String title,
String sectionId, boolean htmlOutput) {
if (htmlOutput) {
indent(outline, level - 1, " "); //$NON-NLS-1$ // depends on control dependency: [if], data = [none]
outline.append("<li><a href=\"#"); //$NON-NLS-1$ // depends on control dependency: [if], data = [none]
outline.append(sectionId); // depends on control dependency: [if], data = [none]
outline.append("\">"); //$NON-NLS-1$ // depends on control dependency: [if], data = [none]
if (isAutoSectionNumbering() && !Strings.isEmpty(sectionNumber)) {
outline.append(sectionNumber).append(". "); //$NON-NLS-1$ // depends on control dependency: [if], data = [none]
}
outline.append(title); // depends on control dependency: [if], data = [none]
outline.append("</a></li>"); //$NON-NLS-1$ // depends on control dependency: [if], data = [none]
} else {
final String prefix = "*"; //$NON-NLS-1$
final String entry;
outline.append("> "); //$NON-NLS-1$ // depends on control dependency: [if], data = [none]
indent(outline, level - 1, "\t"); //$NON-NLS-1$ // depends on control dependency: [if], data = [none]
if (isAutoSectionNumbering()) {
entry = MessageFormat.format(getOutlineEntryFormat(), prefix,
Strings.emptyIfNull(sectionNumber), title, sectionId); // depends on control dependency: [if], data = [none]
} else {
entry = MessageFormat.format(getOutlineEntryFormat(), prefix, title, sectionId); // depends on control dependency: [if], data = [none]
}
outline.append(entry); // depends on control dependency: [if], data = [none]
}
outline.append("\n"); //$NON-NLS-1$
} }
|
public class class_name {
/**
 * Serializes the given object into a bulk entry and writes it to the index.
 * The object must be non-null; a null serialization result (entry skipped
 * by the bulk writer) is silently ignored.
 */
public void writeToIndex(Object object) {
Assert.notNull(object, "no object data given");
lazyInitWriting();
BytesRef serialized = bulkEntryWriter.writeBulkEntry(object);
if (serialized != null) {
doWriteToIndex(serialized);
}
} }
|
public class class_name {
// NOTE: annotated variant of writeToIndex — the trailing "depends on
// control dependency" marker is a dataset label; code is unchanged.
public void writeToIndex(Object object) {
Assert.notNull(object, "no object data given");
lazyInitWriting();
BytesRef serialized = bulkEntryWriter.writeBulkEntry(object);
if (serialized != null) {
doWriteToIndex(serialized); // depends on control dependency: [if], data = [(serialized]
}
} }
|
public class class_name {
/**
 * Returns the frequency stored in the automaton for the given word, or 0
 * when frequencies are not included in the dictionary, the word cannot be
 * encoded, or no frequency entry exists for the word.
 */
public int getFrequency(final CharSequence word) {
if (!dictionaryMetadata.isFrequencyIncluded()) {
return 0;
}
final byte separator = dictionaryMetadata.getSeparator();
try {
byteBuffer = charSequenceToBytes(word);
} catch (UnmappableInputException e) {
// The word contains characters the dictionary's encoding cannot represent.
return 0;
}
final MatchResult match = matcher.match(matchResult, byteBuffer.array(), 0, byteBuffer.remaining(), rootNode);
if (match.kind == SEQUENCE_IS_A_PREFIX) {
// The word is a prefix in the automaton; the frequency (if any) hangs
// off an arc labeled with the separator byte.
final int arc = fsa.getArc(match.node, separator);
if (arc != 0 && !fsa.isArcFinal(arc)) {
finalStatesIterator.restartFrom(fsa.getEndNode(arc));
if (finalStatesIterator.hasNext()) {
final ByteBuffer bb = finalStatesIterator.next();
final byte[] ba = bb.array();
final int bbSize = bb.remaining();
//the last byte contains the frequency after a separator
return ba[bbSize - 1] - FIRST_RANGE_CODE;
}
}
}
return 0;
} }
|
public class class_name {
// NOTE: annotated variant of getFrequency — the standalone "depends on ..."
// comment lines are dataset labels; the executable code is unchanged.
public int getFrequency(final CharSequence word) {
if (!dictionaryMetadata.isFrequencyIncluded()) {
return 0;
// depends on control dependency: [if], data = [none]
}
final byte separator = dictionaryMetadata.getSeparator();
try {
byteBuffer = charSequenceToBytes(word);
// depends on control dependency: [try], data = [none]
} catch (UnmappableInputException e) {
return 0;
}
// depends on control dependency: [catch], data = [none]
final MatchResult match = matcher.match(matchResult, byteBuffer.array(), 0, byteBuffer.remaining(), rootNode);
if (match.kind == SEQUENCE_IS_A_PREFIX) {
final int arc = fsa.getArc(match.node, separator);
if (arc != 0 && !fsa.isArcFinal(arc)) {
finalStatesIterator.restartFrom(fsa.getEndNode(arc));
// depends on control dependency: [if], data = [(arc]
if (finalStatesIterator.hasNext()) {
final ByteBuffer bb = finalStatesIterator.next();
final byte[] ba = bb.array();
final int bbSize = bb.remaining();
//the last byte contains the frequency after a separator
return ba[bbSize - 1] - FIRST_RANGE_CODE;
// depends on control dependency: [if], data = [none]
}
}
}
return 0;
} }
|
public class class_name {
/**
 * Copies every valid Alluxio property key found in the Hadoop configuration
 * into the given AlluxioProperties (at RUNTIME source precedence) and
 * returns a validated InstancedConfiguration built from the merged result.
 */
public static InstancedConfiguration mergeHadoopConfiguration(
org.apache.hadoop.conf.Configuration hadoopConf, AlluxioProperties alluxioProps) {
// Load Alluxio configuration if any and merge to the one in Alluxio file system
// Push Alluxio configuration to the Job configuration
Properties alluxioConfProperties = new Properties();
// Load any Alluxio configuration parameters existing in the Hadoop configuration.
for (Map.Entry<String, String> entry : hadoopConf) {
String propertyName = entry.getKey();
if (PropertyKey.isValid(propertyName)) {
alluxioConfProperties.put(propertyName, entry.getValue());
}
}
LOG.info("Loading Alluxio properties from Hadoop configuration: {}", alluxioConfProperties);
// Merge the relevant Hadoop configuration into Alluxio's configuration.
alluxioProps.merge(alluxioConfProperties, Source.RUNTIME);
// Creating a new instanced configuration from an AlluxioProperties object isn't expensive.
InstancedConfiguration mergedConf = new InstancedConfiguration(alluxioProps);
mergedConf.validate();
return mergedConf;
} }
|
public class class_name {
// NOTE: annotated variant of mergeHadoopConfiguration — the trailing
// "depends on control dependency" marker is a dataset label; code unchanged.
public static InstancedConfiguration mergeHadoopConfiguration(
org.apache.hadoop.conf.Configuration hadoopConf, AlluxioProperties alluxioProps) {
// Load Alluxio configuration if any and merge to the one in Alluxio file system
// Push Alluxio configuration to the Job configuration
Properties alluxioConfProperties = new Properties();
// Load any Alluxio configuration parameters existing in the Hadoop configuration.
for (Map.Entry<String, String> entry : hadoopConf) {
String propertyName = entry.getKey();
if (PropertyKey.isValid(propertyName)) {
alluxioConfProperties.put(propertyName, entry.getValue()); // depends on control dependency: [if], data = [none]
}
}
LOG.info("Loading Alluxio properties from Hadoop configuration: {}", alluxioConfProperties);
// Merge the relevant Hadoop configuration into Alluxio's configuration.
alluxioProps.merge(alluxioConfProperties, Source.RUNTIME);
// Creating a new instanced configuration from an AlluxioProperties object isn't expensive.
InstancedConfiguration mergedConf = new InstancedConfiguration(alluxioProps);
mergedConf.validate();
return mergedConf;
} }
|
public class class_name {
/**
 * Opens the Thrift connection to the configured Cassandra host and returns
 * this client. A plain or SSL socket is created depending on {@code params},
 * SO_KEEPALIVE is applied when configured, and the transport is opened only
 * if it is not already open (SSL sockets connect eagerly).
 *
 * @throws IllegalStateException if the connection is already open
 * @throws HectorTransportException if the socket or transport cannot be opened
 */
public HThriftClient open() {
if ( isOpen() ) {
throw new IllegalStateException("Open called on already open connection. You should not have gotten here.");
}
if ( log.isDebugEnabled() ) {
log.debug("Creating a new thrift connection to {}", cassandraHost);
}
TSocket socket;
try {
// params == null means plain TCP; otherwise an SSL client socket.
socket = params == null ?
new TSocket(cassandraHost.getHost(), cassandraHost.getPort(), timeout)
: TSSLTransportFactory.getClientSocket(cassandraHost.getHost(), cassandraHost.getPort(), timeout, params);
} catch (TTransportException e) {
throw new HectorTransportException("Could not get client socket: ", e);
}
if ( cassandraHost.getUseSocketKeepalive() ) {
try {
socket.getSocket().setKeepAlive(true);
} catch (SocketException se) {
throw new HectorTransportException("Could not set SO_KEEPALIVE on socket: ", se);
}
}
transport = maybeWrapWithTFramedTransport(socket);
// If using SSL, the socket will already be connected, and TFramedTransport and
// TSocket just wind up calling socket.isConnected(), so check this before calling
// open() to avoid a "Socket already connected" error.
if (!transport.isOpen()) {
try {
transport.open();
} catch (TTransportException e) {
// Thrift exceptions aren't very good in reporting, so we have to catch the exception here and
// add details to it.
log.debug("Unable to open transport to " + cassandraHost.getName());
//clientMonitor.incCounter(Counter.CONNECT_ERROR);
throw new HectorTransportException("Unable to open transport to " + cassandraHost.getName() +" , " +
e.getLocalizedMessage(), e);
}
}
return this;
} }
|
public class class_name {
// NOTE: annotated variant of open() — the trailing "depends on control
// dependency" markers are dataset labels; the executable code is unchanged.
public HThriftClient open() {
if ( isOpen() ) {
throw new IllegalStateException("Open called on already open connection. You should not have gotten here.");
}
if ( log.isDebugEnabled() ) {
log.debug("Creating a new thrift connection to {}", cassandraHost); // depends on control dependency: [if], data = [none]
}
TSocket socket;
try {
socket = params == null ?
new TSocket(cassandraHost.getHost(), cassandraHost.getPort(), timeout)
: TSSLTransportFactory.getClientSocket(cassandraHost.getHost(), cassandraHost.getPort(), timeout, params); // depends on control dependency: [try], data = [none]
} catch (TTransportException e) {
throw new HectorTransportException("Could not get client socket: ", e);
} // depends on control dependency: [catch], data = [none]
if ( cassandraHost.getUseSocketKeepalive() ) {
try {
socket.getSocket().setKeepAlive(true); // depends on control dependency: [try], data = [none]
} catch (SocketException se) {
throw new HectorTransportException("Could not set SO_KEEPALIVE on socket: ", se);
} // depends on control dependency: [catch], data = [none]
}
transport = maybeWrapWithTFramedTransport(socket);
// If using SSL, the socket will already be connected, and TFramedTransport and
// TSocket just wind up calling socket.isConnected(), so check this before calling
// open() to avoid a "Socket already connected" error.
if (!transport.isOpen()) {
try {
transport.open(); // depends on control dependency: [try], data = [none]
} catch (TTransportException e) {
// Thrift exceptions aren't very good in reporting, so we have to catch the exception here and
// add details to it.
log.debug("Unable to open transport to " + cassandraHost.getName());
//clientMonitor.incCounter(Counter.CONNECT_ERROR);
throw new HectorTransportException("Unable to open transport to " + cassandraHost.getName() +" , " +
e.getLocalizedMessage(), e);
} // depends on control dependency: [catch], data = [none]
}
return this;
} }
|
public class class_name {
/**
 * Copies the nominal response values of this dataset into an int array.
 * Missing responses (NaN) and any excess capacity in the supplied array are
 * encoded as {@code Integer.MIN_VALUE}. If the supplied array is too small,
 * a new array of the dataset's size is allocated.
 *
 * @param a destination array (reused when large enough)
 * @return the filled array
 * @throws IllegalArgumentException if there is no response variable or it is not nominal
 */
public int[] toArray(int[] a) {
if (response == null) {
throw new IllegalArgumentException("The dataset has no response values.");
}
if (response.getType() != Attribute.Type.NOMINAL) {
throw new IllegalArgumentException("The response variable is not nominal.");
}
int m = data.size();
if (a.length < m) {
a = new int[m];
}
for (int i = 0; i < m; i++) {
Datum<SparseArray> datum = get(i);
// FIX: reuse the datum already fetched instead of calling get(i) a
// second time in the non-NaN branch.
a[i] = Double.isNaN(datum.y) ? Integer.MIN_VALUE : (int) datum.y;
}
// Pad unused tail slots with the missing-value marker.
for (int i = m; i < a.length; i++) {
a[i] = Integer.MIN_VALUE;
}
return a;
} }
|
public class class_name {
// NOTE: annotated variant of toArray — the trailing "depends on control
// dependency" markers are dataset labels; the executable code is unchanged.
public int[] toArray(int[] a) {
if (response == null) {
throw new IllegalArgumentException("The dataset has no response values.");
}
if (response.getType() != Attribute.Type.NOMINAL) {
throw new IllegalArgumentException("The response variable is not nominal.");
}
int m = data.size();
if (a.length < m) {
a = new int[m]; // depends on control dependency: [if], data = [none]
}
for (int i = 0; i < m; i++) {
Datum<SparseArray> datum = get(i);
if (Double.isNaN(datum.y)) {
a[i] = Integer.MIN_VALUE; // depends on control dependency: [if], data = [none]
} else {
a[i] = (int) get(i).y; // depends on control dependency: [if], data = [none]
}
}
for (int i = m; i < a.length; i++) {
a[i] = Integer.MIN_VALUE; // depends on control dependency: [for], data = [i]
}
return a;
} }
|
public class class_name {
/**
 * Returns a live, unmodifiable view of this graph's edges as endpoint
 * pairs. Iteration, size, and membership all delegate to the backing graph;
 * {@code contains} honors directedness (ordered pairs only match directed
 * graphs and vice versa).
 */
@Override
public Set<EndpointPair<N>> edges() {
return new AbstractSet<EndpointPair<N>>() {
@Override
public UnmodifiableIterator<EndpointPair<N>> iterator() {
return EndpointPairIterator.of(AbstractGraph.this);
}
@Override
public int size() {
return Ints.saturatedCast(edgeCount());
}
@Override
public boolean contains(@Nullable Object obj) {
if (!(obj instanceof EndpointPair)) {
return false;
}
EndpointPair<?> endpointPair = (EndpointPair<?>) obj;
// Ordered-ness must match the graph's directedness, and the edge
// must actually exist (nodeV is a successor of nodeU).
return isDirected() == endpointPair.isOrdered()
&& nodes().contains(endpointPair.nodeU())
&& successors(endpointPair.nodeU()).contains(endpointPair.nodeV());
}
};
} }
|
public class class_name {
// NOTE: annotated variant of edges() — the trailing "depends on control
// dependency" marker is a dataset label; the executable code is unchanged.
@Override
public Set<EndpointPair<N>> edges() {
return new AbstractSet<EndpointPair<N>>() {
@Override
public UnmodifiableIterator<EndpointPair<N>> iterator() {
return EndpointPairIterator.of(AbstractGraph.this);
}
@Override
public int size() {
return Ints.saturatedCast(edgeCount());
}
@Override
public boolean contains(@Nullable Object obj) {
if (!(obj instanceof EndpointPair)) {
return false; // depends on control dependency: [if], data = [none]
}
EndpointPair<?> endpointPair = (EndpointPair<?>) obj;
return isDirected() == endpointPair.isOrdered()
&& nodes().contains(endpointPair.nodeU())
&& successors(endpointPair.nodeU()).contains(endpointPair.nodeV());
}
};
} }
|
public class class_name {
/**
 * Walks the chain of row actions starting at this one and returns the type
 * of the action whose commit timestamp equals the given timestamp, or
 * ACTION_NONE if none matches. When several actions share the timestamp,
 * the last match in the chain wins.
 */
synchronized int getCommitType(long timestamp) {
    int result = ACTION_NONE;
    for (RowActionBase current = this; current != null; current = current.next) {
        if (current.commitTimestamp == timestamp) {
            result = current.type;
        }
    }
    return result;
} }
|
public class class_name {
// NOTE: annotated variant of getCommitType — the trailing "depends on
// control dependency" marker is a dataset label; code is unchanged.
synchronized int getCommitType(long timestamp) {
RowActionBase action = this;
int type = ACTION_NONE;
do {
if (action.commitTimestamp == timestamp) {
type = action.type; // depends on control dependency: [if], data = [none]
}
action = action.next;
} while (action != null);
return type;
} }
|
public class class_name {
/**
 * Accepts a file only when the superclass filter accepts it AND the file
 * lives directly inside a directory named SPECIFICATIONS.
 */
@Override
public boolean accept(File pathname) {
    // Guard clause: anything the parent filter rejects is rejected outright.
    if (!super.accept(pathname)) {
        return false;
    }
    final File parentDir = pathname.getParentFile();
    return parentDir != null && parentDir.getName().equals(SPECIFICATIONS);
} }
|
public class class_name {
// NOTE: annotated variant of accept — the trailing "depends on control
// dependency" marker is a dataset label; the executable code is unchanged.
@Override
public boolean accept(File pathname) {
boolean accepted = super.accept(pathname);
if (accepted) {
final File parentDir = pathname.getParentFile();
accepted = parentDir != null && parentDir.getName().equals(SPECIFICATIONS); // depends on control dependency: [if], data = [none]
}
return accepted;
} }
|
public class class_name {
/**
 * Parses a GraphQL AST literal into a Value by embedding it as the default
 * value of a synthetic input type and extracting it from the parsed
 * document. Any parse/extraction failure is treated as a programming error
 * via Assert.assertShouldNeverHappen.
 */
public static Value valueFromAst(String astLiteral) {
// we use the parser to give us the AST elements as if we defined an inputType
String toParse = "input X { x : String = " + astLiteral + "}";
try {
Document doc = new Parser().parseDocument(toParse);
InputObjectTypeDefinition inputType = (InputObjectTypeDefinition) doc.getDefinitions().get(0);
InputValueDefinition inputValueDefinition = inputType.getInputValueDefinitions().get(0);
return inputValueDefinition.getDefaultValue();
} catch (Exception e) {
return Assert.assertShouldNeverHappen("valueFromAst of '%s' failed because of '%s'", astLiteral, e.getMessage());
}
} }
|
public class class_name {
// NOTE: annotated variant of valueFromAst — the trailing "depends on
// control dependency" markers are dataset labels; code is unchanged.
public static Value valueFromAst(String astLiteral) {
// we use the parser to give us the AST elements as if we defined an inputType
String toParse = "input X { x : String = " + astLiteral + "}";
try {
Document doc = new Parser().parseDocument(toParse);
InputObjectTypeDefinition inputType = (InputObjectTypeDefinition) doc.getDefinitions().get(0);
InputValueDefinition inputValueDefinition = inputType.getInputValueDefinitions().get(0);
return inputValueDefinition.getDefaultValue(); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
return Assert.assertShouldNeverHappen("valueFromAst of '%s' failed because of '%s'", astLiteral, e.getMessage());
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
/**
 * Decodes, decrypts and parses the credentials cookie into a
 * ChallengeResponse.
 *
 * The decrypted payload ends with {@code /identifierIndex,secretIndex};
 * those two indexes delimit the time-issued prefix, the identifier and the
 * secret inside the payload.
 *
 * @param cookieValue the raw Base64-encoded cookie value
 * @return the parsed credentials, or null when decoding or decryption fails
 */
protected ChallengeResponse parseCredentials(final String cookieValue)
{
// 1) Decode Base64 string
final byte[] encrypted = Base64.decode(cookieValue);
if(encrypted == null)
{
this.log.error("Cannot decode cookie credentials : {}", cookieValue);
// FIX: previously execution fell through and attempted to decrypt a
// null payload, only failing later in the catch block; return early
// instead so the failure is explicit at the point it is detected.
return null;
}
// 2) Decrypt the credentials
try
{
final String decrypted =
CryptoUtils.decrypt(this.getEncryptAlgorithm(), this.getEncryptSecretKey(), encrypted);
// 3) Parse the decrypted cookie value
final int lastSlash = decrypted.lastIndexOf('/');
final String[] indexes = decrypted.substring(lastSlash + 1).split(",");
final int identifierIndex = Integer.parseInt(indexes[0]);
final int secretIndex = Integer.parseInt(indexes[1]);
// 4) Create the challenge response
final ChallengeResponse cr = new ChallengeResponse(this.getScheme());
cr.setRawValue(cookieValue);
cr.setTimeIssued(Long.parseLong(decrypted.substring(0, identifierIndex)));
cr.setIdentifier(decrypted.substring(identifierIndex + 1, secretIndex));
cr.setSecret(decrypted.substring(secretIndex + 1, lastSlash));
return cr;
}
catch(final Exception e)
{
this.log.info("Unable to decrypt cookie credentials");
this.log.trace("Unable to decrypt cookie credentials: Stacktrace:", e);
return null;
}
} }
|
public class class_name {
// NOTE: annotated variant of parseCredentials — the trailing "depends on
// control dependency" markers are dataset labels; code is unchanged.
protected ChallengeResponse parseCredentials(final String cookieValue)
{
// 1) Decode Base64 string
final byte[] encrypted = Base64.decode(cookieValue);
if(encrypted == null)
{
this.log.error("Cannot decode cookie credentials : {}", cookieValue); // depends on control dependency: [if], data = [none]
}
// 2) Decrypt the credentials
try
{
final String decrypted =
CryptoUtils.decrypt(this.getEncryptAlgorithm(), this.getEncryptSecretKey(), encrypted);
// 3) Parse the decrypted cookie value
final int lastSlash = decrypted.lastIndexOf('/');
final String[] indexes = decrypted.substring(lastSlash + 1).split(",");
final int identifierIndex = Integer.parseInt(indexes[0]);
final int secretIndex = Integer.parseInt(indexes[1]);
// 4) Create the challenge response
final ChallengeResponse cr = new ChallengeResponse(this.getScheme());
cr.setRawValue(cookieValue); // depends on control dependency: [try], data = [none]
cr.setTimeIssued(Long.parseLong(decrypted.substring(0, identifierIndex))); // depends on control dependency: [try], data = [none]
cr.setIdentifier(decrypted.substring(identifierIndex + 1, secretIndex)); // depends on control dependency: [try], data = [none]
cr.setSecret(decrypted.substring(secretIndex + 1, lastSlash)); // depends on control dependency: [try], data = [none]
return cr; // depends on control dependency: [try], data = [none]
}
catch(final Exception e)
{
this.log.info("Unable to decrypt cookie credentials");
this.log.trace("Unable to decrypt cookie credentials: Stacktrace:", e);
return null;
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
/**
 * Returns {@code true} when none of the values in {@code valueList} occur
 * in {@code baseList}, i.e. the two collections are disjoint.
 *
 * @param baseList  collection to test membership against
 * @param valueList candidate values
 * @return {@code true} if no element of {@code valueList} is contained in {@code baseList}
 */
public static <T> boolean containsNo(Collection<T> baseList, Collection<T> valueList) {
    // Collections.disjoint implements exactly this check, and may pick the
    // cheaper iteration order when one of the collections is a Set.
    return Collections.disjoint(baseList, valueList);
} }
|
public class class_name {
// NOTE: annotated variant of containsNo — the standalone "depends on ..."
// comment line is a dataset label; the executable code is unchanged.
public static <T> boolean containsNo(Collection<T> baseList, Collection<T> valueList) {
for (T v : valueList) {
if (baseList.contains(v)) {
return false;
// depends on control dependency: [if], data = [none]
}
}
return true;
} }
|
public class class_name {
/**
 * Normalizes whitespace in a SQL string: every run of "invisible"
 * characters (as defined by isInvisibleChar) is collapsed into a single
 * space, and one trailing space is appended. Null or empty input is
 * returned unchanged.
 */
public static String formatSQL(String sql) {
    if (sql == null || sql.isEmpty()) {
        return sql;
    }
    final StringBuilder out = new StringBuilder(sql.length() + 1);
    boolean lastWasSpace = false;
    for (int i = 0; i < sql.length(); i++) {
        final char ch = sql.charAt(i);
        if (!isInvisibleChar(ch)) {
            out.append(ch);
            lastWasSpace = false;
        } else if (!lastWasSpace) {
            // Collapse a run of invisible characters into one space.
            out.append(' ');
            lastWasSpace = true;
        }
    }
    return out.append(' ').toString();
} }
|
public class class_name {
// NOTE: annotated variant of formatSQL — the trailing "depends on control
// dependency" markers are dataset labels; the executable code is unchanged.
public static String formatSQL(String sql) {
if (sql == null || sql.length() == 0)
return sql;
StringBuilder sb = new StringBuilder();
char[] chars = sql.toCharArray();
boolean addedSpace = false;
for (char c : chars) {
if (isInvisibleChar(c)) {
if (!addedSpace) {
sb.append(" "); // depends on control dependency: [if], data = [none]
addedSpace = true; // depends on control dependency: [if], data = [none]
}
} else {
sb.append(c); // depends on control dependency: [if], data = [none]
addedSpace = false; // depends on control dependency: [if], data = [none]
}
}
sb.append(" ");
return sb.toString();
} }
|
public class class_name {
/**
 * Builds the attribute map for this element, containing "priority" and/or
 * "timeout" when those values are set.
 *
 * @return attribute name/value pairs in insertion order
 */
protected Map<String, String> getElementAttributes() {
    // Preserve order of attributes.
    // FIX: HashMap does not preserve insertion order, contradicting the
    // stated intent; LinkedHashMap iterates in insertion order.
    Map<String, String> attrs = new java.util.LinkedHashMap<>();
    if (this.getPriority() != null) {
        attrs.put("priority", this.getPriority().toString());
    }
    if (this.getTimeout() != null) {
        attrs.put("timeout", this.getTimeout().toString());
    }
    return attrs;
} }
|
public class class_name {
// NOTE: annotated variant of getElementAttributes — the trailing "depends
// on control dependency" markers are dataset labels; code is unchanged.
protected Map<String, String> getElementAttributes() {
// Preserve order of attributes
Map<String, String> attrs = new HashMap<>();
if (this.getPriority() != null) {
attrs.put("priority", this.getPriority().toString()); // depends on control dependency: [if], data = [none]
}
if (this.getTimeout() != null) {
attrs.put("timeout", this.getTimeout().toString()); // depends on control dependency: [if], data = [none]
}
return attrs;
} }
|
public class class_name {
/**
 * Opens (or re-opens) this JAR-backed class source, incrementing the open
 * count and lazily creating the underlying JarFile on the first open.
 * The open count and JarFile must be consistent on entry: a negative count,
 * or a count/JarFile mismatch, is reported and rejected.
 *
 * @throws ClassSource_Exception if the state is inconsistent or the JAR cannot be opened
 */
@Override
@Trivial
public void open() throws ClassSource_Exception {
String methodName = "open";
if ( tc.isEntryEnabled() ) {
String msg = MessageFormat.format(
"[ {0} ] State [ {1} ]",
new Object[] { getHashText(), Integer.valueOf(opens) });
Tr.entry(tc, methodName, msg);
}
// Sanity check: opens and jarFile must agree (0 opens <=> no JarFile).
if ( (opens < 0) ||
((opens == 0) && (jarFile != null)) ||
((opens > 0) && (jarFile == null)) ) {
Tr.warning(tc, "ANNO_CLASSSOURCE_JAR_STATE_BAD", getHashText(), getJarPath(), Integer.valueOf(opens));
String eMsg = "[ " + getHashText() + " ]" +
" Failed to open [ " + getJarPath() + " ]" +
" Count of opens [ " + opens + " ]" +
" Jar state [ " + jarFile + " ]";
throw getFactory().newClassSourceException(eMsg);
}
opens++;
if ( jarFile == null ) {
try {
jarFile = UtilImpl_FileUtils.createJarFile(jarPath); // throws IOException
} catch ( IOException e ) {
Tr.warning(tc, "ANNO_CLASSSOURCE_OPEN4_EXCEPTION", getHashText(), jarPath);
String eMsg = "[ " + getHashText() + " ] Failed to open [ " + jarPath + " ]";
throw getFactory().wrapIntoClassSourceException(CLASS_NAME, methodName, eMsg, e);
}
if ( tc.isEntryEnabled() ) {
Tr.exit(tc, methodName, MessageFormat.format("[ {0} ] RETURN (new open)", getHashText()));
}
} else {
if ( tc.isEntryEnabled() ) {
Tr.exit(tc, methodName, MessageFormat.format("[ {0} ] RETURN (already open)", getHashText()));
}
}
} }
|
public class class_name {
// NOTE: annotated variant of open() — the trailing "depends on control
// dependency" markers are dataset labels; the executable code is unchanged.
@Override
@Trivial
public void open() throws ClassSource_Exception {
String methodName = "open";
if ( tc.isEntryEnabled() ) {
String msg = MessageFormat.format(
"[ {0} ] State [ {1} ]",
new Object[] { getHashText(), Integer.valueOf(opens) });
Tr.entry(tc, methodName, msg);
}
if ( (opens < 0) ||
((opens == 0) && (jarFile != null)) ||
((opens > 0) && (jarFile == null)) ) {
Tr.warning(tc, "ANNO_CLASSSOURCE_JAR_STATE_BAD", getHashText(), getJarPath(), Integer.valueOf(opens));
String eMsg = "[ " + getHashText() + " ]" +
" Failed to open [ " + getJarPath() + " ]" +
" Count of opens [ " + opens + " ]" +
" Jar state [ " + jarFile + " ]";
throw getFactory().newClassSourceException(eMsg);
}
opens++;
if ( jarFile == null ) {
try {
jarFile = UtilImpl_FileUtils.createJarFile(jarPath); // throws IOException // depends on control dependency: [try], data = [none]
} catch ( IOException e ) {
Tr.warning(tc, "ANNO_CLASSSOURCE_OPEN4_EXCEPTION", getHashText(), jarPath);
String eMsg = "[ " + getHashText() + " ] Failed to open [ " + jarPath + " ]";
throw getFactory().wrapIntoClassSourceException(CLASS_NAME, methodName, eMsg, e);
} // depends on control dependency: [catch], data = [none]
if ( tc.isEntryEnabled() ) {
Tr.exit(tc, methodName, MessageFormat.format("[ {0} ] RETURN (new open)", getHashText())); // depends on control dependency: [if], data = [none]
}
} else {
if ( tc.isEntryEnabled() ) {
Tr.exit(tc, methodName, MessageFormat.format("[ {0} ] RETURN (already open)", getHashText())); // depends on control dependency: [if], data = [none]
}
}
} }
|
public class class_name {
/**
 * Calls the function, returning {@code defaultValue} instead of throwing.
 * With {@code noNull} unset (or full null support enabled) the raw result
 * is returned; otherwise a null result is replaced by {@code defaultValue}.
 * Any Throwable is swallowed (after rethrow-if-necessary filtering) and
 * mapped to {@code defaultValue} — deliberate best-effort semantics.
 */
public Object callFunctionWithoutNamedValues(PageContext pc, Object coll, Collection.Key key, Object[] args, boolean noNull, Object defaultValue) {
// MUST make an independent impl for performance reasons
try {
if (!noNull || NullSupportHelper.full(pc)) return callFunctionWithoutNamedValues(pc, coll, key, args);
Object obj = callFunctionWithoutNamedValues(pc, coll, key, args);
return obj == null ? defaultValue : obj;
}
catch (Throwable t) {
ExceptionUtil.rethrowIfNecessary(t);
return defaultValue;
}
} }
|
public class class_name {
// NOTE: annotated variant of callFunctionWithoutNamedValues — the trailing
// "depends on control dependency" markers are dataset labels; code unchanged.
public Object callFunctionWithoutNamedValues(PageContext pc, Object coll, Collection.Key key, Object[] args, boolean noNull, Object defaultValue) {
// MUST make an independent impl for performance reasons
try {
if (!noNull || NullSupportHelper.full(pc)) return callFunctionWithoutNamedValues(pc, coll, key, args);
Object obj = callFunctionWithoutNamedValues(pc, coll, key, args);
return obj == null ? defaultValue : obj; // depends on control dependency: [try], data = [none]
}
catch (Throwable t) {
ExceptionUtil.rethrowIfNecessary(t);
return defaultValue;
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
/** Concatenates the surface token of every reading into a single string. */
public String getText() {
    final StringBuilder text = new StringBuilder();
    for (final AnalyzedTokenReadings reading : tokens) {
        text.append(reading.getToken());
    }
    return text.toString();
} }
|
public class class_name {
// NOTE: annotated variant of getText — the trailing "depends on control
// dependency" marker is a dataset label; the executable code is unchanged.
public String getText() {
StringBuilder sb = new StringBuilder();
for (AnalyzedTokenReadings element : tokens) {
sb.append(element.getToken()); // depends on control dependency: [for], data = [element]
}
return sb.toString();
} }
|
public class class_name {
/**
 * Returns the singleton, creating it lazily with double-checked locking.
 *
 * NOTE(review): this idiom is only safe under the Java Memory Model if the
 * {@code instance} field (declared elsewhere in this class) is
 * {@code volatile}; otherwise another thread may observe a partially
 * constructed ImageLoader — confirm the field declaration.
 */
public static ImageLoader getInstance() {
if (instance == null) {
synchronized (ImageLoader.class) {
// Re-check under the lock: another thread may have won the race.
if (instance == null) {
instance = new ImageLoader();
}
}
}
return instance;
} }
|
public class class_name {
// NOTE: annotated variant of getInstance — the trailing "depends on control
// dependency" markers are dataset labels; the executable code is unchanged.
public static ImageLoader getInstance() {
if (instance == null) {
synchronized (ImageLoader.class) { // depends on control dependency: [if], data = [none]
if (instance == null) {
instance = new ImageLoader(); // depends on control dependency: [if], data = [none]
}
}
}
return instance;
} }
|
public class class_name {
/**
 * Creates a sub-context whose parent-scope chain is this context's own
 * variables followed by this context's existing parent scopes (current vars
 * are inserted at slot 0). The new context shares the local context's
 * output and records it as its localContext.
 */
public InternalContext createSubContext(VariantIndexer[] indexers, InternalContext localContext, int varSize) {
Object[][] myParentScopes = this.parentScopes;
//cal the new-context's parent-scopes
Object[][] scopes;
if (myParentScopes == null) {
scopes = new Object[][]{this.vars};
} else {
// Prepend this.vars; existing parent scopes shift to indexes 1..n.
scopes = new Object[myParentScopes.length + 1][];
scopes[0] = this.vars;
System.arraycopy(myParentScopes, 0, scopes, 1, myParentScopes.length);
}
InternalContext newContext = new InternalContext(template, localContext.out, Vars.EMPTY,
indexers, varSize, scopes);
newContext.localContext = localContext;
return newContext;
} }
|
public class class_name {
// NOTE: annotated variant of createSubContext — the trailing "depends on
// control dependency" markers are dataset labels; code is unchanged.
public InternalContext createSubContext(VariantIndexer[] indexers, InternalContext localContext, int varSize) {
Object[][] myParentScopes = this.parentScopes;
//cal the new-context's parent-scopes
Object[][] scopes;
if (myParentScopes == null) {
scopes = new Object[][]{this.vars}; // depends on control dependency: [if], data = [none]
} else {
scopes = new Object[myParentScopes.length + 1][]; // depends on control dependency: [if], data = [none]
scopes[0] = this.vars; // depends on control dependency: [if], data = [none]
System.arraycopy(myParentScopes, 0, scopes, 1, myParentScopes.length); // depends on control dependency: [if], data = [(myParentScopes]
}
InternalContext newContext = new InternalContext(template, localContext.out, Vars.EMPTY,
indexers, varSize, scopes);
newContext.localContext = localContext;
return newContext;
} }
|
public class class_name {
/**
 * Clears the horizontal alignment rules from the layout params.
 * On API 17+ removeRule() is available and the START/END variants exist;
 * on older APIs addRule(rule, 0) is used instead, which effectively clears
 * the LEFT/RIGHT rules (removeRule requires JELLY_BEAN_MR1).
 */
private void removeLayoutParamsRule(RelativeLayout.LayoutParams layoutParams) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
layoutParams.removeRule(RelativeLayout.ALIGN_PARENT_RIGHT);
layoutParams.removeRule(RelativeLayout.ALIGN_PARENT_END);
layoutParams.removeRule(RelativeLayout.ALIGN_PARENT_LEFT);
layoutParams.removeRule(RelativeLayout.ALIGN_PARENT_START);
} else {
layoutParams.addRule(RelativeLayout.ALIGN_PARENT_RIGHT, 0);
layoutParams.addRule(RelativeLayout.ALIGN_PARENT_LEFT, 0);
}
} }
|
public class class_name {
// NOTE: annotated variant of removeLayoutParamsRule — the trailing "depends
// on control dependency" markers are dataset labels; code is unchanged.
private void removeLayoutParamsRule(RelativeLayout.LayoutParams layoutParams) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
layoutParams.removeRule(RelativeLayout.ALIGN_PARENT_RIGHT); // depends on control dependency: [if], data = [none]
layoutParams.removeRule(RelativeLayout.ALIGN_PARENT_END); // depends on control dependency: [if], data = [none]
layoutParams.removeRule(RelativeLayout.ALIGN_PARENT_LEFT); // depends on control dependency: [if], data = [none]
layoutParams.removeRule(RelativeLayout.ALIGN_PARENT_START); // depends on control dependency: [if], data = [none]
} else {
layoutParams.addRule(RelativeLayout.ALIGN_PARENT_RIGHT, 0); // depends on control dependency: [if], data = [none]
layoutParams.addRule(RelativeLayout.ALIGN_PARENT_LEFT, 0); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
    /**
     * Reclassifies targets that are referenced exclusively via conref:
     * they are dropped from the full topic set and handled like
     * resource-only topics instead.
     */
    private void handleConref() {
        // Collect targets referenced only through conref (not also via a
        // non-conref copy-to reference).
        final Set<URI> conrefOnly = new HashSet<>(128);
        for (final URI candidate : conrefTargetSet) {
            if (nonConrefCopytoTargetSet.contains(candidate)) {
                continue;
            }
            conrefOnly.add(candidate);
        }
        conrefTargetSet = conrefOnly;
        // Conref-only targets are not published on their own.
        fullTopicSet.removeAll(conrefOnly);
        // Treat them the same as resource-only topics.
        resourceOnlySet.addAll(conrefOnly);
    }
}
|
public class class_name {
    /**
     * Reclassifies targets referenced exclusively via conref: they are
     * removed from the full topic set and treated as resource-only topics.
     */
    private void handleConref() {
        // Get pure conref targets
        final Set<URI> pureConrefTargets = new HashSet<>(128);
        for (final URI target: conrefTargetSet) {
            if (!nonConrefCopytoTargetSet.contains(target)) {
                pureConrefTargets.add(target); // depends on control dependency: [if], data = [none]
            }
        }
        conrefTargetSet = pureConrefTargets;
        // Remove pure conref targets from fullTopicSet
        fullTopicSet.removeAll(pureConrefTargets);
        // Treat pure conref targets same as resource-only
        resourceOnlySet.addAll(pureConrefTargets);
    }
}
|
public class class_name {
    /**
     * Writes a run of characters to the output writer, normalizing as it goes:
     * linefeeds may be replaced by the configured line separator, characters
     * outside the output encoding are emitted as numeric character references
     * (with an error message to System.err), and UTF-16 surrogate pairs are
     * delegated to writeUTF16Surrogate.
     *
     * @param ch         source character buffer
     * @param start      index of the first character to write
     * @param length     number of characters to process
     * @param useLineSep when true, each linefeed is replaced by m_lineSep
     * @throws IOException              on writer failure
     * @throws org.xml.sax.SAXException declared for SAX compatibility
     */
    void writeNormalizedChars(
        final char ch[],
        final int start,
        final int length,
        final boolean useLineSep)
        throws IOException, org.xml.sax.SAXException
    {
        final String encoding = getEncoding();
        final java.io.Writer writer = m_writer;
        final int end = start + length;
        /* copy a few "constants" before the loop for performance */
        final char S_LINEFEED = CharInfo.S_LINEFEED;
        // This for() loop always increments i by one at the end
        // of the loop. Additional increments of i adjust for when
        // two input characters (a high/low UTF16 surrogate pair)
        // are processed.
        for (int i = start; i < end; i++) {
            final char c = ch[i];
            if (S_LINEFEED == c && useLineSep) {
                writer.write(m_lineSep, 0, m_lineSepLen);
                // one input char processed
            } else if (m_encodingInfo.isInEncoding(c)) {
                writer.write(c);
                // one input char processed
            } else if (Encodings.isHighUTF16Surrogate(c)) {
                // A non-zero return is the code point of a pair that could
                // NOT be represented in the output encoding.
                final int codePoint = writeUTF16Surrogate(c, ch, i, end);
                if (codePoint != 0) {
                    // I think we can just emit the message,
                    // not crash and burn.
                    final String integralValue = Integer.toString(codePoint);
                    final String msg = Utils.messages.createMessage(
                        MsgKey.ER_ILLEGAL_CHARACTER,
                        new Object[] { integralValue, encoding });
                    //Older behavior was to throw the message,
                    //but newer gentler behavior is to write a message to System.err
                    //throw new SAXException(msg);
                    System.err.println(msg);
                }
                i++; // two input chars processed (skip the low surrogate)
            } else {
                // Don't know what to do with this char, it is
                // not in the encoding and not a high char in
                // a surrogate pair, so write out as an entity ref
                if (encoding != null) {
                    /* The output encoding is known,
                     * so somthing is wrong.
                     */
                    // not in the encoding, so write out a character reference
                    writer.write('&');
                    writer.write('#');
                    writer.write(Integer.toString(c));
                    writer.write(';');
                    // I think we can just emit the message,
                    // not crash and burn.
                    final String integralValue = Integer.toString(c);
                    final String msg = Utils.messages.createMessage(
                        MsgKey.ER_ILLEGAL_CHARACTER,
                        new Object[] { integralValue, encoding });
                    //Older behavior was to throw the message,
                    //but newer gentler behavior is to write a message to System.err
                    //throw new SAXException(msg);
                    System.err.println(msg);
                } else {
                    /* The output encoding is not known,
                     * so just write it out as-is.
                     */
                    writer.write(c);
                }
                // one input char was processed
            }
        }
    }
}
|
public class class_name {
    /**
     * Writes a run of characters, normalizing as it goes: linefeeds may be
     * replaced by the configured line separator, characters outside the
     * output encoding become numeric character references (plus a message
     * to System.err), and UTF-16 surrogate pairs are delegated to
     * writeUTF16Surrogate.
     *
     * @param ch         source character buffer
     * @param start      index of the first character to write
     * @param length     number of characters to process
     * @param useLineSep when true, each linefeed is replaced by m_lineSep
     * @throws IOException              on writer failure
     * @throws org.xml.sax.SAXException declared for SAX compatibility
     */
    void writeNormalizedChars(
        final char ch[],
        final int start,
        final int length,
        final boolean useLineSep)
        throws IOException, org.xml.sax.SAXException
    {
        final String encoding = getEncoding();
        final java.io.Writer writer = m_writer;
        final int end = start + length;
        /* copy a few "constants" before the loop for performance */
        final char S_LINEFEED = CharInfo.S_LINEFEED;
        // This for() loop always increments i by one at the end
        // of the loop. Additional increments of i adjust for when
        // two input characters (a high/low UTF16 surrogate pair)
        // are processed.
        for (int i = start; i < end; i++) {
            final char c = ch[i];
            if (S_LINEFEED == c && useLineSep) {
                writer.write(m_lineSep, 0, m_lineSepLen);
                // one input char processed
            } else if (m_encodingInfo.isInEncoding(c)) {
                writer.write(c);
                // one input char processed
            } else if (Encodings.isHighUTF16Surrogate(c)) {
                final int codePoint = writeUTF16Surrogate(c, ch, i, end);
                if (codePoint != 0) {
                    // I think we can just emit the message,
                    // not crash and burn.
                    final String integralValue = Integer.toString(codePoint);
                    final String msg = Utils.messages.createMessage(
                        MsgKey.ER_ILLEGAL_CHARACTER,
                        new Object[] { integralValue, encoding });
                    //Older behavior was to throw the message,
                    //but newer gentler behavior is to write a message to System.err
                    //throw new SAXException(msg);
                    System.err.println(msg); // depends on control dependency: [if], data = [none]
                }
                i++; // two input chars processed
            } else {
                // Don't know what to do with this char, it is
                // not in the encoding and not a high char in
                // a surrogate pair, so write out as an entity ref
                if (encoding != null) {
                    /* The output encoding is known,
                     * so somthing is wrong.
                     */
                    // not in the encoding, so write out a character reference
                    writer.write('&'); // depends on control dependency: [if], data = [none]
                    writer.write('#'); // depends on control dependency: [if], data = [none]
                    writer.write(Integer.toString(c)); // depends on control dependency: [if], data = [none]
                    writer.write(';'); // depends on control dependency: [if], data = [none]
                    // I think we can just emit the message,
                    // not crash and burn.
                    final String integralValue = Integer.toString(c);
                    final String msg = Utils.messages.createMessage(
                        MsgKey.ER_ILLEGAL_CHARACTER,
                        new Object[] { integralValue, encoding });
                    //Older behavior was to throw the message,
                    //but newer gentler behavior is to write a message to System.err
                    //throw new SAXException(msg);
                    System.err.println(msg); // depends on control dependency: [if], data = [none]
                } else {
                    /* The output encoding is not known,
                     * so just write it out as-is.
                     */
                    writer.write(c); // depends on control dependency: [if], data = [none]
                }
                // one input char was processed
            }
        }
    }
}
|
public class class_name {
    /**
     * Parses the HQL "take" clause: {@code TAKE ^ ( NUM_INT | parameter )}.
     * NOTE: this method is ANTLR-generated (grammar rule at hql.g:344) —
     * regenerate from the grammar rather than hand-editing.
     *
     * @return the rule return scope carrying the built AST subtree
     * @throws RecognitionException on unrecoverable parse failure
     */
    public final hqlParser.takeClause_return takeClause() throws RecognitionException {
        hqlParser.takeClause_return retval = new hqlParser.takeClause_return();
        retval.start = input.LT(1);
        CommonTree root_0 = null;
        Token TAKE134=null;
        Token NUM_INT135=null;
        ParserRuleReturnScope parameter136 =null;
        CommonTree TAKE134_tree=null;
        CommonTree NUM_INT135_tree=null;
        try {
            // hql.g:344:2: ( TAKE ^ ( NUM_INT | parameter ) )
            // hql.g:344:4: TAKE ^ ( NUM_INT | parameter )
            {
            root_0 = (CommonTree)adaptor.nil();
            TAKE134=(Token)match(input,TAKE,FOLLOW_TAKE_in_takeClause1650);
            TAKE134_tree = (CommonTree)adaptor.create(TAKE134);
            // TAKE becomes the root of the subtree (the '^' in the grammar).
            root_0 = (CommonTree)adaptor.becomeRoot(TAKE134_tree, root_0);
            // hql.g:344:10: ( NUM_INT | parameter )
            int alt42=2;
            int LA42_0 = input.LA(1);
            if ( (LA42_0==NUM_INT) ) {
                alt42=1;
            }
            else if ( (LA42_0==COLON||LA42_0==PARAM) ) {
                alt42=2;
            }
            else {
                // Lookahead matched neither alternative: report no-viable-alt.
                NoViableAltException nvae =
                    new NoViableAltException("", 42, 0, input);
                throw nvae;
            }
            switch (alt42) {
                case 1 :
                    // hql.g:344:11: NUM_INT
                    {
                    NUM_INT135=(Token)match(input,NUM_INT,FOLLOW_NUM_INT_in_takeClause1654);
                    NUM_INT135_tree = (CommonTree)adaptor.create(NUM_INT135);
                    adaptor.addChild(root_0, NUM_INT135_tree);
                    }
                    break;
                case 2 :
                    // hql.g:344:21: parameter
                    {
                    pushFollow(FOLLOW_parameter_in_takeClause1658);
                    parameter136=parameter();
                    state._fsp--;
                    adaptor.addChild(root_0, parameter136.getTree());
                    }
                    break;
            }
            }
            retval.stop = input.LT(-1);
            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
        }
        catch (RecognitionException re) {
            // Standard ANTLR recovery: report, resync, and return an error node.
            reportError(re);
            recover(input,re);
            retval.tree = (CommonTree)adaptor.errorNode(input, retval.start, input.LT(-1), re);
        }
        finally {
            // do for sure before leaving
        }
        return retval;
    }
}
|
public class class_name {
    /**
     * Parses the HQL "take" clause: {@code TAKE ^ ( NUM_INT | parameter )}.
     * NOTE: ANTLR-generated (grammar rule at hql.g:344) — regenerate from
     * the grammar rather than hand-editing.
     *
     * @return the rule return scope carrying the built AST subtree
     * @throws RecognitionException on unrecoverable parse failure
     */
    public final hqlParser.takeClause_return takeClause() throws RecognitionException {
        hqlParser.takeClause_return retval = new hqlParser.takeClause_return();
        retval.start = input.LT(1);
        CommonTree root_0 = null;
        Token TAKE134=null;
        Token NUM_INT135=null;
        ParserRuleReturnScope parameter136 =null;
        CommonTree TAKE134_tree=null;
        CommonTree NUM_INT135_tree=null;
        try {
            // hql.g:344:2: ( TAKE ^ ( NUM_INT | parameter ) )
            // hql.g:344:4: TAKE ^ ( NUM_INT | parameter )
            {
            root_0 = (CommonTree)adaptor.nil();
            TAKE134=(Token)match(input,TAKE,FOLLOW_TAKE_in_takeClause1650);
            TAKE134_tree = (CommonTree)adaptor.create(TAKE134);
            root_0 = (CommonTree)adaptor.becomeRoot(TAKE134_tree, root_0);
            // hql.g:344:10: ( NUM_INT | parameter )
            int alt42=2;
            int LA42_0 = input.LA(1);
            if ( (LA42_0==NUM_INT) ) {
                alt42=1; // depends on control dependency: [if], data = [none]
            }
            else if ( (LA42_0==COLON||LA42_0==PARAM) ) {
                alt42=2; // depends on control dependency: [if], data = [none]
            }
            else {
                NoViableAltException nvae =
                    new NoViableAltException("", 42, 0, input);
                throw nvae;
            }
            switch (alt42) {
                case 1 :
                    // hql.g:344:11: NUM_INT
                    {
                    NUM_INT135=(Token)match(input,NUM_INT,FOLLOW_NUM_INT_in_takeClause1654);
                    NUM_INT135_tree = (CommonTree)adaptor.create(NUM_INT135);
                    adaptor.addChild(root_0, NUM_INT135_tree);
                    }
                    break;
                case 2 :
                    // hql.g:344:21: parameter
                    {
                    pushFollow(FOLLOW_parameter_in_takeClause1658);
                    parameter136=parameter();
                    state._fsp--;
                    adaptor.addChild(root_0, parameter136.getTree());
                    }
                    break;
            }
            }
            retval.stop = input.LT(-1);
            retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
        }
        catch (RecognitionException re) {
            reportError(re);
            recover(input,re);
            retval.tree = (CommonTree)adaptor.errorNode(input, retval.start, input.LT(-1), re);
        }
        finally {
            // do for sure before leaving
        }
        return retval;
    }
}
|
public class class_name {
    /**
     * Returns the 3D point from the superclass, logging the coordinates
     * (or "null") at debug level.
     *
     * @return the point, possibly null
     */
    @Override
    public Point3d getPoint3d() {
        Point3d point3d = super.getPoint3d();
        if (point3d == null) {
            logger.debug("Getting point3d: null");
        } else {
            // BUG FIX: the ", z=" fragment was previously passed as a second
            // argument to debug() (a stray comma instead of '+'), so the z
            // coordinate never appeared in the logged message.
            logger.debug("Getting point3d: x=" + point3d.x + ", y=" + point3d.y + ", z=" + point3d.z);
        }
        return point3d;
    }
}
|
public class class_name {
    /**
     * Returns the 3D point from the superclass, logging the coordinates
     * (or "null") at debug level.
     *
     * @return the point, possibly null
     */
    @Override
    public Point3d getPoint3d() {
        Point3d point3d = super.getPoint3d();
        if (point3d == null) {
            logger.debug("Getting point3d: null"); // depends on control dependency: [if], data = [none]
        } else {
            // BUG FIX: the ", z=" fragment was previously passed as a second
            // argument to debug() (a stray comma instead of '+'), so the z
            // coordinate never appeared in the logged message.
            logger.debug("Getting point3d: x=" + point3d.x + ", y=" + point3d.y + ", z=" + point3d.z); // depends on control dependency: [if], data = [none]
        }
        return point3d;
    }
}
|
public class class_name {
    /**
     * Registers a new context for {@code path}, reusing an existing
     * virtual-host mapping whose alias list equals {@code virtualHosts},
     * or creating and registering a new mapping when none matches.
     *
     * @return the newly registered context
     */
    Context registerContext(final String path, final List<String> virtualHosts) {
        VHostMapping mapping = null;
        for (final VHostMapping candidate : vHosts) {
            if (!virtualHosts.equals(candidate.getAliases())) {
                continue;
            }
            mapping = candidate;
            break;
        }
        if (mapping == null) {
            // No mapping with an identical alias list exists yet.
            mapping = new VHostMapping(this, virtualHosts);
            vHosts.add(mapping);
        }
        final Context registered = new Context(path, mapping, this);
        contexts.add(registered);
        return registered;
    }
}
|
public class class_name {
    /**
     * Registers a new context for {@code path}, reusing an existing
     * virtual-host mapping whose alias list equals {@code virtualHosts},
     * or creating and registering a new mapping when none matches.
     *
     * @return the newly registered context
     */
    Context registerContext(final String path, final List<String> virtualHosts) {
        VHostMapping host = null;
        for (final VHostMapping vhost : vHosts) {
            if (virtualHosts.equals(vhost.getAliases())) {
                host = vhost; // depends on control dependency: [if], data = [none]
                break;
            }
        }
        if (host == null) {
            host = new VHostMapping(this, virtualHosts); // depends on control dependency: [if], data = [none]
            vHosts.add(host); // depends on control dependency: [if], data = [(host]
        }
        final Context context = new Context(path, host, this);
        contexts.add(context);
        return context;
    }
}
|
public class class_name {
    /**
     * JAX-RS endpoint returning a page of flat push-message metrics for the
     * push application identified by {@code id}. Pagination defaults: page 0,
     * page size normalized via parsePageSize. Aggregate counts are exposed as
     * "total" / "receivers" / "appOpenedCounter" response headers.
     *
     * @param id       push application id; 404 when null
     * @param page     zero-based page index, defaults to 0
     * @param pageSize requested page size (normalized)
     * @param sorting  sort direction token, interpreted by isAscendingOrder
     * @param search   free-text filter passed to the metrics service
     * @return 200 with the result list, or 404 when no id was supplied
     */
    @GET
    @Path("/application/{id}")
    @Produces(MediaType.APPLICATION_JSON)
    public Response pushMessageInformationPerApplication(
            @PathParam("id") String id,
            @QueryParam("page") Integer page,
            @QueryParam("per_page") Integer pageSize,
            @QueryParam("sort") String sorting,
            @QueryParam("search") String search) {
        pageSize = parsePageSize(pageSize);
        if (page == null) {
            page = 0;
        }
        if (id == null) {
            return Response.status(Response.Status.NOT_FOUND).entity("Could not find requested information").build();
        }
        PageResult<FlatPushMessageInformation, MessageMetrics> pageResult =
                metricsService.findAllFlatsForPushApplication(id, search, isAscendingOrder(sorting), page, pageSize);
        return Response.ok(pageResult.getResultList())
                .header("total", pageResult.getAggregate().getCount())
                .header("receivers", "0")
                .header("appOpenedCounter", pageResult.getAggregate().getAppOpenedCounter())
                .build();
    }
}
|
public class class_name {
    /**
     * JAX-RS endpoint returning a page of flat push-message metrics for the
     * push application identified by {@code id}. Pagination defaults: page 0,
     * page size normalized via parsePageSize. Aggregate counts are exposed as
     * "total" / "receivers" / "appOpenedCounter" response headers.
     *
     * @param id       push application id; 404 when null
     * @param page     zero-based page index, defaults to 0
     * @param pageSize requested page size (normalized)
     * @param sorting  sort direction token, interpreted by isAscendingOrder
     * @param search   free-text filter passed to the metrics service
     * @return 200 with the result list, or 404 when no id was supplied
     */
    @GET
    @Path("/application/{id}")
    @Produces(MediaType.APPLICATION_JSON)
    public Response pushMessageInformationPerApplication(
            @PathParam("id") String id,
            @QueryParam("page") Integer page,
            @QueryParam("per_page") Integer pageSize,
            @QueryParam("sort") String sorting,
            @QueryParam("search") String search) {
        pageSize = parsePageSize(pageSize);
        if (page == null) {
            page = 0; // depends on control dependency: [if], data = [none]
        }
        if (id == null) {
            return Response.status(Response.Status.NOT_FOUND).entity("Could not find requested information").build(); // depends on control dependency: [if], data = [none]
        }
        PageResult<FlatPushMessageInformation, MessageMetrics> pageResult =
                metricsService.findAllFlatsForPushApplication(id, search, isAscendingOrder(sorting), page, pageSize);
        return Response.ok(pageResult.getResultList())
                .header("total", pageResult.getAggregate().getCount())
                .header("receivers", "0")
                .header("appOpenedCounter", pageResult.getAggregate().getAppOpenedCounter())
                .build();
    }
}
|
public class class_name {
    /**
     * Maps an opcode to a human-readable name. Known opcodes get their
     * symbolic name; otherwise the value is classified by range
     * (0x1-0x7 data, 0x8-0xF control — presumably per RFC 6455, verify)
     * and rendered in hex.
     *
     * @param opcode the raw opcode value
     * @return a display name such as "TEXT", "DATA(0x3)" or "0x10"
     */
    public static String toOpcodeName(int opcode)
    {
        switch (opcode)
        {
            case CONTINUATION:
                return "CONTINUATION";
            case TEXT:
                return "TEXT";
            case BINARY:
                return "BINARY";
            case CLOSE:
                return "CLOSE";
            case PING:
                return "PING";
            case PONG:
                return "PONG";
            default:
                break;
        }
        // Not a named opcode: classify by numeric range.
        final boolean inDataRange = (opcode >= 0x1) && (opcode <= 0x7);
        if (inDataRange)
        {
            return String.format("DATA(0x%X)", opcode);
        }
        final boolean inControlRange = (opcode >= 0x8) && (opcode <= 0xF);
        if (inControlRange)
        {
            return String.format("CONTROL(0x%X)", opcode);
        }
        return String.format("0x%X", opcode);
    }
}
|
public class class_name {
    /**
     * Maps an opcode to a human-readable name. Known opcodes get their
     * symbolic name; otherwise the value is classified by range
     * (0x1-0x7 data, 0x8-0xF control) and rendered in hex.
     *
     * @param opcode the raw opcode value
     * @return a display name such as "TEXT", "DATA(0x3)" or "0x10"
     */
    public static String toOpcodeName(int opcode)
    {
        switch (opcode)
        {
            case CONTINUATION:
                return "CONTINUATION";
            case TEXT:
                return "TEXT";
            case BINARY:
                return "BINARY";
            case CLOSE:
                return "CLOSE";
            case PING:
                return "PING";
            case PONG:
                return "PONG";
            default:
                break;
        }
        if (0x1 <= opcode && opcode <= 0x7)
        {
            return String.format("DATA(0x%X)", opcode); // depends on control dependency: [if], data = [none]
        }
        if (0x8 <= opcode && opcode <= 0xF)
        {
            return String.format("CONTROL(0x%X)", opcode); // depends on control dependency: [if], data = [none]
        }
        return String.format("0x%X", opcode);
    }
}
|
public class class_name {
    /**
     * Returns true when the first {@code count} elements of both arrays
     * contain the same values regardless of order (compared by sorting
     * trimmed copies; the inputs are never mutated).
     *
     * @param arra  first array
     * @param arrb  second array
     * @param count number of leading elements to compare
     * @return true when the prefixes match as sorted sequences
     */
    public static boolean haveEqualSets(int[] arra, int[] arrb, int count) {
        // Fast path: identical prefixes are trivially equal as sets.
        if (ArrayUtil.haveEqualArrays(arra, arrb, count)) {
            return true;
        }
        // A prefix longer than either array can never match.
        if (count > arra.length || count > arrb.length) {
            return false;
        }
        // Single element: direct comparison, no sorting required.
        if (count == 1) {
            return arra[0] == arrb[0];
        }
        // Sort trimmed copies so order differences disappear.
        int[] sortedA = (int[]) resizeArray(arra, count);
        int[] sortedB = (int[]) resizeArray(arrb, count);
        sortArray(sortedA);
        sortArray(sortedB);
        for (int i = 0; i < count; i++) {
            if (sortedA[i] != sortedB[i]) {
                return false;
            }
        }
        return true;
    }
}
|
public class class_name {
    /**
     * Returns true when the first {@code count} elements of both arrays
     * contain the same values regardless of order (compared by sorting
     * trimmed copies; the inputs are never mutated).
     *
     * @param arra  first array
     * @param arrb  second array
     * @param count number of leading elements to compare
     * @return true when the prefixes match as sorted sequences
     */
    public static boolean haveEqualSets(int[] arra, int[] arrb, int count) {
        if (ArrayUtil.haveEqualArrays(arra, arrb, count)) {
            return true; // depends on control dependency: [if], data = [none]
        }
        if (count > arra.length || count > arrb.length) {
            return false; // depends on control dependency: [if], data = [none]
        }
        if (count == 1) {
            return arra[0] == arrb[0]; // depends on control dependency: [if], data = [none]
        }
        int[] tempa = (int[]) resizeArray(arra, count);
        int[] tempb = (int[]) resizeArray(arrb, count);
        sortArray(tempa);
        sortArray(tempb);
        for (int j = 0; j < count; j++) {
            if (tempa[j] != tempb[j]) {
                return false; // depends on control dependency: [if], data = [none]
            }
        }
        return true;
    }
}
|
public class class_name {
    /**
     * Resumes every registered control point whose entry-point name equals
     * {@code entryPoint}. Synchronized to guard the entryPoints map.
     */
    public synchronized void resumeControlPoint(final String entryPoint) {
        for (ControlPoint candidate : entryPoints.values()) {
            if (!candidate.getEntryPoint().equals(entryPoint)) {
                continue;
            }
            candidate.resume();
        }
    }
}
|
public class class_name {
    /**
     * Resumes every registered control point whose entry-point name equals
     * {@code entryPoint}. Synchronized to guard the entryPoints map.
     */
    public synchronized void resumeControlPoint(final String entryPoint) {
        for (ControlPoint ep : entryPoints.values()) {
            if (ep.getEntryPoint().equals(entryPoint)) {
                ep.resume(); // depends on control dependency: [if], data = [none]
            }
        }
    }
}
|
public class class_name {
    /**
     * Verifies a Base64-encoded signature over {@code message} using the
     * given public key.
     *
     * NOTE(review): InvalidKeyException/SignatureException are deliberately
     * swallowed and simply yield {@code false}; callers cannot distinguish
     * "bad signature" from "bad key/engine failure". Also, message.getBytes()
     * uses the platform default charset — presumably both signer and verifier
     * run with the same charset; confirm, or switch both sides to an explicit
     * charset.
     *
     * @param message   the signed plaintext
     * @param signature Base64-encoded signature bytes
     * @param publicKey key to verify against
     * @return true only when the signature verifies; false on any failure
     */
    public boolean verifySignature(String message, String signature, PublicKey publicKey){
        boolean result = false;
        try {
            byte[] sigbytes = Base64.decode(signature.getBytes());
            Signature sigChecker = SIG_CHECKER.get();
            sigChecker.initVerify(publicKey);
            sigChecker.update(message.getBytes());
            result = sigChecker.verify(sigbytes);
        } catch (InvalidKeyException e) {
            // Rare exception: The private key was incorrectly formatted
        } catch (SignatureException e) {
            // Rare exception: Catch-all exception for the signature checker
        }
        return result;
    }
}
|
public class class_name {
    /**
     * Verifies a Base64-encoded signature over {@code message} using the
     * given public key.
     *
     * NOTE(review): exceptions are deliberately swallowed and yield false;
     * message.getBytes() uses the platform default charset — confirm both
     * signer and verifier agree on it.
     *
     * @param message   the signed plaintext
     * @param signature Base64-encoded signature bytes
     * @param publicKey key to verify against
     * @return true only when the signature verifies; false on any failure
     */
    public boolean verifySignature(String message, String signature, PublicKey publicKey){
        boolean result = false;
        try {
            byte[] sigbytes = Base64.decode(signature.getBytes());
            Signature sigChecker = SIG_CHECKER.get();
            sigChecker.initVerify(publicKey); // depends on control dependency: [try], data = [none]
            sigChecker.update(message.getBytes()); // depends on control dependency: [try], data = [none]
            result = sigChecker.verify(sigbytes); // depends on control dependency: [try], data = [none]
        } catch (InvalidKeyException e) {
            // Rare exception: The private key was incorrectly formatted
        } catch (SignatureException e) { // depends on control dependency: [catch], data = [none]
            // Rare exception: Catch-all exception for the signature checker
        } // depends on control dependency: [catch], data = [none]
        return result;
    }
}
|
public class class_name {
    /**
     * Takes a snapshot of the last-modified times of every real entry in the
     * directory at {@code path} (the "." and ".." self/parent entries are
     * skipped). The store's read lock is held for the duration of the scan
     * so the snapshot is consistent.
     *
     * @param path directory to snapshot; must resolve (following links) to a directory
     * @return immutable map of entry name to last-modified time
     * @throws IOException if the path does not resolve to a directory
     */
    public ImmutableMap<Name, Long> snapshotModifiedTimes(JimfsPath path) throws IOException {
        ImmutableMap.Builder<Name, Long> modifiedTimes = ImmutableMap.builder();
        store.readLock().lock();
        try {
            Directory dir = (Directory) lookUp(path, Options.FOLLOW_LINKS).requireDirectory(path).file();
            // TODO(cgdecker): Investigate whether WatchServices should keep a reference to the actual
            // directory when SecureDirectoryStream is supported rather than looking up the directory
            // each time the WatchService polls
            for (DirectoryEntry entry : dir) {
                if (!entry.name().equals(Name.SELF) && !entry.name().equals(Name.PARENT)) {
                    modifiedTimes.put(entry.name(), entry.file().getLastModifiedTime());
                }
            }
            return modifiedTimes.build();
        } finally {
            store.readLock().unlock();
        }
    }
}
|
public class class_name {
    /**
     * Takes a snapshot of the last-modified times of every real entry in the
     * directory at {@code path} ("." and ".." entries are skipped), holding
     * the store's read lock for a consistent view.
     *
     * @param path directory to snapshot; must resolve (following links) to a directory
     * @return immutable map of entry name to last-modified time
     * @throws IOException if the path does not resolve to a directory
     */
    public ImmutableMap<Name, Long> snapshotModifiedTimes(JimfsPath path) throws IOException {
        ImmutableMap.Builder<Name, Long> modifiedTimes = ImmutableMap.builder();
        store.readLock().lock();
        try {
            Directory dir = (Directory) lookUp(path, Options.FOLLOW_LINKS).requireDirectory(path).file();
            // TODO(cgdecker): Investigate whether WatchServices should keep a reference to the actual
            // directory when SecureDirectoryStream is supported rather than looking up the directory
            // each time the WatchService polls
            for (DirectoryEntry entry : dir) {
                if (!entry.name().equals(Name.SELF) && !entry.name().equals(Name.PARENT)) {
                    modifiedTimes.put(entry.name(), entry.file().getLastModifiedTime()); // depends on control dependency: [if], data = [none]
                }
            }
            return modifiedTimes.build();
        } finally {
            store.readLock().unlock();
        }
    }
}
|
public class class_name {
    /**
     * Removes the entry for {@code key}, checking the first-generation queue
     * before the second. Deletion listeners fire only when an entry was
     * actually removed.
     *
     * @param key key to remove
     * @return the removed value, or null when the key was in neither queue
     */
    @Override
    public V remove(final long key) {
        // Probe the generations in order; stop at the first hit.
        V removed = firstGenerationQueue.remove(key);
        if (removed == null) {
            removed = secondGenerationQueue.remove(key);
        }
        if (removed != null) {
            fireListenersAboutDeletion(key, removed);
        }
        return removed;
    }
}
|
public class class_name {
    /**
     * Removes the entry for {@code key}, checking the first-generation queue
     * before the second. Deletion listeners fire only when an entry was
     * actually removed.
     *
     * @param key key to remove
     * @return the removed value, or null when the key was in neither queue
     */
    @Override
    public V remove(final long key) {
        V x = firstGenerationQueue.remove(key);
        if (x != null) {
            fireListenersAboutDeletion(key, x); // depends on control dependency: [if], data = [none]
        } else {
            x = secondGenerationQueue.remove(key); // depends on control dependency: [if], data = [none]
            if (x != null) {
                fireListenersAboutDeletion(key, x); // depends on control dependency: [if], data = [none]
            }
        }
        return x;
    }
}
|
public class class_name {
    /**
     * Collects web elements matching {@code by}. In JavaScript-click mode a
     * failed script execution yields an empty list; otherwise elements come
     * from the web views. In the default mode the overload taking the
     * execution result is used.
     *
     * @param by                       the locator injected via JavaScript
     * @param onlySufficientlyVisbile  whether to keep only sufficiently visible elements
     * @return the matching elements (possibly empty, never null)
     */
    public ArrayList<WebElement> getWebElements(final By by, boolean onlySufficientlyVisbile){
        boolean scriptRan = executeJavaScript(by, false);
        if (!config.useJavaScriptToClickWebElements) {
            return getWebElements(scriptRan, onlySufficientlyVisbile);
        }
        // JavaScript-click mode: nothing to collect when the script failed.
        if (!scriptRan) {
            return new ArrayList<WebElement>();
        }
        return webElementCreator.getWebElementsFromWebViews();
    }
}
|
public class class_name {
    /**
     * Collects web elements matching {@code by}. In JavaScript-click mode a
     * failed script execution yields an empty list; otherwise elements come
     * from the web views. In the default mode the overload taking the
     * execution result is used.
     *
     * @param by                       the locator injected via JavaScript
     * @param onlySufficientlyVisbile  whether to keep only sufficiently visible elements
     * @return the matching elements (possibly empty, never null)
     */
    public ArrayList<WebElement> getWebElements(final By by, boolean onlySufficientlyVisbile){
        boolean javaScriptWasExecuted = executeJavaScript(by, false);
        if(config.useJavaScriptToClickWebElements){
            if(!javaScriptWasExecuted){
                return new ArrayList<WebElement>(); // depends on control dependency: [if], data = [none]
            }
            return webElementCreator.getWebElementsFromWebViews(); // depends on control dependency: [if], data = [none]
        }
        return getWebElements(javaScriptWasExecuted, onlySufficientlyVisbile);
    }
}
|
public class class_name {
    /**
     * Fetches stops matching the given query options; a null argument is
     * replaced with the default options.
     *
     * @param options query options, or null for defaults
     * @return the API result wrapping the stop list
     */
    public TransportApiResult<List<Stop>> getStops(StopQueryOptions options)
    {
        // Fall back to the defaults when the caller passed nothing.
        StopQueryOptions effectiveOptions =
                (options == null) ? StopQueryOptions.defaultQueryOptions() : options;
        return TransportApiClientCalls.getStops(tokenComponent, settings, effectiveOptions, null, null, null);
    }
}
|
public class class_name {
    /**
     * Fetches stops matching the given query options; a null argument is
     * replaced with the default options.
     *
     * @param options query options, or null for defaults
     * @return the API result wrapping the stop list
     */
    public TransportApiResult<List<Stop>> getStops(StopQueryOptions options)
    {
        if (options == null)
        {
            options = StopQueryOptions.defaultQueryOptions();
            // depends on control dependency: [if], data = [none]
        }
        return TransportApiClientCalls.getStops(tokenComponent, settings, options, null, null, null);
    }
}
|
public class class_name {
    /**
     * Finds the binding-table rule that contains the given logic table.
     *
     * @param logicTableName logic table name to look up
     * @return the first matching rule, or absent when none contains the table
     */
    public Optional<BindingTableRule> findBindingTableRule(final String logicTableName) {
        for (BindingTableRule rule : bindingTableRules) {
            if (!rule.hasLogicTable(logicTableName)) {
                continue;
            }
            return Optional.of(rule);
        }
        return Optional.absent();
    }
}
|
public class class_name {
    /**
     * Finds the binding-table rule that contains the given logic table.
     *
     * @param logicTableName logic table name to look up
     * @return the first matching rule, or absent when none contains the table
     */
    public Optional<BindingTableRule> findBindingTableRule(final String logicTableName) {
        for (BindingTableRule each : bindingTableRules) {
            if (each.hasLogicTable(logicTableName)) {
                return Optional.of(each); // depends on control dependency: [if], data = [none]
            }
        }
        return Optional.absent();
    }
}
|
public class class_name {
    /**
     * Builds the ordered cache-control pattern list from the "location"
     * entries of the given service properties. Each location contributes its
     * whitespace-separated patterns bound to its "cache-control" directives;
     * patterns are then ordered by specificity / matching-pattern count.
     *
     * @param properties service properties to read "location" entries from
     * @return ordered pattern list; empty when no locations are configured
     */
    private List<PatternCacheControl> buildPatternsList(ServiceProperties properties) {
        Map<String, PatternCacheControl> patterns = new LinkedHashMap<>();
        List<ServiceProperties> locationsList = properties.getNested("location");
        // Guard clause (was a nested if with `size() != 0`): no locations
        // configured means there is nothing to build.
        if (locationsList == null || locationsList.isEmpty()) {
            return new ArrayList<>(patterns.values());
        }
        for (ServiceProperties location : locationsList) {
            String directiveList = location.get("cache-control");
            String[] patternList = location.get("patterns").split("\\s+");
            for (String pattern : patternList) {
                patterns.put(pattern, new PatternCacheControl(pattern, directiveList));
            }
        }
        resolvePatternSpecificity(patterns);
        return sortByMatchingPatternCount(patterns);
    }
}
|
public class class_name {
    /**
     * Builds the ordered cache-control pattern list from the "location"
     * entries of the given service properties. Each location contributes its
     * whitespace-separated patterns bound to its "cache-control" directives;
     * patterns are then ordered by specificity / matching-pattern count.
     *
     * @param properties service properties to read "location" entries from
     * @return ordered pattern list; empty when no locations are configured
     */
    private List<PatternCacheControl> buildPatternsList(ServiceProperties properties) {
        Map<String, PatternCacheControl> patterns = new LinkedHashMap<>();
        List<ServiceProperties> locationsList = properties.getNested("location");
        if (locationsList != null && locationsList.size() != 0) {
            for (ServiceProperties location : locationsList) {
                String directiveList = location.get("cache-control");
                String[] patternList = location.get("patterns").split("\\s+");
                for (String pattern : patternList) {
                    patterns.put(pattern, new PatternCacheControl(pattern, directiveList)); // depends on control dependency: [for], data = [pattern]
                }
            }
            resolvePatternSpecificity(patterns); // depends on control dependency: [if], data = [none]
            return sortByMatchingPatternCount(patterns); // depends on control dependency: [if], data = [none]
        }
        // patterns is necessarily empty here, so this returns an empty list.
        return new ArrayList<>(patterns.values());
    }
}
|
public class class_name {
    /**
     * Fills the cell buffer with text segments from the column state until the
     * buffer is full or a newline segment is consumed. Handles three cases per
     * segment: newline (updates indentation per the column's indentation mode
     * and stops), a segment that fits entirely (appended whole), and a segment
     * that must be wrapped (hyphenated when hyphenation is active and the
     * segment is a word, otherwise hard-cut at the buffer boundary).
     * The state's textIndex tracks how far into the current segment output
     * has progressed across calls.
     *
     * @param state  per-column cursor over the text segments (mutated)
     * @param buffer the line buffer being filled (mutated)
     */
    protected void appendCellBuffer(ColumnState state, CellBuffer buffer) {
        int bufferRest = buffer.getRest();
        boolean hyphenationActive = (buffer.maxLength >= TextColumnInfo.MINIMUM_WIDTH_FOR_INDENT_AND_HYPHEN);
        boolean todo = true;
        // while ((todo) && (bufferRest > 0)) {
        while (todo) {
            TextSegment currentSegment = state.currentSegment;
            if (currentSegment.getType() == TextSegmentType.NEWLINE) {
                switch (state.getColumnInfo().getIndentationMode()) {
                    case NO_INDENT_AFTER_NEWLINE:
                        state.indent = false;
                        break;
                    case NO_INDENT_AFTER_DOUBLE_NEWLINE:
                        if (state.getSubsequentNewlineCount() >= 2) {
                            state.indent = false;
                        } else {
                            state.indent = true;
                        }
                        break;
                    case INDENT_AFTER_NEWLINE:
                        state.indent = true;
                        break;
                    default :
                        throw new IllegalStateException("" + state.getColumnInfo().getIndentationMode());
                }
                state.proceedTextSegment();
                todo = false;
            } else {
                // currentSegment is no newline event...
                int segmentStartOffset = state.textIndex - currentSegment.startIndex;
                int segmentRest = currentSegment.getLength() - segmentStartOffset;
                int space = bufferRest - segmentRest;
                TextSegmentType nextType = null;
                if (state.nextSegment != null) {
                    nextType = state.nextSegment.type;
                }
                if (hyphenationActive) {
                    // Reserve one column so a trailing punctuation or
                    // non-breaking character can stay attached.
                    if (TextSegmentType.PUNCTUATION_CHARACTER == nextType) {
                        space--;
                    } else if (TextSegmentType.NON_BREAKING_CHARACTER == nextType) {
                        // TODO
                        // latest|Nmidnight
                        // * la-|testNmidnight
                        // latestNmid|night
                        // * latest|midnight
                        // latestNmid|night
                        // * latestNmid-|night
                        space--;
                    }
                }
                if (space >= 0) {
                    // entire segment fits...
                    bufferRest = buffer.append(currentSegment.text, state.textIndex, currentSegment.endIndex);
                    todo = state.proceedTextSegment();
                } else {
                    // segment does NOT fit and has to be wrapped...
                    if (hyphenationActive && (currentSegment.getType() == TextSegmentType.WORD)) {
                        Hyphenation hyphenation = currentSegment.getHyphenatedWord();
                        // int hyphenationBefore = bufferRest + segmentStartOffset;
                        // space is actually negative here...
                        int hyphenationBefore = currentSegment.getLength() + space;
                        if (hyphenationBefore == currentSegment.endIndex) {
                            hyphenationBefore++;
                        }
                        int hyphenationPoint = hyphenation.getHyphenationBefore(hyphenationBefore);
                        int hyphenationOffset = hyphenationPoint - segmentStartOffset;
                        // hyphenation point available that fits?
                        if (hyphenationOffset <= 0) {
                            // determine ratio of filled text length to space left
                            int fillRatio;
                            if (bufferRest > 6) {
                                // accept no rest longer than 6 spaces...
                                fillRatio = 0;
                            } else if (bufferRest <= 2) {
                                // avoid wrapping word at a single character e.g. break "is" as:
                                // i-
                                // s
                                fillRatio = Integer.MAX_VALUE;
                            } else {
                                fillRatio = buffer.length() / bufferRest;
                            }
                            if (fillRatio <= 3) {
                                // force a break just before the buffer end
                                hyphenationOffset = bufferRest - 1;
                            } else {
                                // high ratio means cell is filled okay and only little rest
                                // left do NOT append any
                                todo = false;
                                break;
                            }
                        }
                        int end = state.textIndex + hyphenationOffset;
                        if (end + 1 == currentSegment.endIndex) {
                            end--;
                        }
                        bufferRest = buffer.append(currentSegment.text, state.textIndex, end);
                        if (end < currentSegment.endIndex) {
                            bufferRest = buffer.append(state.hyphenator.getHyphen());
                        } else {
                            todo = state.proceedTextSegment();
                        }
                        state.textIndex = end;
                        todo = false;
                    } else {
                        // Hard cut: fill the remaining buffer and stop.
                        int end = state.textIndex + bufferRest;
                        buffer.append(currentSegment.text, state.textIndex, end);
                        state.textIndex = end;
                        todo = false;
                    }
                }
            }
        }
    }
}
|
public class class_name {
    /**
     * Fills the cell buffer with text segments from the column state until the
     * buffer is full or a newline segment is consumed. Newlines update the
     * column's indentation per its indentation mode; segments that fit are
     * appended whole; segments that don't fit are hyphenated (when active and
     * the segment is a word) or hard-cut at the buffer boundary.
     *
     * @param state  per-column cursor over the text segments (mutated)
     * @param buffer the line buffer being filled (mutated)
     */
    protected void appendCellBuffer(ColumnState state, CellBuffer buffer) {
        int bufferRest = buffer.getRest();
        boolean hyphenationActive = (buffer.maxLength >= TextColumnInfo.MINIMUM_WIDTH_FOR_INDENT_AND_HYPHEN);
        boolean todo = true;
        // while ((todo) && (bufferRest > 0)) {
        while (todo) {
            TextSegment currentSegment = state.currentSegment;
            if (currentSegment.getType() == TextSegmentType.NEWLINE) {
                switch (state.getColumnInfo().getIndentationMode()) {
                    case NO_INDENT_AFTER_NEWLINE:
                        state.indent = false;
                        break;
                    case NO_INDENT_AFTER_DOUBLE_NEWLINE:
                        if (state.getSubsequentNewlineCount() >= 2) {
                            state.indent = false; // depends on control dependency: [if], data = [none]
                        } else {
                            state.indent = true; // depends on control dependency: [if], data = [none]
                        }
                        break;
                    case INDENT_AFTER_NEWLINE:
                        state.indent = true;
                        break;
                    default :
                        throw new IllegalStateException("" + state.getColumnInfo().getIndentationMode());
                }
                state.proceedTextSegment(); // depends on control dependency: [if], data = [none]
                todo = false; // depends on control dependency: [if], data = [none]
            } else {
                // currentSegment is no newline event...
                int segmentStartOffset = state.textIndex - currentSegment.startIndex;
                int segmentRest = currentSegment.getLength() - segmentStartOffset;
                int space = bufferRest - segmentRest;
                TextSegmentType nextType = null;
                if (state.nextSegment != null) {
                    nextType = state.nextSegment.type; // depends on control dependency: [if], data = [none]
                }
                if (hyphenationActive) {
                    if (TextSegmentType.PUNCTUATION_CHARACTER == nextType) {
                        space--; // depends on control dependency: [if], data = [none]
                    } else if (TextSegmentType.NON_BREAKING_CHARACTER == nextType) {
                        // TODO
                        // latest|Nmidnight
                        // * la-|testNmidnight
                        // latestNmid|night
                        // * latest|midnight
                        // latestNmid|night
                        // * latestNmid-|night
                        space--; // depends on control dependency: [if], data = [none]
                    }
                }
                if (space >= 0) {
                    // entire segment fits...
                    bufferRest = buffer.append(currentSegment.text, state.textIndex, currentSegment.endIndex); // depends on control dependency: [if], data = [none]
                    todo = state.proceedTextSegment(); // depends on control dependency: [if], data = [none]
                } else {
                    // segment does NOT fit and has to be wrapped...
                    if (hyphenationActive && (currentSegment.getType() == TextSegmentType.WORD)) {
                        Hyphenation hyphenation = currentSegment.getHyphenatedWord();
                        // int hyphenationBefore = bufferRest + segmentStartOffset;
                        // space is actually negative here...
                        int hyphenationBefore = currentSegment.getLength() + space;
                        if (hyphenationBefore == currentSegment.endIndex) {
                            hyphenationBefore++; // depends on control dependency: [if], data = [none]
                        }
                        int hyphenationPoint = hyphenation.getHyphenationBefore(hyphenationBefore);
                        int hyphenationOffset = hyphenationPoint - segmentStartOffset;
                        // hyphenation point available that fits?
                        if (hyphenationOffset <= 0) {
                            // determine ratio of filled text length to space left
                            int fillRatio;
                            if (bufferRest > 6) {
                                // accept no rest longer than 6 spaces...
                                fillRatio = 0; // depends on control dependency: [if], data = [none]
                            } else if (bufferRest <= 2) {
                                // avoid wrapping word at a single character e.g. break "is" as:
                                // i-
                                // s
                                fillRatio = Integer.MAX_VALUE; // depends on control dependency: [if], data = [none]
                            } else {
                                fillRatio = buffer.length() / bufferRest; // depends on control dependency: [if], data = [none]
                            }
                            if (fillRatio <= 3) {
                                hyphenationOffset = bufferRest - 1; // depends on control dependency: [if], data = [none]
                            } else {
                                // high ratio means cell is filled okay and only little rest
                                // left do NOT append any
                                todo = false; // depends on control dependency: [if], data = [none]
                                break;
                            }
                        }
                        int end = state.textIndex + hyphenationOffset;
                        if (end + 1 == currentSegment.endIndex) {
                            end--; // depends on control dependency: [if], data = [none]
                        }
                        bufferRest = buffer.append(currentSegment.text, state.textIndex, end); // depends on control dependency: [if], data = [none]
                        if (end < currentSegment.endIndex) {
                            bufferRest = buffer.append(state.hyphenator.getHyphen()); // depends on control dependency: [if], data = [none]
                        } else {
                            todo = state.proceedTextSegment(); // depends on control dependency: [if], data = [none]
                        }
                        state.textIndex = end; // depends on control dependency: [if], data = [none]
                        todo = false; // depends on control dependency: [if], data = [none]
                    } else {
                        int end = state.textIndex + bufferRest;
                        buffer.append(currentSegment.text, state.textIndex, end); // depends on control dependency: [if], data = [none]
                        state.textIndex = end; // depends on control dependency: [if], data = [none]
                        todo = false; // depends on control dependency: [if], data = [none]
                    }
                }
            }
        }
    }
}
|
public class class_name {
    /**
     * Registers a view event for this object with the Branch SDK. When the
     * SDK singleton is not initialised, the callback (if supplied) is
     * notified of the failure with ERR_BRANCH_NOT_INSTANTIATED.
     *
     * @param callback optional listener for the registration outcome
     */
    public void registerView(@Nullable RegisterViewStatusListener callback) {
        if (Branch.getInstance() == null) {
            // SDK not instantiated: report the failure if anyone is listening.
            if (callback != null) {
                callback.onRegisterViewFinished(false, new BranchError("Register view error", BranchError.ERR_BRANCH_NOT_INSTANTIATED));
            }
            return;
        }
        Branch.getInstance().registerView(this, callback);
    }
}
|
public class class_name {
    // Registers a view event with the Branch singleton; when Branch is not
    // instantiated the failure is reported to the callback (if present).
    // Trailing "depends on control dependency" comments are machine-generated annotations.
    public void registerView(@Nullable RegisterViewStatusListener callback) {
        if (Branch.getInstance() != null) {
            Branch.getInstance().registerView(this, callback); // depends on control dependency: [if], data = [none]
        } else {
            if (callback != null) {
                callback.onRegisterViewFinished(false, new BranchError("Register view error", BranchError.ERR_BRANCH_NOT_INSTANTIATED)); // depends on control dependency: [if], data = [none]
            }
        }
    } }
|
public class class_name {
    /**
     * Updates only the playing flag of the current playback state,
     * keeping the player and position unchanged. No-op when there is
     * no current state or the flag already has the requested value.
     */
    private void setPlaying(boolean playing) {
        PlaybackState current = currentSimpleState();
        if (current == null || current.playing == playing) {
            return; // nothing to change
        }
        setPlaybackState(current.player, current.position, playing);
    } }
|
public class class_name {
    // Toggles the playing flag of the current playback state, preserving
    // player and position. Trailing comments are machine-generated annotations.
    private void setPlaying(boolean playing) {
        PlaybackState oldState = currentSimpleState();
        if (oldState != null && oldState.playing != playing) {
            setPlaybackState(oldState.player, oldState.position, playing); // depends on control dependency: [if], data = [(oldState]
        }
    } }
|
public class class_name {
    /**
     * Sets the bar's progress. Inside a {@link BootstrapProgressBarGroup} the
     * group dictates the maximum (progress is forwarded as the new max);
     * otherwise the value is validated against {@code maxProgress}. The change
     * is drawn immediately or animated, and an enclosing group is notified.
     *
     * @throws IllegalArgumentException if progress is outside 0..maxProgress
     *         and this bar is not inside a group
     */
    @SuppressLint("DefaultLocale")
    @Override
    public void setProgress(int progress) {
        if (getParent() instanceof BootstrapProgressBarGroup) {
            // The group re-derives the maximum from its children.
            this.userProgress = 0;
            setMaxProgress(progress);
        } else if (progress < 0 || progress > maxProgress) {
            throw new IllegalArgumentException(
                    String.format("Invalid value '%d' - progress must be an integer in the range 0-%d", progress, maxProgress));
        }
        this.userProgress = progress;
        if (animated) {
            startProgressUpdateAnimation();
        } else {
            this.drawnProgress = progress;
            invalidate();
        }
        // Let an enclosing group rebalance its sibling bars.
        ViewParent parent = getParent();
        if (parent instanceof BootstrapProgressBarGroup) {
            ((BootstrapProgressBarGroup) parent).onProgressChanged(this);
        }
    } }
|
public class class_name {
    // Sets the bar's progress; inside a BootstrapProgressBarGroup the group
    // controls the maximum, otherwise the value is range-checked here.
    // Trailing comments are machine-generated dependency annotations.
    @SuppressLint("DefaultLocale")
    @Override
    public void setProgress(int progress) {
        if (getParent() instanceof BootstrapProgressBarGroup){
            this.userProgress = 0; // depends on control dependency: [if], data = [none]
            setMaxProgress(progress); // depends on control dependency: [if], data = [none]
        }else {
            if (progress < 0 || progress > maxProgress) {
                throw new IllegalArgumentException(
                    String.format("Invalid value '%d' - progress must be an integer in the range 0-%d", progress, maxProgress));
            }
        }
        this.userProgress = progress;
        if (animated) {
            startProgressUpdateAnimation(); // depends on control dependency: [if], data = [none]
        }
        else {
            this.drawnProgress = progress; // depends on control dependency: [if], data = [none]
            invalidate(); // depends on control dependency: [if], data = [none]
        }
        // Notify an enclosing group so it can react to the change.
        ViewParent parent = getParent();
        if (parent != null) {
            if (parent instanceof BootstrapProgressBarGroup) {
                BootstrapProgressBarGroup parentGroup = (BootstrapProgressBarGroup) parent;
                parentGroup.onProgressChanged(this); // depends on control dependency: [if], data = [none]
            }
        }
    } }
|
public class class_name {
    /**
     * Dispatches {@code event} to every handler registered for any type in its
     * flattened class hierarchy. A handler failure (EventException) is itself
     * re-posted as an event — unless the event being posted is already an
     * EventException, to avoid unbounded recursion. Events nobody handled are
     * wrapped in a DeadEvent and re-posted.
     */
    public void post(final Object event) {
        boolean delivered = false;
        for (Class c : flattenHierarchy(event.getClass())) {
            for (EventHandler handler : this.handlersByEventType.get(c)) {
                try {
                    handler.handle(event);
                    delivered = true;
                } catch (EventException e) {
                    // Re-post the failure so exception-handlers can deal with it,
                    // but never for failures of an exception event itself.
                    if (!(event instanceof EventException)) {
                        this.post(e);
                    }
                }
            }
        }
        if (!delivered && !(event instanceof DeadEvent)) {
            this.post(new DeadEvent(event));
        }
    } }
|
public class class_name {
    // Dispatches an event to all handlers of every type in its hierarchy;
    // handler failures are re-posted as events, and events nobody handled are
    // wrapped in a DeadEvent. Trailing comments are machine-generated annotations.
    public void post(final Object event) {
        Set<Class> eventTypes = flattenHierarchy(event.getClass());
        boolean handled = false;
        for (Class c : eventTypes) {
            for (EventHandler h : this.handlersByEventType.get(c)) {
                try {
                    h.handle(event); // depends on control dependency: [try], data = [none]
                    handled = true; // depends on control dependency: [try], data = [none]
                } catch (EventException e) {
                    // Skip re-posting when the posted event is itself an exception
                    // (avoids infinite recursion).
                    if (event instanceof EventException) continue;
                    this.post(e); // If an exception is thrown, post the exception as an event, so that exception-handlers can deal with it
                } // depends on control dependency: [catch], data = [none]
            }
        }
        if (!handled && !(event instanceof DeadEvent)) {
            this.post(new DeadEvent(event)); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Maps a (URL, signers) pair to a cached CodeSource, creating a
     * VerifierCodeSource on demand. The per-URL map of the most recently
     * used URL is cached to make repeated lookups for the same JAR cheap.
     * NOTE: the array key relies on identity equality, which is intentional
     * here — callers reuse the same signer arrays.
     */
    private synchronized CodeSource mapSignersToCodeSource(URL url, CodeSigner[] signers) {
        Map map;
        if (url != lastURL) {
            map = (Map) urlToCodeSourceMap.get(url);
            if (map == null) {
                map = new HashMap();
                urlToCodeSourceMap.put(url, map);
            }
            // Remember this URL's map for the next call.
            lastURLMap = map;
            lastURL = url;
        } else {
            map = lastURLMap;
        }
        CodeSource cs = (CodeSource) map.get(signers);
        if (cs == null) {
            cs = new VerifierCodeSource(csdomain, url, signers);
            signerToCodeSource.put(signers, cs);
        }
        return cs;
    } }
|
public class class_name {
    // Maps a (URL, signers) pair to a cached CodeSource, creating one on demand;
    // the per-URL map of the most recently used URL is cached for fast repeats.
    // Trailing comments are machine-generated dependency annotations.
    private synchronized CodeSource mapSignersToCodeSource(URL url, CodeSigner[] signers) {
        Map map;
        if (url == lastURL) {
            map = lastURLMap; // depends on control dependency: [if], data = [none]
        } else {
            map = (Map) urlToCodeSourceMap.get(url); // depends on control dependency: [if], data = [(url]
            if (map == null) {
                map = new HashMap(); // depends on control dependency: [if], data = [none]
                urlToCodeSourceMap.put(url, map); // depends on control dependency: [if], data = [none]
            }
            lastURLMap = map; // depends on control dependency: [if], data = [none]
            lastURL = url; // depends on control dependency: [if], data = [none]
        }
        // NOTE: array key lookup uses identity equality.
        CodeSource cs = (CodeSource) map.get(signers);
        if (cs == null) {
            cs = new VerifierCodeSource(csdomain, url, signers); // depends on control dependency: [if], data = [(cs]
            signerToCodeSource.put(signers, cs); // depends on control dependency: [if], data = [none]
        }
        return cs;
    } }
|
public class class_name {
    /**
     * Applies a separable blur: a horizontal pass followed by a vertical pass
     * (note the swapped width/height arguments on the second call). A
     * destination image is created when {@code dst} is null. The final pixels
     * end up back in {@code srcPixels} after the second pass.
     */
    @Override
    public BufferedImage filter(BufferedImage src, BufferedImage dst) {
        final int width = src.getWidth();
        final int height = src.getHeight();
        final BufferedImage image = (dst != null) ? dst : createCompatibleDestImage(src, null);
        final int[] srcPixels = new int[width * height];
        final int[] dstPixels = new int[width * height];
        getPixels(src, 0, 0, width, height, srcPixels);
        blur(srcPixels, dstPixels, width, height, radius);  // horizontal pass
        blur(dstPixels, srcPixels, height, width, radius);  // vertical pass
        setPixels(image, 0, 0, width, height, srcPixels);
        return image;
    } }
|
public class class_name {
    // Separable blur: horizontal pass then vertical pass; the result ends in
    // srcPixels. Standalone "depends on control dependency" comments are
    // machine-generated annotations.
    @Override
    public BufferedImage filter(BufferedImage src, BufferedImage dst) {
        final int width = src.getWidth();
        final int height = src.getHeight();
        final BufferedImage image;
        if (dst == null) {
            image = createCompatibleDestImage(src, null);
            // depends on control dependency: [if], data = [null)]
        } else {
            image = dst;
            // depends on control dependency: [if], data = [none]
        }
        final int[] srcPixels = new int[width * height];
        final int[] dstPixels = new int[width * height];
        getPixels(src, 0, 0, width, height, srcPixels);
        // horizontal pass
        blur(srcPixels, dstPixels, width, height, radius);
        // vertical pass
        blur(dstPixels, srcPixels, height, width, radius);
        // the result is now stored in srcPixels due to the 2nd pass
        setPixels(image, 0, 0, width, height, srcPixels);
        return image;
    } }
|
public class class_name {
    /**
     * Formats a measure "per unit" (e.g. miles per hour). When the pair
     * resolves to a single combined unit, that unit is formatted directly;
     * otherwise the measure is formatted into a temporary buffer, wrapped with
     * the per-unit pattern, and any captured field position is shifted by the
     * insertion offset into {@code appendTo}.
     */
    public StringBuilder formatMeasurePerUnit(
        Measure measure,
        MeasureUnit perUnit,
        StringBuilder appendTo,
        FieldPosition pos) {
      MeasureUnit resolvedUnit = MeasureUnit.resolveUnitPerUnit(
          measure.getUnit(), perUnit);
      if (resolvedUnit != null) {
        // Combined unit exists (e.g. mile + hour -> mile-per-hour).
        Measure combined = new Measure(measure.getNumber(), resolvedUnit);
        return formatMeasure(combined, numberFormat, appendTo, pos);
      }
      FieldPosition innerPos = new FieldPosition(
          pos.getFieldAttribute(), pos.getField());
      StringBuilder formatted =
          formatMeasure(measure, numberFormat, new StringBuilder(), innerPos);
      int offset = withPerUnitAndAppend(formatted, perUnit, appendTo);
      if (innerPos.getBeginIndex() != 0 || innerPos.getEndIndex() != 0) {
        // Translate the field position from the temporary buffer into appendTo.
        pos.setBeginIndex(innerPos.getBeginIndex() + offset);
        pos.setEndIndex(innerPos.getEndIndex() + offset);
      }
      return appendTo;
    } }
|
public class class_name {
    // Formats a measure per unit: uses a resolved combined unit when available,
    // otherwise wraps the formatted measure with the per-unit pattern and shifts
    // the field position. Trailing comments are machine-generated annotations.
    public StringBuilder formatMeasurePerUnit(
        Measure measure,
        MeasureUnit perUnit,
        StringBuilder appendTo,
        FieldPosition pos) {
      MeasureUnit resolvedUnit = MeasureUnit.resolveUnitPerUnit(
          measure.getUnit(), perUnit);
      if (resolvedUnit != null) {
        Measure newMeasure = new Measure(measure.getNumber(), resolvedUnit);
        return formatMeasure(newMeasure, numberFormat, appendTo, pos); // depends on control dependency: [if], data = [none]
      }
      FieldPosition fpos = new FieldPosition(
          pos.getFieldAttribute(), pos.getField());
      int offset = withPerUnitAndAppend(
          formatMeasure(measure, numberFormat, new StringBuilder(), fpos),
          perUnit,
          appendTo);
      // Shift the captured position by the offset of the insertion into appendTo.
      if (fpos.getBeginIndex() != 0 || fpos.getEndIndex() != 0) {
        pos.setBeginIndex(fpos.getBeginIndex() + offset); // depends on control dependency: [if], data = [(fpos.getBeginIndex()]
        pos.setEndIndex(fpos.getEndIndex() + offset); // depends on control dependency: [if], data = [none]
      }
      return appendTo;
    } }
|
public class class_name {
    /**
     * Issues an asynchronous bulk get for the keys produced by {@code keyIter},
     * decoding each value with the transcoder produced in lock-step by
     * {@code tcIter}. Keys are grouped per node (falling back from an inactive
     * primary to the first active node in the locator sequence), one GET
     * operation is issued per node, and a single BulkGetFuture aggregates the
     * results as the shared callback fills {@code m}.
     */
    @Override
    public <T> BulkFuture<Map<String, T>> asyncGetBulk(Iterator<String> keyIter,
        Iterator<Transcoder<T>> tcIter) {
      final Map<String, Future<T>> m = new ConcurrentHashMap<String, Future<T>>();
      // This map does not need to be a ConcurrentHashMap
      // because it is fully populated when it is used and
      // used only to read the transcoder for a key.
      final Map<String, Transcoder<T>> tcMap =
          new HashMap<String, Transcoder<T>>();
      // Break the gets down into groups by key
      final Map<MemcachedNode, Collection<String>> chunks =
          new HashMap<MemcachedNode, Collection<String>>();
      final NodeLocator locator = mconn.getLocator();
      while (keyIter.hasNext() && tcIter.hasNext()) {
        String key = keyIter.next();
        tcMap.put(key, tcIter.next());
        StringUtils.validateKey(key, opFact instanceof BinaryOperationFactory);
        final MemcachedNode primaryNode = locator.getPrimary(key);
        MemcachedNode node = null;
        if (primaryNode.isActive()) {
          node = primaryNode;
        } else {
          // Primary is down: pick the first active node in the fallback sequence.
          for (Iterator<MemcachedNode> i = locator.getSequence(key); node == null
              && i.hasNext();) {
            MemcachedNode n = i.next();
            if (n.isActive()) {
              node = n;
            }
          }
          // No active node at all: send to the (inactive) primary anyway.
          if (node == null) {
            node = primaryNode;
          }
        }
        assert node != null : "Didn't find a node for " + key;
        Collection<String> ks = chunks.get(node);
        if (ks == null) {
          ks = new ArrayList<String>();
          chunks.put(node, ks);
        }
        ks.add(key);
      }
      final AtomicInteger pendingChunks = new AtomicInteger(chunks.size());
      int initialLatchCount = chunks.isEmpty() ? 0 : 1;
      final CountDownLatch latch = new CountDownLatch(initialLatchCount);
      final Collection<Operation> ops = new ArrayList<Operation>(chunks.size());
      final BulkGetFuture<T> rv = new BulkGetFuture<T>(m, ops, latch, executorService);
      GetOperation.Callback cb = new GetOperation.Callback() {
        @Override
        @SuppressWarnings("synthetic-access")
        public void receivedStatus(OperationStatus status) {
          // The status message is parsed as an integer and added to the pending
          // count — presumably the number of retried chunks; TODO confirm.
          if (status.getStatusCode() == StatusCode.ERR_NOT_MY_VBUCKET) {
            pendingChunks.addAndGet(Integer.parseInt(status.getMessage()));
          }
          rv.setStatus(status);
        }
        @Override
        public void gotData(String k, int flags, byte[] data) {
          // Decode asynchronously with the transcoder registered for this key.
          Transcoder<T> tc = tcMap.get(k);
          m.put(k,
              tcService.decode(tc, new CachedData(flags, data, tc.getMaxSize())));
        }
        @Override
        public void complete() {
          // Release waiters only once every per-node chunk has completed.
          if (pendingChunks.decrementAndGet() <= 0) {
            latch.countDown();
            rv.signalComplete();
          }
        }
      };
      // Now that we know how many servers it breaks down into, and the latch
      // is all set up, convert all of these strings collections to operations
      final Map<MemcachedNode, Operation> mops =
          new HashMap<MemcachedNode, Operation>();
      for (Map.Entry<MemcachedNode, Collection<String>> me : chunks.entrySet()) {
        Operation op = opFact.get(me.getValue(), cb);
        mops.put(me.getKey(), op);
        ops.add(op);
      }
      assert mops.size() == chunks.size();
      mconn.checkState();
      mconn.addOperations(mops);
      return rv;
    } }
|
public class class_name {
    // Asynchronous bulk get: groups keys per node (with fallback from an
    // inactive primary), issues one GET operation per node, and aggregates
    // results in a single BulkGetFuture via a shared callback.
    // Trailing comments are machine-generated dependency annotations.
    @Override
    public <T> BulkFuture<Map<String, T>> asyncGetBulk(Iterator<String> keyIter,
        Iterator<Transcoder<T>> tcIter) {
      final Map<String, Future<T>> m = new ConcurrentHashMap<String, Future<T>>();
      // This map does not need to be a ConcurrentHashMap
      // because it is fully populated when it is used and
      // used only to read the transcoder for a key.
      final Map<String, Transcoder<T>> tcMap =
          new HashMap<String, Transcoder<T>>();
      // Break the gets down into groups by key
      final Map<MemcachedNode, Collection<String>> chunks =
          new HashMap<MemcachedNode, Collection<String>>();
      final NodeLocator locator = mconn.getLocator();
      while (keyIter.hasNext() && tcIter.hasNext()) {
        String key = keyIter.next();
        tcMap.put(key, tcIter.next()); // depends on control dependency: [while], data = [none]
        StringUtils.validateKey(key, opFact instanceof BinaryOperationFactory); // depends on control dependency: [while], data = [none]
        final MemcachedNode primaryNode = locator.getPrimary(key);
        MemcachedNode node = null;
        if (primaryNode.isActive()) {
          node = primaryNode; // depends on control dependency: [if], data = [none]
        } else {
          // Fallback: first active node in the locator sequence, else the primary.
          for (Iterator<MemcachedNode> i = locator.getSequence(key); node == null
              && i.hasNext();) {
            MemcachedNode n = i.next();
            if (n.isActive()) {
              node = n; // depends on control dependency: [if], data = [none]
            }
          }
          if (node == null) {
            node = primaryNode; // depends on control dependency: [if], data = [none]
          }
        }
        assert node != null : "Didn't find a node for " + key; // depends on control dependency: [while], data = [none]
        Collection<String> ks = chunks.get(node);
        if (ks == null) {
          ks = new ArrayList<String>(); // depends on control dependency: [if], data = [none]
          chunks.put(node, ks); // depends on control dependency: [if], data = [none]
        }
        ks.add(key); // depends on control dependency: [while], data = [none]
      }
      final AtomicInteger pendingChunks = new AtomicInteger(chunks.size());
      int initialLatchCount = chunks.isEmpty() ? 0 : 1;
      final CountDownLatch latch = new CountDownLatch(initialLatchCount);
      final Collection<Operation> ops = new ArrayList<Operation>(chunks.size());
      final BulkGetFuture<T> rv = new BulkGetFuture<T>(m, ops, latch, executorService);
      GetOperation.Callback cb = new GetOperation.Callback() {
        @Override
        @SuppressWarnings("synthetic-access")
        public void receivedStatus(OperationStatus status) {
          if (status.getStatusCode() == StatusCode.ERR_NOT_MY_VBUCKET) {
            pendingChunks.addAndGet(Integer.parseInt(status.getMessage())); // depends on control dependency: [if], data = [none]
          }
          rv.setStatus(status);
        }
        @Override
        public void gotData(String k, int flags, byte[] data) {
          Transcoder<T> tc = tcMap.get(k);
          m.put(k,
              tcService.decode(tc, new CachedData(flags, data, tc.getMaxSize())));
        }
        @Override
        public void complete() {
          if (pendingChunks.decrementAndGet() <= 0) {
            latch.countDown(); // depends on control dependency: [if], data = [none]
            rv.signalComplete(); // depends on control dependency: [if], data = [none]
          }
        }
      };
      // Now that we know how many servers it breaks down into, and the latch
      // is all set up, convert all of these strings collections to operations
      final Map<MemcachedNode, Operation> mops =
          new HashMap<MemcachedNode, Operation>();
      for (Map.Entry<MemcachedNode, Collection<String>> me : chunks.entrySet()) {
        Operation op = opFact.get(me.getValue(), cb);
        mops.put(me.getKey(), op); // depends on control dependency: [for], data = [me]
        ops.add(op); // depends on control dependency: [for], data = [none]
      }
      assert mops.size() == chunks.size();
      mconn.checkState();
      mconn.addOperations(mops);
      return rv;
    } }
|
public class class_name {
    /**
     * Renders the given message set and forwards it to the handler registered
     * for its type (error, warning, or info), recording the rendered message.
     *
     * @param messageSet the set to report, may be null
     * @return true if a message was reported; false when the set is null,
     *         carries no recognized type, or no handler is registered for it
     */
    protected boolean report(IsMessageSet messageSet){
        if(messageSet == null){
            return false;
        }
        // Determine the message type; bail out when none is set.
        final E_MessageType type;
        if(messageSet.isErrorSet()){
            type = E_MessageType.ERROR;
        } else if(messageSet.isWarningSet()){
            type = E_MessageType.WARNING;
        } else if(messageSet.isInfoSet()){
            type = E_MessageType.INFO;
        } else {
            return false;
        }
        if(!this.messageHandlers.containsKey(type)){
            return false; // no handler registered for this type
        }
        String template = messageSet.render();
        MessageTypeHandler handler = this.messageHandlers.get(type);
        handler.handleMessage(template, type, this.max100stg.getInstanceOf("max"), this.appID);
        this.messages.put(template, type);
        return true;
    } }
|
public class class_name {
    // Renders a message set and forwards it to the handler registered for its
    // type; returns false for null/typeless/unhandled sets.
    // Trailing comments are machine-generated dependency annotations.
    protected boolean report(IsMessageSet messageSet){
        if(messageSet==null){
            return false; // depends on control dependency: [if], data = [none]
        }
        E_MessageType type = null;
        if(messageSet.isErrorSet()){
            type = E_MessageType.ERROR; // depends on control dependency: [if], data = [none]
        }
        else if(messageSet.isWarningSet()){
            type = E_MessageType.WARNING; // depends on control dependency: [if], data = [none]
        }
        else if(messageSet.isInfoSet()){
            type = E_MessageType.INFO; // depends on control dependency: [if], data = [none]
        }
        else{
            return false; // depends on control dependency: [if], data = [none]
        }
        if(!this.messageHandlers.containsKey(type)){
            return false; // depends on control dependency: [if], data = [none]
        }
        MessageTypeHandler handler = this.messageHandlers.get(type);
        String template = messageSet.render();
        handler.handleMessage(template, type, this.max100stg.getInstanceOf("max"), this.appID);
        this.messages.put(template, type);
        return true;
    } }
|
public class class_name {
    /**
     * Picks a log level for a throwable: IOExceptions whose message is on the
     * QUIET_ERRORS list are demoted to FINE; everything else logs at INFO.
     */
    @VisibleForTesting
    static Level getLogLevel(Throwable t) {
        final boolean quiet = t instanceof IOException
            && t.getMessage() != null
            && QUIET_ERRORS.contains(t.getMessage());
        return quiet ? Level.FINE : Level.INFO;
    } }
|
public class class_name {
    // Demotes known-noisy IOExceptions (message listed in QUIET_ERRORS) to FINE;
    // all other throwables log at INFO. Trailing comments are machine-generated
    // dependency annotations.
    @VisibleForTesting
    static Level getLogLevel(Throwable t) {
        if (t instanceof IOException
            && t.getMessage() != null
            && QUIET_ERRORS.contains(t.getMessage())) {
            return Level.FINE; // depends on control dependency: [if], data = [none]
        }
        return Level.INFO;
    } }
|
public class class_name {
    /**
     * Processes every dependency-parsed sentence in {@code document}, updating
     * the semantic vector of each accepted focus word with the (optionally
     * permuted) index vectors of words reachable along acceptable dependency
     * paths rooted at that word. Closes the reader when done.
     */
    public void processDocument(BufferedReader document) throws IOException {
        DependencyTreeNode[] nodes;
        while ((nodes = parser.readNextTree(document)) != null) {
            if (nodes.length == 0)
                continue; // skip empty sentences
            for (int i = 0; i < nodes.length; ++i) {
                String focusWord = nodes[i].word();
                if (!acceptWord(focusWord))
                    continue; // rejected by the semantic filter
                IntegerVector focusMeaning = getSemanticVector(focusWord);
                // All acceptable paths rooted at the focus word, bounded by pathLength.
                Iterator<DependencyPath> pathIter =
                    new DependencyIterator(nodes[i], acceptor, pathLength);
                while (pathIter.hasNext()) {
                    DependencyPath path = pathIter.next();
                    // Add the index vector of the path's last word, permuted
                    // by the path when a permutation function is configured.
                    TernaryVector termVector = indexMap.get(path.last().word());
                    if (permFunc != null)
                        termVector = permFunc.permute(termVector, path);
                    add(focusMeaning, termVector);
                }
            }
        }
        document.close();
    } }
|
public class class_name {
    // Processes each dependency-parsed sentence, adding (optionally permuted)
    // index vectors of path endpoints to each accepted focus word's semantic
    // vector. Trailing comments are machine-generated dependency annotations.
    public void processDocument(BufferedReader document) throws IOException {
        // Iterate over all of the parseable dependency parsed sentences in the
        // document.
        for (DependencyTreeNode[] nodes = null;
                (nodes = parser.readNextTree(document)) != null; ) {
            // Skip empty documents.
            if (nodes.length == 0)
                continue;
            // Examine the paths for each word in the sentence.
            for (int i = 0; i < nodes.length; ++i) {
                String focusWord = nodes[i].word();
                // Skip words that are rejected by the semantic filter.
                if (!acceptWord(focusWord))
                    continue;
                // Acquire the semantic vector for the focus word.
                IntegerVector focusMeaning = getSemanticVector(focusWord);
                // Create the path iterator for all acceptable paths rooted at
                // the focus word in the sentence.
                Iterator<DependencyPath> pathIter =
                    new DependencyIterator(nodes[i], acceptor, pathLength);
                // For every path, obtain the index vector of the last word in
                // the path and add it to the semantic vector for the focus
                // word. The index vector is permuted if a permutation
                // function has been provided based on the contents of the path.
                while (pathIter.hasNext()) {
                    DependencyPath path = pathIter.next();
                    TernaryVector termVector = indexMap.get(path.last().word());
                    if (permFunc != null)
                        termVector = permFunc.permute(termVector, path);
                    add(focusMeaning, termVector); // depends on control dependency: [while], data = [none]
                }
            }
        }
        document.close();
    } }
|
public class class_name {
    /**
     * Caches the writable properties of {@code mappedClass}, keyed by both the
     * lower-cased property name and (when different) its underscored form, so
     * result-set columns can be matched to bean setters.
     *
     * @param mappedClass the bean class whose properties are mapped
     * @throws SnakerException if the property descriptors cannot be obtained
     */
    protected void initialize(Class<T> mappedClass) {
        this.mappedClass = mappedClass;
        this.mappedFields = new HashMap<String, PropertyDescriptor>();
        PropertyDescriptor[] pds = null;
        try {
            // Obtain the bean's property descriptors.
            pds = propertyDescriptors(mappedClass);
        } catch (SQLException e) {
            // FIX: chain the caught exception itself, not e.getCause() —
            // the original dropped the SQLException from the cause chain.
            throw new SnakerException(e.getMessage(), e);
        }
        for (PropertyDescriptor pd : pds) {
            if (pd.getWriteMethod() != null) {
                String lowerName = pd.getName().toLowerCase();
                this.mappedFields.put(lowerName, pd);
                String underscoredName = underscoreName(pd.getName());
                // Register the underscored alias only when it differs.
                if (!lowerName.equals(underscoredName)) {
                    this.mappedFields.put(underscoredName, pd);
                }
            }
        }
    } }
|
public class class_name {
    // Caches writable properties of the bean class, keyed by lower-cased and
    // underscored names. Trailing comments are machine-generated annotations.
    protected void initialize(Class<T> mappedClass) {
        this.mappedClass = mappedClass;
        this.mappedFields = new HashMap<String, PropertyDescriptor>();
        PropertyDescriptor[] pds = null;
        try {
            /**
             * Returns the bean's array of property descriptors.
             */
            pds = propertyDescriptors(mappedClass); // depends on control dependency: [try], data = [none]
        } catch (SQLException e) {
            throw new SnakerException(e.getMessage(), e.getCause());
        } // depends on control dependency: [catch], data = [none]
        for (PropertyDescriptor pd : pds) {
            if (pd.getWriteMethod() != null) {
                this.mappedFields.put(pd.getName().toLowerCase(), pd); // depends on control dependency: [if], data = [none]
                String underscoredName = underscoreName(pd.getName());
                if (!pd.getName().toLowerCase().equals(underscoredName)) {
                    this.mappedFields.put(underscoredName, pd); // depends on control dependency: [if], data = [none]
                }
            }
        }
    } }
|
public class class_name {
    /**
     * Sets, updates, or removes the edition child node: null removes an
     * existing edition (no-op when none exists); otherwise the node is
     * created on first use or updated in place.
     */
    public void setEdition(final String edition) {
        if (edition == null) {
            if (this.edition != null) {
                removeChild(this.edition);
                this.edition = null;
            }
            // null -> null is a no-op
        } else if (this.edition == null) {
            this.edition = new KeyValueNode<String>(CommonConstants.CS_EDITION_TITLE, edition);
            appendChild(this.edition, false);
        } else {
            this.edition.setValue(edition);
        }
    } }
|
public class class_name {
    // Sets, updates, or removes the edition child node (null clears it).
    // Trailing comments are machine-generated dependency annotations.
    public void setEdition(final String edition) {
        if (edition == null && this.edition == null) {
            return; // depends on control dependency: [if], data = [none]
        } else if (edition == null) {
            removeChild(this.edition); // depends on control dependency: [if], data = [none]
            this.edition = null; // depends on control dependency: [if], data = [none]
        } else if (this.edition == null) {
            this.edition = new KeyValueNode<String>(CommonConstants.CS_EDITION_TITLE, edition); // depends on control dependency: [if], data = [none]
            appendChild(this.edition, false); // depends on control dependency: [if], data = [(this.edition]
        } else {
            this.edition.setValue(edition); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Marshals the IPv4 address, MAC address, and IPv6 address of the given
     * NetworkInterface using the supplied protocol marshaller.
     *
     * @throws SdkClientException if {@code networkInterface} is null, or if
     *         any field fails to marshall (the original exception is chained)
     */
    public void marshall(NetworkInterface networkInterface, ProtocolMarshaller protocolMarshaller) {
        if (networkInterface == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(networkInterface.getIpv4Address(), IPV4ADDRESS_BINDING);
            protocolMarshaller.marshall(networkInterface.getMacAddress(), MACADDRESS_BINDING);
            protocolMarshaller.marshall(networkInterface.getIpv6Address(), IPV6ADDRESS_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    } }
|
public class class_name {
    // Marshals the NetworkInterface fields via the protocol marshaller,
    // wrapping failures in SdkClientException. Trailing comments are
    // machine-generated dependency annotations.
    public void marshall(NetworkInterface networkInterface, ProtocolMarshaller protocolMarshaller) {
        if (networkInterface == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(networkInterface.getIpv4Address(), IPV4ADDRESS_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(networkInterface.getMacAddress(), MACADDRESS_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(networkInterface.getIpv6Address(), IPV6ADDRESS_BINDING); // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Creates a ConsoleEditor for the given flavor. Unknown flavors are
     * resolved once and cached in {@code flavorMap}, then creation is retried
     * through the two-argument overload.
     *
     * @throws EditorInitializationException if the flavor cannot be resolved
     *         or the editor class cannot be instantiated
     */
    @Override
    public ConsoleEditor create(String flavor, Terminal terminal, InputStream in, PrintStream out) throws EditorInitializationException {
        if (!flavorMap.containsKey(flavor)) {
            // First use of this flavor: resolve and cache its implementation class.
            Class<? extends ConsoleEditor> resolved = resolve(flavor);
            if (resolved == null) {
                throw new EditorInitializationException("Unknown flavor:" + flavor);
            }
            flavorMap.put(flavor, resolved);
            return create(flavor, terminal);
        }
        Class<? extends ConsoleEditor> editorClass = flavorMap.get(flavor);
        try {
            return instantiate(editorClass, terminal, in, out);
        } catch (Exception e) {
            throw new EditorInitializationException("Failed to create Editor instance of class:" + editorClass.getName(), e);
        }
    } }
|
public class class_name {
    // Creates a ConsoleEditor for the flavor, resolving and caching its class
    // on first use. Trailing comments are machine-generated annotations.
    @Override
    public ConsoleEditor create(String flavor, Terminal terminal, InputStream in, PrintStream out) throws EditorInitializationException {
        if (flavorMap.containsKey(flavor)) {
            Class<? extends ConsoleEditor> editorClass = flavorMap.get(flavor);
            try {
                return instantiate(editorClass, terminal, in, out); // depends on control dependency: [try], data = [none]
            } catch (Exception e) {
                throw new EditorInitializationException("Failed to create Editor instance of class:" + editorClass.getName(), e);
            } // depends on control dependency: [catch], data = [none]
        } else {
            Class<? extends ConsoleEditor> editorClass = resolve(flavor);
            if (editorClass != null) {
                flavorMap.put(flavor, editorClass); // depends on control dependency: [if], data = [none]
                return create(flavor, terminal); // depends on control dependency: [if], data = [none]
            } else {
                throw new EditorInitializationException("Unknown flavor:" + flavor);
            }
        }
    } }
|
public class class_name {
    /**
     * Toggles the global debugging flag; when logging is active, the logger
     * level follows the flag (FINE while debugging, INFO otherwise).
     */
    public static void setDebugging(boolean debugging) {
        FuzzyLite.debugging = debugging;
        if (isLogging()) {
            Level level = debugging ? Level.FINE : Level.INFO;
            logger.setLevel(level);
        }
    } }
|
public class class_name {
    // Toggles global debugging; logger level follows the flag when logging is
    // active. Trailing comments are machine-generated dependency annotations.
    public static void setDebugging(boolean debugging) {
        FuzzyLite.debugging = debugging;
        if (isLogging()) {
            logger.setLevel(debugging ? Level.FINE : Level.INFO); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Syncs the adapter after a child was removed from the parent at
     * {@code parentPosition}; the flat list only changes when that parent
     * is currently expanded.
     */
    @UiThread
    public void notifyChildRemoved(int parentPosition, int childPosition) {
        int flatParentPosition = getFlatParentPosition(parentPosition);
        ExpandableWrapper<P, C> parentWrapper = mFlatItemList.get(flatParentPosition);
        parentWrapper.setParent(mParentList.get(parentPosition));
        if (!parentWrapper.isExpanded()) {
            return; // collapsed parent: its children are not in the flat list
        }
        int flatChildPosition = flatParentPosition + childPosition + 1;
        mFlatItemList.remove(flatChildPosition);
        notifyItemRemoved(flatChildPosition);
    } }
|
public class class_name {
    // Updates the flat item list after a child removal; only expanded parents
    // have children in the flat list. Trailing comments are machine-generated
    // dependency annotations.
    @UiThread
    public void notifyChildRemoved(int parentPosition, int childPosition) {
        int flatParentPosition = getFlatParentPosition(parentPosition);
        ExpandableWrapper<P, C> parentWrapper = mFlatItemList.get(flatParentPosition);
        parentWrapper.setParent(mParentList.get(parentPosition));
        if (parentWrapper.isExpanded()) {
            mFlatItemList.remove(flatParentPosition + childPosition + 1); // depends on control dependency: [if], data = [none]
            notifyItemRemoved(flatParentPosition + childPosition + 1); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Advances to the next non-empty iterator once the current one is
     * exhausted; leaves {@code currentIterator} null when no elements remain
     * anywhere.
     */
    private void prepareNextIterator() {
        if (currentIterator != null && currentIterator.hasNext()) {
            return; // current iterator still has elements
        }
        currentIterator = null;
        while (iteratorIterator.hasNext()) {
            currentIterator = iteratorIterator.next();
            if (currentIterator.hasNext()) {
                return; // found a non-empty iterator
            }
            currentIterator = null; // empty iterator: keep searching
        }
    } }
|
public class class_name {
    // Advances to the next non-empty iterator when the current one is
    // exhausted; currentIterator is left null if nothing remains. Standalone
    // "depends on" comments are machine-generated dependency annotations.
    private void prepareNextIterator()
    {
        if (currentIterator == null || !currentIterator.hasNext())
        {
            currentIterator = null;
            // depends on control dependency: [if], data = [none]
            while (iteratorIterator.hasNext())
            {
                currentIterator = iteratorIterator.next();
                // depends on control dependency: [while], data = [none]
                if (currentIterator.hasNext())
                {
                    break;
                }
                currentIterator = null;
                // depends on control dependency: [while], data = [none]
            }
        }
    } }
|
public class class_name {
    /**
     * Resolves a color resource, using the theme-aware overload on API 23+
     * and the deprecated single-argument variant on older platforms.
     */
    @SuppressWarnings("deprecation")
    public static @ColorInt int resolveColor(@ColorRes int color, Context context) {
        if (Build.VERSION.SDK_INT < 23) {
            // Pre-Marshmallow: only the deprecated overload exists.
            return context.getResources().getColor(color);
        }
        return context.getResources().getColor(color, context.getTheme());
    } }
|
public class class_name {
    // Resolves a color resource, theme-aware on API 23+. Trailing comments are
    // machine-generated dependency annotations.
    @SuppressWarnings("deprecation")
    public static @ColorInt int resolveColor(@ColorRes int color, Context context) {
        if (Build.VERSION.SDK_INT >= 23) {
            return context.getResources().getColor(color, context.getTheme()); // depends on control dependency: [if], data = [none]
        }
        else {
            return context.getResources().getColor(color); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Returns true when {@code className} is null/empty, or names a type whose
     * supertype hierarchy contains the root super-interface.
     *
     * @throws JavaModelException if the type or its hierarchy cannot be resolved
     */
    protected boolean isValidImplementedType(String className) throws JavaModelException {
        if (Strings.isNullOrEmpty(className)) {
            return true; // nothing to validate
        }
        final IType rootType = getRootSuperInterface();
        assert rootType != null;
        final IType type = getJavaProject().findType(className);
        assert type != null;
        final ITypeHierarchy hierarchy = type.newSupertypeHierarchy(new NullProgressMonitor());
        assert hierarchy != null;
        return hierarchy.contains(rootType);
    } }
|
public class class_name {
    // Validates that the named type's supertype hierarchy contains the root
    // super-interface (empty names pass). Trailing comments are
    // machine-generated dependency annotations.
    protected boolean isValidImplementedType(String className) throws JavaModelException {
        if (!Strings.isNullOrEmpty(className)) {
            final IType rootType = getRootSuperInterface();
            assert rootType != null;
            final IType type = getJavaProject().findType(className);
            assert type != null;
            final ITypeHierarchy hierarchy = type.newSupertypeHierarchy(new NullProgressMonitor());
            assert hierarchy != null;
            if (!hierarchy.contains(rootType)) {
                return false; // depends on control dependency: [if], data = [none]
            }
        }
        return true;
    } }
|
public class class_name {
    /**
     * Deletes every job instance attached to the node. Instances with no
     * history record are first logged as CRASHED — they were presumably
     * killed together with the server — before being purged. Commits once
     * at the end.
     */
    private void purgeDeadJobInstances(DbConn cnx, Node node) {
        for (JobInstance ji : JobInstance.select(cnx, "ji_select_by_node", node.getId())) {
            try {
                // Probe for an existing history entry for this instance.
                cnx.runSelectSingle("history_select_state_by_id", String.class, ji.getId());
            } catch (NoResultException e) {
                // No history yet: record the crash before purging.
                History.create(cnx, ji, State.CRASHED, Calendar.getInstance());
                Message.create(cnx,
                        "Job was supposed to be running at server startup - usually means it was killed along a server by an admin or a crash",
                        ji.getId());
            }
            cnx.runUpdate("ji_delete_by_id", ji.getId());
        }
        cnx.commit();
    } }
|
public class class_name {
    // Deletes node-bound job instances; those without a history record are
    // first logged as CRASHED. Trailing comments are machine-generated
    // dependency annotations.
    private void purgeDeadJobInstances(DbConn cnx, Node node)
    {
        for (JobInstance ji : JobInstance.select(cnx, "ji_select_by_node", node.getId()))
        {
            try
            {
                cnx.runSelectSingle("history_select_state_by_id", String.class, ji.getId()); // depends on control dependency: [try], data = [none]
            }
            catch (NoResultException e)
            {
                History.create(cnx, ji, State.CRASHED, Calendar.getInstance());
                Message.create(cnx,
                        "Job was supposed to be running at server startup - usually means it was killed along a server by an admin or a crash",
                        ji.getId());
            } // depends on control dependency: [catch], data = [none]
            cnx.runUpdate("ji_delete_by_id", ji.getId()); // depends on control dependency: [for], data = [ji]
        }
        cnx.commit();
    } }
|
public class class_name {
    /**
     * Submits the form once per generated data set, as a GET request with
     * the form parameters appended to the action URL.
     */
    private void processGetForm(HttpMessage message, int depth, String action, String baseURL, FormData formData) {
        for (String submitData : formData) {
            String target = action + submitData;
            log.debug("Submiting form with GET method and query with form parameters: " + submitData);
            processURL(message, depth, target, baseURL);
        }
    } }
|
public class class_name {
    // Submits the form as a GET once per generated data set. Trailing comments
    // are machine-generated dependency annotations.
    private void processGetForm(HttpMessage message, int depth, String action, String baseURL, FormData formData) {
        for (String submitData : formData) {
            log.debug("Submiting form with GET method and query with form parameters: " + submitData); // depends on control dependency: [for], data = [submitData]
            processURL(message, depth, action + submitData, baseURL); // depends on control dependency: [for], data = [submitData]
        }
    } }
|
public class class_name {
    /**
     * Draws a line segment of the given width centred on (x0, y0) and oriented
     * perpendicular to the direction (derivateX, derivateY).
     */
    public static void fillWidth(double x0, double y0, double derivateX, double derivateY, double width, PlotOperator plot)
    {
        double half = width/2.0;
        // A (near-)zero Y derivative means the perpendicular is exactly vertical.
        if (Math.abs(derivateY) <= Double.MIN_VALUE)
        {
            drawLine((int)(x0), (int)(y0+half), (int)(x0), (int)(y0-half), plot);
            return;
        }
        // Slope of the perpendicular to the tangent direction.
        double slope = derivateX/-derivateY;
        double dx = Math.sqrt((half*half)/(slope*slope+1));
        double dy = slope*dx;
        drawLine((int)(x0+dx), (int)(y0+dy), (int)(x0-dx), (int)(y0-dy), plot);
    } }
|
public class class_name {
    // NOTE: control/data-dependency annotated variant; trailing "// depends on ..." markers are dataset labels, not ordinary comments. Code intentionally unchanged.
    public static void fillWidth(double x0, double y0, double derivateX, double derivateY, double width, PlotOperator plot)
    {
        double halfWidth = width/2.0;
        if (Math.abs(derivateY) <= Double.MIN_VALUE)
        {
            drawLine((int)(x0), (int)(y0+halfWidth), (int)(x0), (int)(y0-halfWidth), plot);
 // depends on control dependency: [if], data = [none]
        }
        else
        {
            double s = derivateX/-derivateY;
            double w2 = halfWidth*halfWidth;
            double x1 = Math.sqrt(w2/(s*s+1));
            double y1 = s*x1;
            drawLine((int)(x0+x1), (int)(y0+y1), (int)(x0-x1), (int)(y0-y1), plot);
 // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Scans the schema table for unique metric or scope names matching the
     * query, jumping ahead in row-key space after each hit so that runs of rows
     * sharing the same name are skipped rather than read.
     *
     * @param query the schema query (wildcards allowed; limit and scanFrom honoured)
     * @param type  must be METRIC or SCOPE
     * @return up to {@code query.getLimit()} unique records
     */
    private List<MetricSchemaRecord> _getUniqueFastScan(MetricSchemaRecordQuery query, final RecordType type) {
        requireNotDisposed();
        SystemAssert.requireArgument(RecordType.METRIC.equals(type) || RecordType.SCOPE.equals(type),
                "This method is only for use with metric or scope.");
        _logger.info("Using FastScan. Will skip rows while scanning.");
        final Set<MetricSchemaRecord> records = new HashSet<>();
        final ScanMetadata metadata = _constructScanMetadata(query);
        // Translate the query's wildcard syntax into regular expressions.
        String namespace = SchemaService.convertToRegex(query.getNamespace());
        String scope = SchemaService.convertToRegex(query.getScope());
        String metric = SchemaService.convertToRegex(query.getMetric());
        String tagKey = SchemaService.convertToRegex(query.getTagKey());
        String tagValue = SchemaService.convertToRegex(query.getTagValue());
        MetricSchemaRecord scanFrom = query.getScanFrom();
        String rowKeyRegex = "^" + _constructRowKey(namespace, scope, metric, tagKey, tagValue, metadata.tableName) + "$";
        // Row-key regex plus key-only filters: cell values are never needed.
        List<ScanFilter> filters = new ArrayList<ScanFilter>();
        filters.add(new RowFilter(CompareOp.EQUAL, new RegexStringComparator(rowKeyRegex)));
        filters.add(new KeyOnlyFilter());
        filters.add(new FirstKeyOnlyFilter());
        FilterList filterList = new FilterList(filters, FilterList.Operator.MUST_PASS_ALL);
        // When paging, resume just past 'scanFrom'; otherwise start at the computed start row.
        String start = scanFrom == null ? Bytes.toString(metadata.startRow)
                : _plusOneNConstructRowKey(scanFrom, metadata.tableName, type);
        String end = Bytes.toString(metadata.stopRow);
        ArrayList<ArrayList<KeyValue>> rows = _getSingleRow(start, end, filterList, metadata.tableName);
        while(rows != null && !rows.isEmpty()) {
            String rowKey = Bytes.toString(rows.get(0).get(0).key());
            String splits[] = rowKey.split(String.valueOf(ROWKEY_SEPARATOR));
            // Which key component holds the requested entity depends on the table layout.
            String record = (RecordType.METRIC.equals(type) && metadata.tableName.equals(METRIC_SCHEMA_TABLENAME)) ||
                    (RecordType.SCOPE.equals(type) && metadata.tableName.equals(SCOPE_SCHEMA_TABLENAME)) ? splits[0] : splits[1];
            MetricSchemaRecord schemaRecord = RecordType.METRIC.equals(type) ?
                    new MetricSchemaRecord(null, record) : new MetricSchemaRecord(record, null);
            records.add(schemaRecord);
            if(records.size() == query.getLimit()) {
                break;
            }
            // Compute the smallest row key strictly greater than everything already seen.
            String newScanStart;
            if(!SchemaService.containsFilter(query.getScope()) || !SchemaService.containsFilter(query.getMetric())) {
                newScanStart = _plusOne(record);
            } else {
                newScanStart = _plusOne(splits[0] + ROWKEY_SEPARATOR + splits[1]);
            }
            rows = _getSingleRow(newScanStart, end, filterList, metadata.tableName);
        }
        return new ArrayList<>(records);
    } }
|
public class class_name {
    // NOTE: control/data-dependency annotated variant; trailing "// depends on ..." markers are dataset labels, not ordinary comments. Code intentionally unchanged.
    private List<MetricSchemaRecord> _getUniqueFastScan(MetricSchemaRecordQuery query, final RecordType type) {
        requireNotDisposed();
        SystemAssert.requireArgument(RecordType.METRIC.equals(type) || RecordType.SCOPE.equals(type),
                "This method is only for use with metric or scope.");
        _logger.info("Using FastScan. Will skip rows while scanning.");
        final Set<MetricSchemaRecord> records = new HashSet<>();
        final ScanMetadata metadata = _constructScanMetadata(query);
        String namespace = SchemaService.convertToRegex(query.getNamespace());
        String scope = SchemaService.convertToRegex(query.getScope());
        String metric = SchemaService.convertToRegex(query.getMetric());
        String tagKey = SchemaService.convertToRegex(query.getTagKey());
        String tagValue = SchemaService.convertToRegex(query.getTagValue());
        MetricSchemaRecord scanFrom = query.getScanFrom();
        String rowKeyRegex = "^" + _constructRowKey(namespace, scope, metric, tagKey, tagValue, metadata.tableName) + "$";
        List<ScanFilter> filters = new ArrayList<ScanFilter>();
        filters.add(new RowFilter(CompareOp.EQUAL, new RegexStringComparator(rowKeyRegex)));
        filters.add(new KeyOnlyFilter());
        filters.add(new FirstKeyOnlyFilter());
        FilterList filterList = new FilterList(filters, FilterList.Operator.MUST_PASS_ALL);
        String start = scanFrom == null ? Bytes.toString(metadata.startRow)
                : _plusOneNConstructRowKey(scanFrom, metadata.tableName, type);
        String end = Bytes.toString(metadata.stopRow);
        ArrayList<ArrayList<KeyValue>> rows = _getSingleRow(start, end, filterList, metadata.tableName);
        while(rows != null && !rows.isEmpty()) {
            String rowKey = Bytes.toString(rows.get(0).get(0).key());
            String splits[] = rowKey.split(String.valueOf(ROWKEY_SEPARATOR));
            String record = (RecordType.METRIC.equals(type) && metadata.tableName.equals(METRIC_SCHEMA_TABLENAME)) ||
                    (RecordType.SCOPE.equals(type) && metadata.tableName.equals(SCOPE_SCHEMA_TABLENAME)) ? splits[0] : splits[1];
            MetricSchemaRecord schemaRecord = RecordType.METRIC.equals(type) ?
                    new MetricSchemaRecord(null, record) : new MetricSchemaRecord(record, null);
            records.add(schemaRecord); // depends on control dependency: [while], data = [none]
            if(records.size() == query.getLimit()) {
                break;
            }
            String newScanStart;
            if(!SchemaService.containsFilter(query.getScope()) || !SchemaService.containsFilter(query.getMetric())) {
                newScanStart = _plusOne(record); // depends on control dependency: [if], data = [none]
            } else {
                newScanStart = _plusOne(splits[0] + ROWKEY_SEPARATOR + splits[1]); // depends on control dependency: [if], data = [none]
            }
            rows = _getSingleRow(newScanStart, end, filterList, metadata.tableName); // depends on control dependency: [while], data = [none]
        }
        return new ArrayList<>(records);
    } }
|
public class class_name {
    /**
     * Decodes a metadata record field into a HiveTypeInfo. The field must be a
     * MAP containing at least the TYPE and EXTRA_INFO entries; an optional
     * COMMENT entry is propagated as well.
     *
     * @throws StageException if the field is not a map or a required key is missing
     */
    @SuppressWarnings("unchecked")
  public HiveTypeInfo generateHiveTypeInfoFromMetadataField(Field hiveTypeInfoField) throws StageException {
    // Guard: anything other than a map is malformed metadata.
    if (hiveTypeInfoField.getType() != Field.Type.MAP) {
      throw new StageException(Errors.HIVE_17, HiveMetastoreUtil.TYPE_INFO);
    }
    Map<String, Field> typeInfo = (Map<String, Field>) hiveTypeInfoField.getValue();
    if (!typeInfo.containsKey(HiveMetastoreUtil.TYPE)
        || !typeInfo.containsKey(HiveMetastoreUtil.EXTRA_INFO)) {
      throw new StageException(Errors.HIVE_17, HiveMetastoreUtil.TYPE_INFO);
    }
    HiveType hiveType = HiveType.getHiveTypeFromString(typeInfo.get(HiveMetastoreUtil.TYPE).getValueAsString());
    // COMMENT is optional; default to the empty string when absent.
    String comment = "";
    if (typeInfo.containsKey(HiveMetastoreUtil.COMMENT)) {
      comment = typeInfo.get(HiveMetastoreUtil.COMMENT).getValueAsString();
    }
    return generateHiveTypeInfoFromMetadataField(hiveType, comment, typeInfo.get(HiveMetastoreUtil.EXTRA_INFO));
  } }
|
public class class_name {
    // NOTE: control/data-dependency annotated variant; trailing "// depends on ..." markers are dataset labels, not ordinary comments. Code intentionally unchanged.
    @SuppressWarnings("unchecked")
  public HiveTypeInfo generateHiveTypeInfoFromMetadataField(Field hiveTypeInfoField) throws StageException {
    if (hiveTypeInfoField.getType() == Field.Type.MAP) {
      Map<String, Field> fields = (Map<String, Field>) hiveTypeInfoField.getValue();
      if (!fields.containsKey(HiveMetastoreUtil.TYPE)
          || !fields.containsKey(HiveMetastoreUtil.EXTRA_INFO)) {
        throw new StageException(Errors.HIVE_17, HiveMetastoreUtil.TYPE_INFO);
      }
      HiveType hiveType = HiveType.getHiveTypeFromString(fields.get(HiveMetastoreUtil.TYPE).getValueAsString());
      String comment = "";
      if(fields.containsKey(HiveMetastoreUtil.COMMENT)) {
        comment = fields.get(HiveMetastoreUtil.COMMENT).getValueAsString(); // depends on control dependency: [if], data = [none]
      }
      return generateHiveTypeInfoFromMetadataField(hiveType, comment, fields.get(HiveMetastoreUtil.EXTRA_INFO));
    } else {
      throw new StageException(Errors.HIVE_17, HiveMetastoreUtil.TYPE_INFO);
    }
  } }
|
public class class_name {
    /**
     * Returns the IfcWasteTerminalTypeEnum EEnum, resolving it lazily from the
     * registered Ifc2x3tc1 package (classifier index 928) and caching the result.
     * NOTE(review): lazy initialization is unsynchronized — presumably accessed
     * single-threaded as with other EMF-generated accessors; confirm.
     */
    public EEnum getIfcWasteTerminalTypeEnum() {
        if (ifcWasteTerminalTypeEnumEEnum == null) {
            ifcWasteTerminalTypeEnumEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI)
                    .getEClassifiers().get(928);
        }
        return ifcWasteTerminalTypeEnumEEnum;
    } }
|
public class class_name {
    // NOTE: control/data-dependency annotated variant; trailing "// depends on ..." markers are dataset labels, not ordinary comments. Code intentionally unchanged.
    public EEnum getIfcWasteTerminalTypeEnum() {
        if (ifcWasteTerminalTypeEnumEEnum == null) {
            ifcWasteTerminalTypeEnumEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI)
                    .getEClassifiers().get(928);
            // depends on control dependency: [if], data = [none]
        }
        return ifcWasteTerminalTypeEnumEEnum;
    } }
|
public class class_name {
    /**
     * Initializes the wrapped SarlEvent inside the given script, creating it on
     * the first call only; later calls merely refresh the type-resolution context.
     */
    public void eInit(SarlScript script, String name, IJvmTypeProvider context) {
        setTypeResolutionContext(context);
        if (this.sarlEvent != null) {
            return; // already initialized
        }
        this.sarlEvent = SarlFactory.eINSTANCE.createSarlEvent();
        script.getXtendTypes().add(this.sarlEvent);
        this.sarlEvent.setAnnotationInfo(XtendFactory.eINSTANCE.createXtendTypeDeclaration());
        if (!Strings.isEmpty(name)) {
            this.sarlEvent.setName(name);
        }
    } }
|
public class class_name {
    // NOTE: control/data-dependency annotated variant; trailing "// depends on ..." markers are dataset labels, not ordinary comments. Code intentionally unchanged.
    public void eInit(SarlScript script, String name, IJvmTypeProvider context) {
        setTypeResolutionContext(context);
        if (this.sarlEvent == null) {
            this.sarlEvent = SarlFactory.eINSTANCE.createSarlEvent(); // depends on control dependency: [if], data = [none]
            script.getXtendTypes().add(this.sarlEvent); // depends on control dependency: [if], data = [(this.sarlEvent]
            this.sarlEvent.setAnnotationInfo(XtendFactory.eINSTANCE.createXtendTypeDeclaration()); // depends on control dependency: [if], data = [none]
            if (!Strings.isEmpty(name)) {
                this.sarlEvent.setName(name); // depends on control dependency: [if], data = [none]
            }
        }
    } }
|
public class class_name {
    /**
     * Concatenates all buffered segments into one array, appending
     * {@code padding} zero bytes at the end. With no padding and exactly one
     * segment, the backing array is returned (or sliced) without re-copying.
     */
    public byte[] toBytes(final int padding) {
    // Fast path: a lone, unpadded segment can be handed out (or sliced) directly.
    if (padding == 0 && segments.size() == 1) {
      BufferSegment only = segments.get(0);
      boolean coversWholeBuffer = only.offset == 0 && only.len == only.buf.length;
      return coversWholeBuffer
          ? only.buf
          : Arrays.copyOfRange(only.buf, only.offset, only.offset + only.len);
    }
    byte[] out = new byte[total_length + padding];
    int written = 0;
    for (BufferSegment segment : segments) {
      System.arraycopy(segment.buf, segment.offset, out, written, segment.len);
      written += segment.len;
    }
    return out;
  } }
|
public class class_name {
    // NOTE: control/data-dependency annotated variant; trailing "// depends on ..." markers are dataset labels, not ordinary comments. Code intentionally unchanged.
    public byte[] toBytes(final int padding) {
    // special case a single entry
    if (padding == 0 && segments.size() == 1) {
      BufferSegment seg = segments.get(0);
      if (seg.offset == 0 && seg.len == seg.buf.length) {
        return seg.buf; // depends on control dependency: [if], data = [none]
      }
      return Arrays.copyOfRange(seg.buf, seg.offset, seg.offset + seg.len); // depends on control dependency: [if], data = [none]
    }
    byte[] result = new byte[total_length + padding];
    int ofs = 0;
    for (BufferSegment seg : segments) {
      System.arraycopy(seg.buf, seg.offset, result, ofs, seg.len); // depends on control dependency: [for], data = [seg]
      ofs += seg.len; // depends on control dependency: [for], data = [seg]
    }
    return result;
  } }
|
public class class_name {
    /**
     * Declares a method on the class being built, translating the Java return
     * and argument classes into ASM types before delegating.
     */
    public ClassBuilder<T> withMethod(String methodName, Class<?> returnClass, List<? extends Class<?>> argumentTypes, Expression expression) {
        Type[] argumentAsmTypes = new Type[argumentTypes.size()];
        int index = 0;
        for (Class<?> argumentClass : argumentTypes) {
            argumentAsmTypes[index++] = getType(argumentClass);
        }
        return withMethod(new Method(methodName, getType(returnClass), argumentAsmTypes), expression);
    } }
|
public class class_name {
    // NOTE: control/data-dependency annotated variant; trailing "// depends on ..." markers are dataset labels, not ordinary comments. Code intentionally unchanged.
    public ClassBuilder<T> withMethod(String methodName, Class<?> returnClass, List<? extends Class<?>> argumentTypes, Expression expression) {
        Type[] types = new Type[argumentTypes.size()];
        for (int i = 0; i < argumentTypes.size(); i++) {
            types[i] = getType(argumentTypes.get(i)); // depends on control dependency: [for], data = [i]
        }
        return withMethod(new Method(methodName, getType(returnClass), types), expression);
    } }
|
public class class_name {
    /**
     * Appends the given validation statuses, lazily allocating the backing list
     * on first use, and returns this object to allow call chaining.
     */
    public ModelPackageStatusDetails withValidationStatuses(ModelPackageStatusItem... validationStatuses) {
        if (this.validationStatuses == null) {
            setValidationStatuses(new java.util.ArrayList<ModelPackageStatusItem>(validationStatuses.length));
        }
        java.util.Collections.addAll(this.validationStatuses, validationStatuses);
        return this;
    } }
|
public class class_name {
    // NOTE: control/data-dependency annotated variant; trailing "// depends on ..." markers are dataset labels, not ordinary comments. Code intentionally unchanged.
    public ModelPackageStatusDetails withValidationStatuses(ModelPackageStatusItem... validationStatuses) {
        if (this.validationStatuses == null) {
            setValidationStatuses(new java.util.ArrayList<ModelPackageStatusItem>(validationStatuses.length)); // depends on control dependency: [if], data = [none]
        }
        for (ModelPackageStatusItem ele : validationStatuses) {
            this.validationStatuses.add(ele); // depends on control dependency: [for], data = [ele]
        }
        return this;
    } }
|
public class class_name {
    /**
     * Registers a forward under its name; when a forward with the same name is
     * already present the first registration wins and the new one is ignored.
     */
    public void addForward( ForwardModel newActionForward )
    {
        String forwardName = newActionForward.getName();
        // TODO: restore a duplicate-path warning once a logger that does not
        // pull the knex dependency into the xdoclet compiler is available.
        if ( ! _forwards.containsKey( forwardName ) )
        {
            _forwards.put( forwardName, newActionForward );
        }
    } }
|
public class class_name {
    // NOTE: control/data-dependency annotated variant; trailing "// depends on ..." markers are dataset labels, not ordinary comments. Code intentionally unchanged.
    public void addForward( ForwardModel newActionForward )
    {
        if ( _forwards.containsKey( newActionForward.getName() ) )
        {
            // TODO: Rich - replace this with something other than the knex logger so that the xdoclet compiler
            //       won't require knex
            //if ( ! fwd.getPath().equals( newActionForward.getPath() ) )
            //{
            //    logger.warn( "Could not add forward \"" + newActionForward.getName() + "\", path=\""
            //                 + newActionForward.getPath() + "\" because there is already a forward with"
            //                 + " the same name (path=\"" + fwd.getPath() + "\")." );
            //}
            return; // depends on control dependency: [if], data = [none]
        }
        _forwards.put( newActionForward.getName(), newActionForward );
    } }
|
public class class_name {
    /**
     * Waits like {@code await(long, TimeUnit)} but never propagates
     * {@code InterruptedException}: interrupts are absorbed, the remaining
     * timeout is recomputed, and the thread's interrupt status is restored
     * before returning.
     *
     * @param timeout maximum time to wait
     * @param unit    unit of {@code timeout}
     * @return {@code false} if the timeout elapsed before the wait completed,
     *         otherwise the result of {@code await}
     */
    public boolean awaitUninterruptibly(final long timeout, final TimeUnit unit) {
        // Capture (and clear) any pending interrupt so it can be restored on exit.
        boolean interrupted = Thread.interrupted();
        long now = System.nanoTime();
        long remaining = unit.toNanos(timeout);
        try {
            while (true) {
                if (remaining <= 0L) return false;
                try {
                    return await(remaining, TimeUnit.NANOSECONDS);
                } catch (InterruptedException ie) {
                    interrupted = true;
                    // Subtract the time elapsed since 'now'; 'now' is refreshed inside the same expression.
                    remaining -= (-now + (now = System.nanoTime()));
                }
            }
        } finally {
            if (interrupted) Thread.currentThread().interrupt();
        }
    } }
|
public class class_name {
    // NOTE: control/data-dependency annotated variant; trailing "// depends on ..." markers are dataset labels, not ordinary comments. Code intentionally unchanged.
    public boolean awaitUninterruptibly(final long timeout, final TimeUnit unit) {
        boolean interrupted = Thread.interrupted();
        long now = System.nanoTime();
        long remaining = unit.toNanos(timeout);
        try {
            while (true) {
                if (remaining <= 0L) return false;
                try {
                    return await(remaining, TimeUnit.NANOSECONDS); // depends on control dependency: [try], data = [none]
                } catch (InterruptedException ie) {
                    interrupted = true;
                    remaining -= (-now + (now = System.nanoTime()));
                } // depends on control dependency: [catch], data = [none]
            }
        } finally {
            if (interrupted) Thread.currentThread().interrupt();
        }
    } }
|
public class class_name {
    /**
     * Returns a copy of this interval shifted by the given amount of the given
     * unit; infinite boundaries stay infinite. A zero amount returns this
     * instance unchanged.
     */
    public TimestampInterval move(
        long amount,
        IsoUnit unit
    ) {
        if (amount == 0) {
            return this;
        }
        Boundary<PlainTimestamp> shiftedStart =
            this.getStart().isInfinite()
                ? Boundary.<PlainTimestamp>infinitePast()
                : Boundary.of(
                    this.getStart().getEdge(),
                    this.getStart().getTemporal().plus(amount, unit));
        Boundary<PlainTimestamp> shiftedEnd =
            this.getEnd().isInfinite()
                ? Boundary.<PlainTimestamp>infiniteFuture()
                : Boundary.of(
                    this.getEnd().getEdge(),
                    this.getEnd().getTemporal().plus(amount, unit));
        return new TimestampInterval(shiftedStart, shiftedEnd);
    } }
|
public class class_name {
    // NOTE: control/data-dependency annotated variant; trailing "// depends on ..." markers are dataset labels, not ordinary comments. Code intentionally unchanged.
    public TimestampInterval move(
        long amount,
        IsoUnit unit
    ) {
        if (amount == 0) {
            return this; // depends on control dependency: [if], data = [none]
        }
        Boundary<PlainTimestamp> s;
        Boundary<PlainTimestamp> e;
        if (this.getStart().isInfinite()) {
            s = Boundary.infinitePast(); // depends on control dependency: [if], data = [none]
        } else {
            s =
                Boundary.of(
                    this.getStart().getEdge(),
                    this.getStart().getTemporal().plus(amount, unit)); // depends on control dependency: [if], data = [none]
        }
        if (this.getEnd().isInfinite()) {
            e = Boundary.infiniteFuture(); // depends on control dependency: [if], data = [none]
        } else {
            e =
                Boundary.of(
                    this.getEnd().getEdge(),
                    this.getEnd().getTemporal().plus(amount, unit)); // depends on control dependency: [if], data = [none]
        }
        return new TimestampInterval(s, e);
    } }
|
public class class_name {
    /**
     * ANTLR-generated parser rule: matches the single-assign operator
     * {@code '='} and returns it as a datatype-rule token. Appears to be
     * generated from InternalPureXbase.g — regenerate from the grammar rather
     * than editing by hand.
     */
    public final AntlrDatatypeRuleToken ruleOpSingleAssign() throws RecognitionException {
        AntlrDatatypeRuleToken current = new AntlrDatatypeRuleToken();
        Token kw=null;
        enterRule();
        try {
            // InternalPureXbase.g:919:2: (kw= '=' )
            // InternalPureXbase.g:920:2: kw= '='
            {
            kw=(Token)match(input,20,FOLLOW_2); if (state.failed) return current;
            if ( state.backtracking==0 ) {
                current.merge(kw);
                newLeafNode(kw, grammarAccess.getOpSingleAssignAccess().getEqualsSignKeyword());
            }
            }
            if ( state.backtracking==0 ) {
                leaveRule();
            }
        }
        catch (RecognitionException re) {
            recover(input,re);
            appendSkippedTokens();
        }
        finally {
        }
        return current;
    } }
|
public class class_name {
    // NOTE: control/data-dependency annotated variant; trailing "// depends on ..." markers are dataset labels, not ordinary comments. Code intentionally unchanged.
    public final AntlrDatatypeRuleToken ruleOpSingleAssign() throws RecognitionException {
        AntlrDatatypeRuleToken current = new AntlrDatatypeRuleToken();
        Token kw=null;
        enterRule();
        try {
            // InternalPureXbase.g:919:2: (kw= '=' )
            // InternalPureXbase.g:920:2: kw= '='
            {
            kw=(Token)match(input,20,FOLLOW_2); if (state.failed) return current;
            if ( state.backtracking==0 ) {
                current.merge(kw); // depends on control dependency: [if], data = [none]
                newLeafNode(kw, grammarAccess.getOpSingleAssignAccess().getEqualsSignKeyword()); // depends on control dependency: [if], data = [none]
            }
            }
            if ( state.backtracking==0 ) {
                leaveRule(); // depends on control dependency: [if], data = [none]
            }
        }
        catch (RecognitionException re) {
            recover(input,re);
            appendSkippedTokens();
        }
        finally {
        }
        return current;
    } }
|
public class class_name {
    /**
     * Rebuilds the alignment path and both index arrays from the index pairs of
     * the given fragments, then refreshes the equivalent-residue count and the
     * gap count.
     */
    public void apairs_from_idxlst(JointFragments jf) {
        List<int[]> pairs = jf.getIdxlist();
        int n = pairs.size();
        aligpath = new IndexPair[n];
        idx1 = new int[n];
        idx2 = new int[n];
        int pos = 0;
        for (int[] pair : pairs) {
            idx1[pos] = pair[0];
            idx2[pos] = pair[1];
            aligpath[pos] = new IndexPair((short) pair[0], (short) pair[1]);
            pos++;
        }
        eqr0 = idx1.length;
        gaps0 = count_gaps(idx1, idx2);
    } }
|
public class class_name {
    // NOTE: control/data-dependency annotated variant; trailing "// depends on ..." markers are dataset labels, not ordinary comments. Code intentionally unchanged.
    public void apairs_from_idxlst(JointFragments jf) {
        List<int[]> il = jf.getIdxlist();
        //System.out.println("Alt Alig apairs_from_idxlst");
        aligpath = new IndexPair[il.size()];
        idx1 = new int[il.size()];
        idx2 = new int[il.size()];
        for (int i =0 ; i < il.size();i++) {
            int[] p = il.get(i);
            //System.out.print(" idx1 " + p[0] + " idx2 " + p[1]);
            idx1[i] = p[0]; // depends on control dependency: [for], data = [i]
            idx2[i] = p[1]; // depends on control dependency: [for], data = [i]
            aligpath[i] = new IndexPair((short)p[0],(short)p[1]); // depends on control dependency: [for], data = [i]
        }
        eqr0 = idx1.length;
        //System.out.println("eqr " + eqr0);
        gaps0 = count_gaps(idx1,idx2);
    } }
|
public class class_name {
    /**
     * Returns a view of {@code iterable} that omits its first
     * {@code numberToSkip} elements. For a {@code List} the skip uses
     * {@code subList}, so each {@code iterator()} call is cheap; otherwise the
     * underlying iterator is advanced lazily on each {@code iterator()} call.
     * The returned iterator forbids {@code remove()} before the first
     * {@code next()} so a skipped element can never be removed by accident.
     */
    public static <T> Iterable<T> skip(final Iterable<T> iterable, final int numberToSkip) {
        checkNotNull(iterable);
        checkArgument(numberToSkip >= 0, "number to skip cannot be negative");
        if (iterable instanceof List) {
            final List<T> list = (List<T>) iterable;
            return new FluentIterable<T>() {
                @Override
                public Iterator<T> iterator() {
                    // TODO(kevinb): Support a concurrently modified collection?
                    int toSkip = Math.min(list.size(), numberToSkip);
                    return list.subList(toSkip, list.size()).iterator();
                }
            };
        }
        return new FluentIterable<T>() {
            @Override
            public Iterator<T> iterator() {
                final Iterator<T> iterator = iterable.iterator();
                Iterators.advance(iterator, numberToSkip);
                /*
                 * We can't just return the iterator because an immediate call to its
                 * remove() method would remove one of the skipped elements instead of
                 * throwing an IllegalStateException.
                 */
                return new Iterator<T>() {
                    boolean atStart = true;
                    @Override
                    public boolean hasNext() {
                        return iterator.hasNext();
                    }
                    @Override
                    public T next() {
                        T result = iterator.next();
                        atStart = false; // not called if next() fails
                        return result;
                    }
                    @Override
                    public void remove() {
                        checkRemove(!atStart);
                        iterator.remove();
                    }
                };
            }
        };
    } }
|
public class class_name {
    // NOTE: control/data-dependency annotated variant; trailing "// depends on ..." markers are dataset labels, not ordinary comments. Code intentionally unchanged.
    public static <T> Iterable<T> skip(final Iterable<T> iterable, final int numberToSkip) {
        checkNotNull(iterable);
        checkArgument(numberToSkip >= 0, "number to skip cannot be negative");
        if (iterable instanceof List) {
            final List<T> list = (List<T>) iterable;
            return new FluentIterable<T>() {
                @Override
                public Iterator<T> iterator() {
                    // TODO(kevinb): Support a concurrently modified collection?
                    int toSkip = Math.min(list.size(), numberToSkip);
                    return list.subList(toSkip, list.size()).iterator();
                }
            }; // depends on control dependency: [if], data = [none]
        }
        return new FluentIterable<T>() {
            @Override
            public Iterator<T> iterator() {
                final Iterator<T> iterator = iterable.iterator();
                Iterators.advance(iterator, numberToSkip);
                /*
                 * We can't just return the iterator because an immediate call to its
                 * remove() method would remove one of the skipped elements instead of
                 * throwing an IllegalStateException.
                 */
                return new Iterator<T>() {
                    boolean atStart = true;
                    @Override
                    public boolean hasNext() {
                        return iterator.hasNext();
                    }
                    @Override
                    public T next() {
                        T result = iterator.next();
                        atStart = false; // not called if next() fails
                        return result;
                    }
                    @Override
                    public void remove() {
                        checkRemove(!atStart);
                        iterator.remove();
                    }
                };
            }
        };
    } }
|
public class class_name {
    /**
     * Logically closes this pooled connection: resolves any in-flight
     * transaction, restores auto-commit, closes tracked statements and returns
     * the connection to its segment. Cleanup failures are collected via
     * {@code setLogicalCloseException} and rethrown after the segment release.
     *
     * @throws SQLException if cleanup failed, or if open statements remain and
     *                      the policy is {@code REPORT}
     */
    @Override
    public void close() throws SQLException {
        switch(transactionState) {
        case CLOSED: //Segment close will ignore multiple close on same connection, but need to skip the other stuff.
            return;
        case NONE: //Autocommit never changed
            transactionState = TransactionState.CLOSED;
            break;
        default:
            // A transaction may be in progress: resolve it, then always try to
            // restore auto-commit even if resolution failed.
            try {
                resolveIncompleteTransactions();
                transactionState = TransactionState.CLOSED;
            } catch(Throwable t) {
                setLogicalCloseException(t);
            } finally {
                try {
                    if(!conn.getAutoCommit()) { //Set auto-commit
                        conn.setAutoCommit(true);
                    }
                } catch(Throwable t) {
                    setLogicalCloseException(t);
                }
            }
        }
        boolean openStatements = closeStatements();
        Throwable logicalCloseException = this.logicalCloseException; //Preserve the exception - segment will set to null.
        segment.close(this);
        if(logicalCloseException != null) { //Now throw to the caller
            Util.throwException(logicalCloseException);
        }
        if(openStatements && openStatementPolicy == OpenStatementPolicy.REPORT) {
            throw new SQLException("Connection has open statements", JDBConnection.SQLSTATE_CONNECTION_EXCEPTION);
        }
    } }
|
public class class_name {
    // NOTE: control/data-dependency annotated variant; trailing "// depends on ..." markers are dataset labels, not ordinary comments. Code intentionally unchanged.
    @Override
    public void close() throws SQLException {
        switch(transactionState) {
        case CLOSED: //Segment close will ignore multiple close on same connection, but need to skip the other stuff.
            return;
        case NONE: //Autocommit never changed
            transactionState = TransactionState.CLOSED;
            break;
        default:
            try {
                resolveIncompleteTransactions(); // depends on control dependency: [try], data = [none]
                transactionState = TransactionState.CLOSED; // depends on control dependency: [try], data = [none]
            } catch(Throwable t) {
                setLogicalCloseException(t);
            } finally { // depends on control dependency: [catch], data = [none]
                try {
                    if(!conn.getAutoCommit()) { //Set auto-commit
                        conn.setAutoCommit(true); // depends on control dependency: [if], data = [none]
                    }
                } catch(Throwable t) {
                    setLogicalCloseException(t);
                } // depends on control dependency: [catch], data = [none]
            }
        }
        boolean openStatements = closeStatements();
        Throwable logicalCloseException = this.logicalCloseException; //Preserve the exception - segment will set to null.
        segment.close(this);
        if(logicalCloseException != null) { //Now throw to the caller
            Util.throwException(logicalCloseException);
        }
        if(openStatements && openStatementPolicy == OpenStatementPolicy.REPORT) {
            throw new SQLException("Connection has open statements", JDBConnection.SQLSTATE_CONNECTION_EXCEPTION);
        }
    } }
|
public class class_name {
    /**
     * Starts a local (non-managed) transaction on this connection manager.
     * Acquires a connection if needed and, outside managed environments when
     * the temporary-false auto-commit mode is configured, switches the
     * connection to autoCommit=false.
     *
     * @throws TransactionInProgressException if a local transaction is already active
     * @throws PersistenceBrokerException     if no connection could be obtained
     */
    public void localBegin()
    {
        if (this.isInLocalTransaction)
        {
            throw new TransactionInProgressException("Connection is already in transaction");
        }
        Connection connection = null;
        try
        {
            connection = this.getConnection();
        }
        catch (LookupException e)
        {
            /**
             * must throw to notify user that we couldn't start a connection
             */
            throw new PersistenceBrokerException("Can't lookup a connection", e);
        }
        if (log.isDebugEnabled()) log.debug("localBegin was called for con " + connection);
        // change autoCommit state only if we are not in a managed environment
        // and it is enabled by user
        if(!broker.isManaged())
        {
            if (jcd.getUseAutoCommit() == JdbcConnectionDescriptor.AUTO_COMMIT_SET_TRUE_AND_TEMPORARY_FALSE)
            {
                if (log.isDebugEnabled()) log.debug("Try to change autoCommit state to 'false'");
                platform.changeAutoCommitState(jcd, connection, false);
            }
        }
        else
        {
            if(log.isDebugEnabled()) log.debug(
                    "Found managed environment setting in PB, will skip Platform.changeAutoCommitState(...) call");
        }
        this.isInLocalTransaction = true;
    } }
|
public class class_name {
    // NOTE: control/data-dependency annotated variant; trailing "// depends on ..." markers are dataset labels, not ordinary comments. Code intentionally unchanged.
    public void localBegin()
    {
        if (this.isInLocalTransaction)
        {
            throw new TransactionInProgressException("Connection is already in transaction");
        }
        Connection connection = null;
        try
        {
            connection = this.getConnection();
 // depends on control dependency: [try], data = [none]
        }
        catch (LookupException e)
        {
            /**
             * must throw to notify user that we couldn't start a connection
             */
            throw new PersistenceBrokerException("Can't lookup a connection", e);
        }
        if (log.isDebugEnabled()) log.debug("localBegin was called for con " + connection);
        // change autoCommit state only if we are not in a managed environment
        // and it is enabled by user
        if(!broker.isManaged())
        {
            if (jcd.getUseAutoCommit() == JdbcConnectionDescriptor.AUTO_COMMIT_SET_TRUE_AND_TEMPORARY_FALSE)
            {
                if (log.isDebugEnabled()) log.debug("Try to change autoCommit state to 'false'");
                platform.changeAutoCommitState(jcd, connection, false);
            }
 // depends on control dependency: [catch], data = [none]
        }
        else
        {
            if(log.isDebugEnabled()) log.debug(
                    "Found managed environment setting in PB, will skip Platform.changeAutoCommitState(...) call");
        }
        this.isInLocalTransaction = true;
    } }
|
public class class_name {
    /**
     * Advances spans1 to its next start position that overlaps the window
     * currently covered by spans2, pulling spans2 forward as needed. Returns
     * {@code true} when an overlapping position is found; sets
     * {@code noMorePositions} and returns {@code false} once either spans
     * source is exhausted.
     *
     * @throws IOException propagated from the underlying spans
     */
    private boolean goToNextStartPosition() throws IOException {
        int nextSpans1StartPosition;
        int nextSpans1EndPosition;
        int nextSpans2StartPosition;
        int nextSpans2EndPosition;
        while ((nextSpans1StartPosition = spans1.spans
                .nextStartPosition()) != NO_MORE_POSITIONS) {
            nextSpans1EndPosition = spans1.spans.endPosition();
            // Does the new spans1 window already overlap the last spans2 window?
            if (nextSpans1StartPosition <= lastSpans2EndPosition
                    && nextSpans1EndPosition >= lastSpans2StartPosition) {
                return true;
            } else {
                // Pull spans2 forward until its window passes the spans1 end.
                while (lastSpans2StartPosition <= nextSpans1EndPosition) {
                    nextSpans2StartPosition = spans2.spans.nextStartPosition();
                    if (nextSpans2StartPosition == NO_MORE_POSITIONS) {
                        noMorePositions = true;
                        return false;
                    } else {
                        nextSpans2EndPosition = spans2.spans.endPosition();
                        if (nextSpans2StartPosition > lastSpans2StartPosition
                                || nextSpans2EndPosition > lastSpans2EndPosition) {
                            if (nextSpans2EndPosition > lastSpans2EndPosition) {
                                lastSpans2StartPosition = nextSpans2StartPosition;
                                lastSpans2EndPosition = nextSpans2EndPosition;
                                // Re-check overlap against the widened spans2 window.
                                if (nextSpans1StartPosition <= lastSpans2EndPosition
                                        && nextSpans1EndPosition >= lastSpans2StartPosition) {
                                    return true;
                                }
                            }
                        }
                    }
                }
            }
        }
        noMorePositions = true;
        return false;
    } }
|
public class class_name {
    // NOTE: control/data-dependency annotated variant; trailing "// depends on ..." markers are dataset labels, not ordinary comments. Code intentionally unchanged.
    private boolean goToNextStartPosition() throws IOException {
        int nextSpans1StartPosition;
        int nextSpans1EndPosition;
        int nextSpans2StartPosition;
        int nextSpans2EndPosition;
        while ((nextSpans1StartPosition = spans1.spans
                .nextStartPosition()) != NO_MORE_POSITIONS) {
            nextSpans1EndPosition = spans1.spans.endPosition();
            if (nextSpans1StartPosition <= lastSpans2EndPosition
                    && nextSpans1EndPosition >= lastSpans2StartPosition) {
                return true;
            } else {
                while (lastSpans2StartPosition <= nextSpans1EndPosition) {
                    nextSpans2StartPosition = spans2.spans.nextStartPosition(); // depends on control dependency: [while], data = [none]
                    if (nextSpans2StartPosition == NO_MORE_POSITIONS) {
                        noMorePositions = true; // depends on control dependency: [if], data = [none]
                        return false; // depends on control dependency: [if], data = [none]
                    } else {
                        nextSpans2EndPosition = spans2.spans.endPosition(); // depends on control dependency: [if], data = [none]
                        if (nextSpans2StartPosition > lastSpans2StartPosition
                                || nextSpans2EndPosition > lastSpans2EndPosition) {
                            if (nextSpans2EndPosition > lastSpans2EndPosition) {
                                lastSpans2StartPosition = nextSpans2StartPosition; // depends on control dependency: [if], data = [none]
                                lastSpans2EndPosition = nextSpans2EndPosition; // depends on control dependency: [if], data = [none]
                                if (nextSpans1StartPosition <= lastSpans2EndPosition
                                        && nextSpans1EndPosition >= lastSpans2StartPosition) {
                                    return true; // depends on control dependency: [if], data = [none]
                                }
                            }
                        }
                    }
                }
            }
        }
        noMorePositions = true;
        return false;
    } }
|
public class class_name {
    /**
     * Scrolls in response to a mouse-wheel event: walks up from the current
     * target to the nearest scrollable ancestor (falling back to the document
     * body), moves it by five pixels per wheel-velocity unit clamped at the top,
     * then forwards the event to {@code onMove}.
     */
    protected void onMouseWheelScroll(Event event) {
        int delta = event.getMouseWheelVelocityY() * 5;
        Element target = getCurrentTarget() != null
                ? getCurrentTarget().getElement()
                : RootPanel.getBodyElement();
        // Climb until an element actually overflows (or we reach the body).
        while ((target.getScrollHeight() == target.getClientHeight())
                && (target != RootPanel.getBodyElement())) {
            target = target.getParentElement();
        }
        if (target == RootPanel.getBodyElement()) {
            // Body scrolling goes through the Window API.
            int newTop = Math.max(0, Window.getScrollTop() + delta);
            Window.scrollTo(Window.getScrollLeft(), newTop);
        } else {
            target.setScrollTop(Math.max(0, target.getScrollTop() + delta));
        }
        onMove(event);
    } }
|
public class class_name {
    // NOTE: control/data-dependency annotated variant; trailing "// depends on ..." markers are dataset labels, not ordinary comments. Code intentionally unchanged.
    protected void onMouseWheelScroll(Event event) {
        int scrollStep = event.getMouseWheelVelocityY() * 5;
        Element scrollTarget;
        if (getCurrentTarget() != null) {
            scrollTarget = getCurrentTarget().getElement(); // depends on control dependency: [if], data = [none]
        } else {
            scrollTarget = RootPanel.getBodyElement(); // depends on control dependency: [if], data = [none]
        }
        while ((scrollTarget.getScrollHeight() == scrollTarget.getClientHeight())
                && (scrollTarget != RootPanel.getBodyElement())) {
            scrollTarget = scrollTarget.getParentElement(); // depends on control dependency: [while], data = [scrollTarg]
        }
        if (scrollTarget == RootPanel.getBodyElement()) {
            int top = Window.getScrollTop() + scrollStep;
            int left = Window.getScrollLeft();
            if (top < 0) {
                top = 0; // depends on control dependency: [if], data = [none]
            }
            Window.scrollTo(left, top); // depends on control dependency: [if], data = [none]
        } else {
            int top = scrollTarget.getScrollTop() + scrollStep;
            if (top < 0) {
                top = 0; // depends on control dependency: [if], data = [none]
            }
            scrollTarget.setScrollTop(top); // depends on control dependency: [if], data = [none]
        }
        onMove(event);
    } }
|
public class class_name {
    /**
     * Returns a mutable bitmap with the same pixels as {@code source}. An
     * already-mutable source is returned as-is; otherwise a mutable copy is
     * made (defaulting to ARGB_8888 when the config is unknown) and the source
     * is recycled. May return {@code null} if the copy fails.
     */
    @Nullable
    public static Bitmap mutable(Bitmap source) {
        if (source.isMutable()) {
            return source;
        }
        Config config = source.getConfig();
        if (config == null) {
            config = ARGB_8888;
        }
        Bitmap copy = source.copy(config, true);
        if (copy != null) {
            source.recycle();
        }
        return copy;
    } }
|
public class class_name {
    /**
     * Returns a mutable bitmap equivalent to {@code source}: the source
     * itself if already mutable, otherwise a mutable copy (recycling the
     * source on success).
     *
     * @param source the bitmap to make mutable
     * @return a mutable bitmap, or {@code null} if the copy failed
     */
    @Nullable
    public static Bitmap mutable(Bitmap source) {
        if (source.isMutable()) {
            return source; // depends on control dependency: [if], data = [none]
        }
        // Fall back to ARGB_8888 when the source has no config.
        Config config = source.getConfig();
        Bitmap bm = source.copy(config != null ? config : ARGB_8888, true);
        // copy() may return null; only recycle when the copy succeeded.
        if (bm != null) {
            source.recycle(); // depends on control dependency: [if], data = [none]
        }
        return bm;
    } }
|
public class class_name {
    /**
     * Computes this component's size from its children according to the
     * constraint's flow direction, then applies any fixed width/height from
     * the constraint and stores the result via {@code setBounds}.
     *
     * @param context the PDF rendering context passed down to children
     */
    public void calculateSize(PdfContext context) {
        float totalWidth = 0;
        float totalHeight = 0;
        for (PrintComponent<?> child : children) {
            child.calculateSize(context);
            // A child's footprint includes its margins on both sides.
            float childWidth = child.getBounds().getWidth() + 2 * child.getConstraint().getMarginX();
            float childHeight = child.getBounds().getHeight() + 2 * child.getConstraint().getMarginY();
            int flow = getConstraint().getFlowDirection();
            if (flow == LayoutConstraint.FLOW_NONE) {
                // Children overlap: take the largest extent in each direction.
                totalWidth = Math.max(totalWidth, childWidth);
                totalHeight = Math.max(totalHeight, childHeight);
            } else if (flow == LayoutConstraint.FLOW_X) {
                // Horizontal flow: widths accumulate, heights take the maximum.
                totalWidth += childWidth;
                totalHeight = Math.max(totalHeight, childHeight);
            } else if (flow == LayoutConstraint.FLOW_Y) {
                // Vertical flow: heights accumulate, widths take the maximum.
                totalWidth = Math.max(totalWidth, childWidth);
                totalHeight += childHeight;
            } else {
                throw new IllegalStateException("Unknown flow direction " + getConstraint().getFlowDirection());
            }
        }
        // A non-zero constraint dimension overrides the computed one.
        if (getConstraint().getWidth() != 0) {
            totalWidth = getConstraint().getWidth();
        }
        if (getConstraint().getHeight() != 0) {
            totalHeight = getConstraint().getHeight();
        }
        setBounds(new Rectangle(0, 0, totalWidth, totalHeight));
    } }
|
public class class_name {
    /**
     * Computes this component's size from its children according to the
     * constraint's flow direction, then applies any fixed width/height from
     * the constraint and stores the result via {@code setBounds}.
     *
     * @param context the PDF rendering context passed down to children
     */
    public void calculateSize(PdfContext context) {
        float width = 0;
        float height = 0;
        for (PrintComponent<?> child : children) {
            child.calculateSize(context); // depends on control dependency: [for], data = [child]
            // A child's footprint includes its margins on both sides.
            float cw = child.getBounds().getWidth() + 2 * child.getConstraint().getMarginX();
            float ch = child.getBounds().getHeight() + 2 * child.getConstraint().getMarginY();
            switch (getConstraint().getFlowDirection()) {
                case LayoutConstraint.FLOW_NONE:
                    // Children overlap: take the largest extent in each direction.
                    width = Math.max(width, cw);
                    height = Math.max(height, ch); // depends on control dependency: [for], data = [none]
                    break;
                case LayoutConstraint.FLOW_X:
                    // Horizontal flow: widths accumulate, heights take the maximum.
                    width += cw;
                    height = Math.max(height, ch); // depends on control dependency: [for], data = [none]
                    break;
                case LayoutConstraint.FLOW_Y:
                    // Vertical flow: heights accumulate, widths take the maximum.
                    width = Math.max(width, cw);
                    height += ch; // depends on control dependency: [for], data = [none]
                    break;
                default:
                    throw new IllegalStateException("Unknown flow direction " + getConstraint().getFlowDirection());
            }
        }
        // A non-zero constraint dimension overrides the computed one.
        if (getConstraint().getWidth() != 0) {
            width = getConstraint().getWidth();
        }
        if (getConstraint().getHeight() != 0) {
            height = getConstraint().getHeight();
        }
        setBounds(new Rectangle(0, 0, width, height));
    } }
|
public class class_name {
    /**
     * Configures the client builder's SSL factory and installs a credentials
     * provider on the session context. The provider is the union of any
     * proxy credentials with either the local or global credentials for the
     * best-matching scope (local overrides global).
     *
     * @param cb the client builder whose SSL socket factory is set
     * @throws HTTPException declared for callers; not thrown directly here
     */
    synchronized
    protected void
    setAuthenticationAndProxy(HttpClientBuilder cb)
            throws HTTPException
    {
        // First, setup the ssl factory
        cb.setSSLSocketFactory((SSLConnectionSocketFactory) authcontrols.get(AuthProp.SSLFACTORY));
        // Second, Construct a CredentialsProvider that is
        // the union of the Proxy credentials plus
        // either the global or local credentials; local overrides global
        // Unfortunately, we cannot either clone or extract the contents
        // of the client supplied provider, so we are forced (for now)
        // to modify the client supplied provider.
        // Look in the local credentials first for best scope match
        AuthScope bestMatch = HTTPAuthUtil.bestmatch(scope, localcreds.keySet());
        CredentialsProvider cp = null;
        if(bestMatch != null) {
            cp = localcreds.get(bestMatch);
        } else {
            // No local match: fall back to the global factories.
            bestMatch = HTTPAuthUtil.bestmatch(scope, globalcredfactories.keySet());
            if(bestMatch != null) {
                HTTPProviderFactory factory = globalcredfactories.get(bestMatch);
                cp = factory.getProvider(bestMatch);
            }
        }
        // Build the proxy credentials and AuthScope
        Credentials proxycreds = null;
        AuthScope proxyscope = null;
        String user = (String) authcontrols.get(AuthProp.PROXYUSER);
        String pwd = (String) authcontrols.get(AuthProp.PROXYPWD);
        HttpHost httpproxy = (HttpHost) authcontrols.get(AuthProp.HTTPPROXY);
        HttpHost httpsproxy = (HttpHost) authcontrols.get(AuthProp.HTTPSPROXY);
        if(user != null && (httpproxy != null || httpsproxy != null)) {
            if(httpproxy != null)
                proxyscope = HTTPAuthUtil.hostToAuthScope(httpproxy);
            else //httpsproxy != null
                proxyscope = HTTPAuthUtil.hostToAuthScope(httpsproxy);
            proxycreds = new UsernamePasswordCredentials(user, pwd);
        }
        if(cp == null && proxycreds != null && proxyscope != null) {
            // If client provider is null and proxycreds are not,
            // then use proxycreds alone
            cp = new BasicCredentialsProvider();
            cp.setCredentials(proxyscope, proxycreds);
        } else if(cp != null && proxycreds != null && proxyscope != null) {
            // If client provider is not null and proxycreds are not,
            // then add proxycreds to the client provider
            cp.setCredentials(proxyscope, proxycreds);
        }
        if(cp != null)
            this.sessioncontext.setCredentialsProvider(cp);
    } }
|
public class class_name {
    /**
     * Configures the client builder's SSL factory and installs a credentials
     * provider on the session context, merging proxy credentials with the
     * best-matching local or global credentials (local overrides global).
     *
     * @param cb the client builder whose SSL socket factory is set
     * @throws HTTPException declared for callers; not thrown directly here
     */
    synchronized
    protected void
    setAuthenticationAndProxy(HttpClientBuilder cb)
            throws HTTPException
    {
        // First, setup the ssl factory
        cb.setSSLSocketFactory((SSLConnectionSocketFactory) authcontrols.get(AuthProp.SSLFACTORY));
        // Second, Construct a CredentialsProvider that is
        // the union of the Proxy credentials plus
        // either the global or local credentials; local overrides global
        // Unfortunately, we cannot either clone or extract the contents
        // of the client supplied provider, so we are forced (for now)
        // to modify the client supplied provider.
        // Look in the local credentials first for best scope match
        AuthScope bestMatch = HTTPAuthUtil.bestmatch(scope, localcreds.keySet());
        CredentialsProvider cp = null;
        if(bestMatch != null) {
            cp = localcreds.get(bestMatch);
        } else {
            // No local match: fall back to the global factories.
            bestMatch = HTTPAuthUtil.bestmatch(scope, globalcredfactories.keySet());
            if(bestMatch != null) {
                HTTPProviderFactory factory = globalcredfactories.get(bestMatch);
                cp = factory.getProvider(bestMatch); // depends on control dependency: [if], data = [(bestMatch]
            }
        }
        // Build the proxy credentials and AuthScope
        Credentials proxycreds = null;
        AuthScope proxyscope = null;
        String user = (String) authcontrols.get(AuthProp.PROXYUSER);
        String pwd = (String) authcontrols.get(AuthProp.PROXYPWD);
        HttpHost httpproxy = (HttpHost) authcontrols.get(AuthProp.HTTPPROXY);
        HttpHost httpsproxy = (HttpHost) authcontrols.get(AuthProp.HTTPSPROXY);
        if(user != null && (httpproxy != null || httpsproxy != null)) {
            if(httpproxy != null)
                proxyscope = HTTPAuthUtil.hostToAuthScope(httpproxy);
            else //httpsproxy != null
                proxyscope = HTTPAuthUtil.hostToAuthScope(httpsproxy);
            proxycreds = new UsernamePasswordCredentials(user, pwd);
        }
        if(cp == null && proxycreds != null && proxyscope != null) {
            // If client provider is null and proxycreds are not,
            // then use proxycreds alone
            cp = new BasicCredentialsProvider();
            cp.setCredentials(proxyscope, proxycreds);
        } else if(cp != null && proxycreds != null && proxyscope != null) {
            // If client provider is not null and proxycreds are not,
            // then add proxycreds to the client provider
            cp.setCredentials(proxyscope, proxycreds);
        }
        if(cp != null)
            this.sessioncontext.setCredentialsProvider(cp);
    } }
|
public class class_name {
    /**
     * Rewrites topic references in every DITA topic and map of the job,
     * applying the given rename and conflict tables.
     *
     * @param changeTable   map of original URI to renamed URI
     * @param conflictTable map of conflicting URI resolutions
     */
    private void updateRefOfDita(final Map<URI, URI> changeTable, final Map<URI, URI> conflictTable) {
        final TopicRefWriter writer = new TopicRefWriter();
        writer.setLogger(logger);
        writer.setJob(job);
        writer.setChangeTable(changeTable);
        writer.setup(conflictTable);
        try {
            for (final FileInfo fileInfo : job.getFileInfo()) {
                final boolean isDitaOrMap = ATTR_FORMAT_VALUE_DITA.equals(fileInfo.format)
                        || ATTR_FORMAT_VALUE_DITAMAP.equals(fileInfo.format);
                if (!isDitaOrMap) {
                    continue; // skip files of other formats
                }
                writer.setFixpath(relativePath2fix.get(fileInfo.uri));
                writer.write(new File(job.tempDirURI.resolve(fileInfo.uri)));
            }
        } catch (final DITAOTException ex) {
            // Log and continue; reference rewriting failures are not fatal here.
            logger.error(ex.getMessage(), ex);
        }
    } }
|
public class class_name {
    /**
     * Rewrites topic references in every DITA topic and map of the job,
     * applying the given rename and conflict tables.
     *
     * @param changeTable   map of original URI to renamed URI
     * @param conflictTable map of conflicting URI resolutions
     */
    private void updateRefOfDita(final Map<URI, URI> changeTable, final Map<URI, URI> conflictTable) {
        final TopicRefWriter topicRefWriter = new TopicRefWriter();
        topicRefWriter.setLogger(logger);
        topicRefWriter.setJob(job);
        topicRefWriter.setChangeTable(changeTable);
        topicRefWriter.setup(conflictTable);
        try {
            for (final FileInfo f : job.getFileInfo()) {
                // Only DITA topics and maps are rewritten; other formats are skipped.
                if (ATTR_FORMAT_VALUE_DITA.equals(f.format) || ATTR_FORMAT_VALUE_DITAMAP.equals(f.format)) {
                    topicRefWriter.setFixpath(relativePath2fix.get(f.uri)); // depends on control dependency: [if], data = [none]
                    final File tmp = new File(job.tempDirURI.resolve(f.uri));
                    topicRefWriter.write(tmp); // depends on control dependency: [if], data = [none]
                }
            }
        } catch (final DITAOTException ex) {
            // Log and continue; failures here are not fatal.
            logger.error(ex.getMessage(), ex);
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Sets the instance's shape from its string name. "circle" maps to
     * CIRCLE; every other value (including "square") resolves to SQUARE,
     * which is also the default.
     *
     * @param instance the symbolizer to update
     * @param shape    the shape name, e.g. "circle" or "square"
     */
    @ExportInstanceMethod
    public static void setShape(PointSymbolizerShapeAndSize instance, String shape) {
        PointSymbolizerShapeAndSize.Shape resolved = "circle".equals(shape)
                ? PointSymbolizerShapeAndSize.Shape.CIRCLE
                : PointSymbolizerShapeAndSize.Shape.SQUARE;
        instance.setShape(resolved);
    } }
|
public class class_name {
    /**
     * Sets the instance's shape from its string name. "circle" maps to
     * CIRCLE; every other value (including "square") resolves to SQUARE.
     *
     * @param instance the symbolizer to update
     * @param shape    the shape name, e.g. "circle" or "square"
     */
    @ExportInstanceMethod
    public static void setShape(PointSymbolizerShapeAndSize instance, String shape) {
        if ("circle".equals(shape)) {
            instance.setShape(PointSymbolizerShapeAndSize.Shape.CIRCLE); // depends on control dependency: [if], data = [none]
        } else if ("square".equals(shape)) {
            instance.setShape(PointSymbolizerShapeAndSize.Shape.SQUARE); // depends on control dependency: [if], data = [none]
        } else {
            //default value
            instance.setShape(PointSymbolizerShapeAndSize.Shape.SQUARE); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Collects the configured resource and package names into a single list.
     * Entries from CONFIG_RESOURCES come first, followed by CONFIG_PACKAGES;
     * missing or empty arrays contribute nothing.
     *
     * @return the combined list of names (possibly empty, never null)
     */
    @Override
    public List<String> getPackages() {
        List<String> result = new ArrayList<>();
        appendStrings(result, config.getJsonArray(CONFIG_RESOURCES, null));
        appendStrings(result, config.getJsonArray(CONFIG_PACKAGES, null));
        return result;
    }

    /** Appends every string element of {@code array} to {@code target}; null or empty arrays are ignored. */
    private static void appendStrings(List<String> target, JsonArray array) {
        if (array == null || array.isEmpty()) {
            return;
        }
        for (int i = 0; i < array.size(); i++) {
            target.add(array.getString(i));
        }
    } }
|
public class class_name {
    /**
     * Collects the configured resource and package names into a single list.
     * Entries from CONFIG_RESOURCES come first, followed by CONFIG_PACKAGES;
     * missing or empty arrays contribute nothing.
     *
     * @return the combined list of names (possibly empty, never null)
     */
    @Override
    public List<String> getPackages() {
        List<String> list = new ArrayList<>();
        // Shared reader: copies every string element of a non-empty array.
        Consumer<JsonArray> reader = array -> {
            if ((array != null && !array.isEmpty())) {
                for (int i = 0; i < array.size(); i++) {
                    list.add(array.getString(i)); // depends on control dependency: [for], data = [i]
                }
            }
        };
        JsonArray resources = config.getJsonArray(CONFIG_RESOURCES, null);
        JsonArray packages = config.getJsonArray(CONFIG_PACKAGES, null);
        reader.accept(resources);
        reader.accept(packages);
        return list;
    } }
|
public class class_name {
    /**
     * Loads all field reference records from the "field" index input into
     * {@code fieldReferences} and initializes {@code prefixReferences}.
     *
     * @throws IOException if seeking to the start of the field data fails
     */
    private void init() throws IOException {
        // move to begin
        IndexInput inField = indexInputList.get("field");
        inField.seek(indexInputOffsetList.get("field"));
        // store field references in memory
        fieldReferences = new HashMap<String, FieldReferences>();
        boolean doInit = true;
        // NOTE(review): the loop is terminated by the IOException thrown when
        // the input is exhausted — exception-as-control-flow; any mid-record
        // read error is also treated as end-of-data. Confirm this is intended.
        while (doInit) {
            try {
                // Read one record: field name, doc refs, counts, prefix refs.
                String field = inField.readString();
                long refIndexDoc = inField.readVLong();
                long refIndexDocId = inField.readVLong();
                int numberOfDocs = inField.readVInt();
                inField.readVLong(); // refTerm (skipped)
                inField.readVInt(); // numberOfTerms (skipped)
                long refPrefix = inField.readVLong();
                int numberOfPrefixes = inField.readVInt();
                fieldReferences.put(field, new FieldReferences(refIndexDoc,
                        refIndexDocId, numberOfDocs, refPrefix, numberOfPrefixes));
            } catch (IOException e) {
                // End of data (or read failure): stop loading.
                log.debug(e);
                doInit = false;
            }
        }
        // prefixReferences
        prefixReferences = new HashMap<String, LinkedHashMap<String, Long>>();
    } }
|
public class class_name {
    /**
     * Loads all field reference records from the "field" index input into
     * {@code fieldReferences} and initializes {@code prefixReferences}.
     * The read loop ends when an IOException signals exhausted input.
     *
     * @throws IOException if seeking to the start of the field data fails
     */
    private void init() throws IOException {
        // move to begin
        IndexInput inField = indexInputList.get("field");
        inField.seek(indexInputOffsetList.get("field"));
        // store field references in memory
        fieldReferences = new HashMap<String, FieldReferences>();
        boolean doInit = true;
        while (doInit) {
            try {
                // Read one record: field name, doc refs, counts, prefix refs.
                String field = inField.readString();
                long refIndexDoc = inField.readVLong();
                long refIndexDocId = inField.readVLong();
                int numberOfDocs = inField.readVInt();
                inField.readVLong(); // refTerm // depends on control dependency: [try], data = [none]
                inField.readVInt(); // numberOfTerms // depends on control dependency: [try], data = [none]
                long refPrefix = inField.readVLong();
                int numberOfPrefixes = inField.readVInt();
                fieldReferences.put(field, new FieldReferences(refIndexDoc,
                        refIndexDocId, numberOfDocs, refPrefix, numberOfPrefixes)); // depends on control dependency: [try], data = [none]
            } catch (IOException e) {
                // End of data (or read failure): stop loading.
                log.debug(e);
                doInit = false;
            } // depends on control dependency: [catch], data = [none]
        }
        // prefixReferences
        prefixReferences = new HashMap<String, LinkedHashMap<String, Long>>();
    } }
|
public class class_name {
    /**
     * Inflates up to {@code length} bytes of gzip-compressed data into
     * {@code b} starting at {@code offset}, driving the gzip state machine
     * (header fields, inflation, trailer) until no more progress can be made
     * or the request is satisfied.
     *
     * @param b      destination buffer
     * @param offset start index in {@code b}
     * @param length maximum number of bytes to produce
     * @return the number of bytes actually written into {@code b}
     * @throws DataFormatException if the compressed data is malformed
     * @throws ZipException        if a gzip header/trailer check fails
     */
    int inflateBytes(byte[] b, int offset, int length) throws DataFormatException, ZipException {
        checkState(!closed, "GzipInflatingBuffer is closed");
        int bytesRead = 0;
        int missingBytes;
        boolean madeProgress = true;
        // Keep stepping the state machine while it makes progress and the
        // caller still wants more bytes.
        while (madeProgress && (missingBytes = length - bytesRead) > 0) {
            switch (state) {
                case HEADER:
                    madeProgress = processHeader();
                    break;
                case HEADER_EXTRA_LEN:
                    madeProgress = processHeaderExtraLen();
                    break;
                case HEADER_EXTRA:
                    madeProgress = processHeaderExtra();
                    break;
                case HEADER_NAME:
                    madeProgress = processHeaderName();
                    break;
                case HEADER_COMMENT:
                    madeProgress = processHeaderComment();
                    break;
                case HEADER_CRC:
                    madeProgress = processHeaderCrc();
                    break;
                case INITIALIZE_INFLATER:
                    madeProgress = initializeInflater();
                    break;
                case INFLATING:
                    bytesRead += inflate(b, offset + bytesRead, missingBytes);
                    if (state == State.TRAILER) {
                        // Eagerly process trailer, if available, to validate CRC.
                        madeProgress = processTrailer();
                    } else {
                        // Continue in INFLATING until we have the required bytes or we transition to
                        // INFLATER_NEEDS_INPUT
                        madeProgress = true;
                    }
                    break;
                case INFLATER_NEEDS_INPUT:
                    madeProgress = fill();
                    break;
                case TRAILER:
                    madeProgress = processTrailer();
                    break;
                default:
                    throw new AssertionError("Invalid state: " + state);
            }
        }
        // If we finished a gzip block, check if we have enough bytes to read another header
        isStalled =
                !madeProgress
                        || (state == State.HEADER && gzipMetadataReader.readableBytes() < GZIP_HEADER_MIN_SIZE);
        return bytesRead;
    } }
|
public class class_name {
    /**
     * Inflates up to {@code length} bytes of gzip-compressed data into
     * {@code b} starting at {@code offset}, driving the gzip state machine
     * until no more progress can be made or the request is satisfied.
     *
     * @param b      destination buffer
     * @param offset start index in {@code b}
     * @param length maximum number of bytes to produce
     * @return the number of bytes actually written into {@code b}
     * @throws DataFormatException if the compressed data is malformed
     * @throws ZipException        if a gzip header/trailer check fails
     */
    int inflateBytes(byte[] b, int offset, int length) throws DataFormatException, ZipException {
        checkState(!closed, "GzipInflatingBuffer is closed");
        int bytesRead = 0;
        int missingBytes;
        boolean madeProgress = true;
        while (madeProgress && (missingBytes = length - bytesRead) > 0) {
            switch (state) {
                case HEADER:
                    madeProgress = processHeader();
                    break;
                case HEADER_EXTRA_LEN:
                    madeProgress = processHeaderExtraLen();
                    break;
                case HEADER_EXTRA:
                    madeProgress = processHeaderExtra();
                    break;
                case HEADER_NAME:
                    madeProgress = processHeaderName();
                    break;
                case HEADER_COMMENT:
                    madeProgress = processHeaderComment();
                    break;
                case HEADER_CRC:
                    madeProgress = processHeaderCrc();
                    break;
                case INITIALIZE_INFLATER:
                    madeProgress = initializeInflater();
                    break;
                case INFLATING:
                    bytesRead += inflate(b, offset + bytesRead, missingBytes);
                    if (state == State.TRAILER) {
                        // Eagerly process trailer, if available, to validate CRC.
                        madeProgress = processTrailer(); // depends on control dependency: [if], data = [none]
                    } else {
                        // Continue in INFLATING until we have the required bytes or we transition to
                        // INFLATER_NEEDS_INPUT
                        madeProgress = true; // depends on control dependency: [if], data = [none]
                    }
                    break;
                case INFLATER_NEEDS_INPUT:
                    madeProgress = fill();
                    break;
                case TRAILER:
                    madeProgress = processTrailer();
                    break;
                default:
                    throw new AssertionError("Invalid state: " + state);
            }
        }
        // If we finished a gzip block, check if we have enough bytes to read another header
        isStalled =
                !madeProgress
                        || (state == State.HEADER && gzipMetadataReader.readableBytes() < GZIP_HEADER_MIN_SIZE);
        return bytesRead;
    } }
|
public class class_name {
    /**
     * Loads the native library: first from the system library path, and if
     * that fails, by extracting it from the jar.
     *
     * @throws IOException if the jar-extraction fallback also fails
     */
    private void doLoad() throws IOException {
        final String libraryName = getName();
        try {
            System.loadLibrary(libraryName);
        } catch (UnsatisfiedLinkError notOnLibraryPath) {
            // Fall back to the copy bundled inside the jar.
            try {
                extractAndLoad(getPlatformLibraryPath(), getSimpleLibraryPath());
            } catch (IOException extractionFailure) {
                logger.warn("Failed to load library from both native path and jar!");
                throw extractionFailure;
            }
        }
    } }
|
public class class_name {
    /**
     * Loads the native library: first from the system library path, and if
     * that fails, by extracting it from the jar.
     *
     * @throws IOException if the jar-extraction fallback also fails
     */
    private void doLoad() throws IOException {
        final String libName = getName();
        try {
            System.loadLibrary(libName);
        } catch (UnsatisfiedLinkError e) {
            // Fall back to the copy bundled inside the jar.
            try {
                extractAndLoad(getPlatformLibraryPath(), getSimpleLibraryPath()); // depends on control dependency: [try], data = [none]
            } catch (IOException ioe) {
                logger.warn("Failed to load library from both native path and jar!");
                throw ioe;
            } // depends on control dependency: [catch], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Compares two node descriptions by role and URI only.
     *
     * @param o1 the reference node; must not be null
     * @param o2 the node to compare against; may be null
     * @return {@code true} iff {@code o2} is non-null and has the same role
     *         and URI as {@code o1}
     */
    static boolean essentiallyEqualsTo(RedisNodeDescription o1, RedisNodeDescription o2) {
        return o2 != null
                && o1.getRole() == o2.getRole()
                && o1.getUri().equals(o2.getUri());
    } }
|
public class class_name {
    /**
     * Compares two node descriptions by role and URI only.
     *
     * @param o1 the reference node; must not be null
     * @param o2 the node to compare against; may be null
     * @return {@code true} iff {@code o2} is non-null and has the same role
     *         and URI as {@code o1}
     */
    static boolean essentiallyEqualsTo(RedisNodeDescription o1, RedisNodeDescription o2) {
        if (o2 == null) {
            return false; // depends on control dependency: [if], data = [none]
        }
        if (o1.getRole() != o2.getRole()) {
            return false; // depends on control dependency: [if], data = [none]
        }
        if (!o1.getUri().equals(o2.getUri())) {
            return false; // depends on control dependency: [if], data = [none]
        }
        return true;
    } }
|
public class class_name {
    /**
     * Command-line entry point: parses options and, if they are valid,
     * generates output for the input file or for every file under the input
     * directory. All failures are logged rather than propagated.
     *
     * @param args command-line arguments to parse into options
     */
    public void execute(final String[] args) {
        log.info("Generation started");
        try {
            Options options = createOptions();
            // Only proceed when the command-line options parsed successfully.
            if (collectOptions(options, args)) {
                setDefaults();
                if (input.isDirectory()) {
                    // Recurse over every file in the input directory.
                    for (File cobolFile : FileUtils
                            .listFiles(input, null, true)) {
                        generate(configProps, cobolFile, inputEncoding, output,
                                packagePrefix, xsltFileName);
                    }
                } else {
                    generate(configProps, input, inputEncoding, output,
                            packagePrefix, xsltFileName);
                }
            }
        } catch (Exception e) {
            log.error("Generation failed", e);
        } finally {
            log.info("Generation ended");
        }
    } }
|
public class class_name {
    /**
     * Command-line entry point: parses options and, if they are valid,
     * generates output for the input file or for every file under the input
     * directory. All failures are logged rather than propagated.
     *
     * @param args command-line arguments to parse into options
     */
    public void execute(final String[] args) {
        log.info("Generation started");
        try {
            Options options = createOptions();
            if (collectOptions(options, args)) {
                setDefaults(); // depends on control dependency: [if], data = [none]
                if (input.isDirectory()) {
                    // Recurse over every file in the input directory.
                    for (File cobolFile : FileUtils
                            .listFiles(input, null, true)) {
                        generate(configProps, cobolFile, inputEncoding, output,
                                packagePrefix, xsltFileName); // depends on control dependency: [for], data = [cobolFile]
                    }
                } else {
                    generate(configProps, input, inputEncoding, output,
                            packagePrefix, xsltFileName); // depends on control dependency: [if], data = [none]
                }
            }
        } catch (Exception e) {
            log.error("Generation failed", e);
        } finally { // depends on control dependency: [catch], data = [none]
            log.info("Generation ended");
        }
    } }
|
public class class_name {
    /**
     * Indexes the request's subjects into {@code subjectMap}, keyed first by
     * subject category and then by attribute id (attributes sharing an id
     * are grouped into one list).
     *
     * @param subjects the subjects from the request; must contain at least one
     * @throws ParsingException if the subject list is empty
     */
    private void setupSubjects(List<Subject> subjects) throws ParsingException {
        // make sure that there is at least one Subject
        if (subjects.isEmpty())
            throw new ParsingException("Request must contain a subject");

        // now go through the subject attributes
        for (Subject subject : subjects) {
            URI category = subject.getCategory();

            // see if we've already got a map for the category; we never store
            // null values, so a plain get() doubles as the containsKey check
            Map<String, List<Attribute>> categoryMap = subjectMap.get(category);
            if (categoryMap == null) {
                categoryMap = new HashMap<String, List<Attribute>>();
                subjectMap.put(category, categoryMap);
            }

            // iterate over the set of attributes, grouping them by id
            for (Object attrObj : subject.getAttributesAsList()) {
                Attribute attr = (Attribute) attrObj;
                String id = attr.getId().toString();
                List<Attribute> attrsForId = categoryMap.get(id);
                if (attrsForId == null) {
                    // this is the first Attribute with this id
                    attrsForId = new ArrayList<Attribute>();
                    categoryMap.put(id, attrsForId);
                }
                attrsForId.add(attr);
            }
        }
    } }
|
public class class_name {
    /**
     * Indexes the request's subjects into {@code subjectMap}, keyed first by
     * subject category and then by attribute id (attributes sharing an id
     * are grouped into one list).
     *
     * @param subjects the subjects from the request; must contain at least one
     * @throws ParsingException if the subject list is empty
     */
    private void setupSubjects(List<Subject> subjects) throws ParsingException {
        // make sure that there is at least one Subject
        if (subjects.size() == 0)
            throw new ParsingException("Request must a contain subject");
        // now go through the subject attributes
        Iterator<Subject> it = subjects.iterator();
        while (it.hasNext()) {
            Subject subject = it.next();
            URI category = subject.getCategory();
            Map<String, List<Attribute>> categoryMap = null;
            // see if we've already got a map for the category
            if (subjectMap.containsKey(category)) {
                categoryMap = subjectMap.get(category);
            } else {
                categoryMap = new HashMap<String, List<Attribute>>();
                subjectMap.put(category, categoryMap);
            }
            // iterate over the set of attributes
            Iterator attrIterator = subject.getAttributesAsList().iterator();
            while (attrIterator.hasNext()) {
                Attribute attr = (Attribute)(attrIterator.next());
                String id = attr.getId().toString();
                if (categoryMap.containsKey(id)) {
                    // add to the existing set of Attributes w/this id
                    List existingIds = (List)(categoryMap.get(id));
                    existingIds.add(attr); // depends on control dependency: [if], data = [none]
                } else {
                    // this is the first Attr w/this id
                    List newIds = new ArrayList();
                    newIds.add(attr); // depends on control dependency: [if], data = [none]
                    categoryMap.put(id, newIds); // depends on control dependency: [if], data = [none]
                }
            }
        }
    } }
|
public class class_name {
    /**
     * Verifies that the database schema matches this engine version and that
     * all tables required by the enabled components (engine, history,
     * identity, CMMN, DMN) are present. Throws on any mismatch; wraps lookup
     * failures in engine-specific exceptions.
     */
    public void dbSchemaCheckVersion() {
        try {
            String dbVersion = getDbVersion();
            if (!ProcessEngine.VERSION.equals(dbVersion)) {
                throw LOG.wrongDbVersionException(ProcessEngine.VERSION, dbVersion);
            }
            // Collect every missing component so the error lists them all at once.
            List<String> missingComponents = new ArrayList<>();
            if (!isEngineTablePresent()) {
                missingComponents.add("engine");
            }
            if (dbSqlSessionFactory.isDbHistoryUsed() && !isHistoryTablePresent()) {
                missingComponents.add("history");
            }
            if (dbSqlSessionFactory.isDbIdentityUsed() && !isIdentityTablePresent()) {
                missingComponents.add("identity");
            }
            if (dbSqlSessionFactory.isCmmnEnabled() && !isCmmnTablePresent()) {
                missingComponents.add("case.engine");
            }
            if (dbSqlSessionFactory.isDmnEnabled() && !isDmnTablePresent()) {
                missingComponents.add("decision.engine");
            }
            if (!missingComponents.isEmpty()) {
                throw LOG.missingTableException(missingComponents);
            }
        } catch (Exception e) {
            // Distinguish "tables missing entirely" from other failures;
            // rethrow RuntimeExceptions unchanged, wrap everything else.
            if (isMissingTablesException(e)) {
                throw LOG.missingActivitiTablesException();
            } else {
                if (e instanceof RuntimeException) {
                    throw (RuntimeException) e;
                } else {
                    throw LOG.unableToFetchDbSchemaVersion(e);
                }
            }
        }
    } }
|
public class class_name {
    /**
     * Verifies that the database schema matches this engine version and that
     * all tables required by the enabled components (engine, history,
     * identity, CMMN, DMN) are present. Throws on any mismatch; wraps lookup
     * failures in engine-specific exceptions.
     */
    public void dbSchemaCheckVersion() {
        try {
            String dbVersion = getDbVersion();
            if (!ProcessEngine.VERSION.equals(dbVersion)) {
                throw LOG.wrongDbVersionException(ProcessEngine.VERSION, dbVersion);
            }
            // Collect every missing component so the error lists them all at once.
            List<String> missingComponents = new ArrayList<>();
            if (!isEngineTablePresent()) {
                missingComponents.add("engine"); // depends on control dependency: [if], data = [none]
            }
            if (dbSqlSessionFactory.isDbHistoryUsed() && !isHistoryTablePresent()) {
                missingComponents.add("history"); // depends on control dependency: [if], data = [none]
            }
            if (dbSqlSessionFactory.isDbIdentityUsed() && !isIdentityTablePresent()) {
                missingComponents.add("identity"); // depends on control dependency: [if], data = [none]
            }
            if (dbSqlSessionFactory.isCmmnEnabled() && !isCmmnTablePresent()) {
                missingComponents.add("case.engine"); // depends on control dependency: [if], data = [none]
            }
            if (dbSqlSessionFactory.isDmnEnabled() && !isDmnTablePresent()) {
                missingComponents.add("decision.engine"); // depends on control dependency: [if], data = [none]
            }
            if (!missingComponents.isEmpty()) {
                throw LOG.missingTableException(missingComponents);
            }
        } catch (Exception e) {
            // Rethrow RuntimeExceptions unchanged; wrap everything else.
            if (isMissingTablesException(e)) {
                throw LOG.missingActivitiTablesException();
            } else {
                if (e instanceof RuntimeException) {
                    throw (RuntimeException) e;
                } else {
                    throw LOG.unableToFetchDbSchemaVersion(e);
                }
            }
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Computes the three second-order (Hessian) derivatives of the input
     * image, then optionally recomputes the border pixels using the given
     * border handler.
     *
     * @param orig    input image
     * @param derivXX output: second derivative along x
     * @param derivYY output: second derivative along y
     * @param derivXY output: mixed xy derivative
     * @param border  border handler; if {@code null} border pixels are skipped
     */
    public static void process(GrayF32 orig,
                               GrayF32 derivXX, GrayF32 derivYY, GrayF32 derivXY,
                               ImageBorder_F32 border ) {
        InputSanityCheck.reshapeOneIn(orig, derivXX, derivYY, derivXY);
        HessianThree_Standard.process(orig, derivXX, derivYY, derivXY);
        if (border == null) {
            return;
        }
        // Recompute the derivative values along the image border.
        DerivativeHelperFunctions.processBorderHorizontal(orig, derivXX, kernelXXYY_F32, border);
        DerivativeHelperFunctions.processBorderVertical(orig, derivYY, kernelXXYY_F32, border);
        ConvolveJustBorder_General_SB.convolve(kernelCross_F32, border, derivXY);
    } }
|
public class class_name {
    /**
     * Computes the three second-order (Hessian) derivatives of the input
     * image, then optionally recomputes the border pixels using the given
     * border handler.
     *
     * @param orig    input image
     * @param derivXX output: second derivative along x
     * @param derivYY output: second derivative along y
     * @param derivXY output: mixed xy derivative
     * @param border  border handler; if {@code null} border pixels are skipped
     */
    public static void process(GrayF32 orig,
                               GrayF32 derivXX, GrayF32 derivYY, GrayF32 derivXY,
                               ImageBorder_F32 border ) {
        InputSanityCheck.reshapeOneIn(orig, derivXX, derivYY, derivXY);
        HessianThree_Standard.process(orig, derivXX, derivYY, derivXY);
        // Recompute the derivative values along the image border.
        if( border != null ) {
            DerivativeHelperFunctions.processBorderHorizontal(orig, derivXX ,kernelXXYY_F32, border ); // depends on control dependency: [if], data = [none]
            DerivativeHelperFunctions.processBorderVertical(orig, derivYY ,kernelXXYY_F32, border ); // depends on control dependency: [if], data = [none]
            ConvolveJustBorder_General_SB.convolve(kernelCross_F32,border,derivXY); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Returns the configured runner, instantiated reflectively from the
     * RUNNER option's class name. If no runner is configured, the default
     * {@code OperaLauncherRunner} is set and the lookup is retried.
     *
     * @return a new runner instance
     * @throws WebDriverException if the class cannot be found, has no
     *         single-argument {@code OperaSettings} constructor, or fails to
     *         instantiate
     */
    public OperaRunner getRunner() {
        String klassName = (String) options.get(RUNNER).getValue();

        // If no runner is set, use the default one
        if (klassName == null) {
            setRunner(OperaLauncherRunner.class);
            return getRunner();
        }

        Class<?> klass;
        try {
            klass = Class.forName(klassName);
        } catch (ClassNotFoundException e) {
            // Preserve the cause so the classpath failure is debuggable.
            throw new WebDriverException("Unable to find runner class on classpath: " + klassName, e);
        }

        // Parameterized Constructor<?> avoids the raw-type warning.
        Constructor<?> constructor;
        try {
            constructor = klass.getDeclaredConstructor(OperaSettings.class);
        } catch (NoSuchMethodException e) {
            throw new WebDriverException("Invalid constructor in runner: " + klass.getName(), e);
        }

        OperaRunner runner;
        try {
            runner = (OperaRunner) constructor.newInstance(this);
        } catch (InstantiationException e) {
            throw new WebDriverException("Unable to create new instance of runner", e);
        } catch (IllegalAccessException e) {
            throw new WebDriverException("Denied access to runner: " + klass.getName(), e);
        } catch (InvocationTargetException e) {
            throw new WebDriverException("Runner threw exception on construction", e);
        }

        return runner;
    } }
|
public class class_name {
    /**
     * Returns the configured runner, instantiated reflectively from the
     * RUNNER option's class name. If no runner is configured, the default
     * {@code OperaLauncherRunner} is set and the lookup is retried.
     *
     * @return a new runner instance
     * @throws WebDriverException if the class cannot be found, has no
     *         single-argument {@code OperaSettings} constructor, or fails to
     *         instantiate
     */
    public OperaRunner getRunner() {
        String klassName = (String) options.get(RUNNER).getValue();
        // If no runner is set, use the default one
        if (klassName == null) {
            setRunner(OperaLauncherRunner.class); // depends on control dependency: [if], data = [none]
            return getRunner(); // depends on control dependency: [if], data = [none]
        }
        Class<?> klass;
        try {
            klass = Class.forName(klassName); // depends on control dependency: [try], data = [none]
        } catch (ClassNotFoundException e) {
            throw new WebDriverException("Unable to find runner class on classpath: " + klassName);
        } // depends on control dependency: [catch], data = [none]
        Constructor constructor;
        try {
            constructor = klass.getDeclaredConstructor(OperaSettings.class); // depends on control dependency: [try], data = [none]
        } catch (NoSuchMethodException e) {
            throw new WebDriverException("Invalid constructor in runner: " + klass.getName());
        } // depends on control dependency: [catch], data = [none]
        OperaRunner runner;
        try {
            runner = (OperaRunner) constructor.newInstance(this); // depends on control dependency: [try], data = [none]
        } catch (InstantiationException e) {
            throw new WebDriverException("Unable to create new instance of runner", e);
        } catch (IllegalAccessException e) { // depends on control dependency: [catch], data = [none]
            throw new WebDriverException("Denied access to runner: " + klass.getName());
        } catch (InvocationTargetException e) { // depends on control dependency: [catch], data = [none]
            throw new WebDriverException("Runner threw exception on construction", e);
        } // depends on control dependency: [catch], data = [none]
        return runner;
    } }
|
public class class_name {
    /**
     * Builds a human-readable description of a Nelson-rule violation,
     * preferring the condition's context description and falling back to its
     * data id, then appending the active rules.
     *
     * @param condition the violated Nelson condition
     * @return the violation description
     */
    public String nelson(NelsonCondition condition) {
        String base;
        // Prefer an explicit description from the condition context when present.
        if (condition.getContext() != null
                && condition.getContext().get(CONTEXT_PROPERTY_DESCRIPTION) != null) {
            base = condition.getContext().get(CONTEXT_PROPERTY_DESCRIPTION);
        } else {
            base = condition.getDataId();
        }
        return base + " violates one or the following Nelson rules: " + condition.getActiveRules();
    } }
|
public class class_name {
    /**
     * Builds a human-readable description of a Nelson-rule violation,
     * preferring the condition's context description and falling back to its
     * data id, then appending the active rules.
     *
     * @param condition the violated Nelson condition
     * @return the violation description
     */
    public String nelson(NelsonCondition condition) {
        String description;
        // Prefer an explicit description from the condition context when present.
        if (condition.getContext() != null && condition.getContext().get(CONTEXT_PROPERTY_DESCRIPTION) != null) {
            description = condition.getContext().get(CONTEXT_PROPERTY_DESCRIPTION); // depends on control dependency: [if], data = [none]
        } else {
            description = condition.getDataId(); // depends on control dependency: [if], data = [none]
        }
        description += " violates one or the following Nelson rules: " + condition.getActiveRules();
        return description;
    } }
|
public class class_name {
    /**
     * Installs the NIO RRD backend as the default factory, wiring the given
     * timer into it for file syncing. Does nothing if NIO is already the
     * default factory.
     *
     * @param timer timer used by the NIO backend to schedule file syncs
     * @throws IOException if registering the factory fails
     */
    public static void initBackendFactory(Timer timer) throws IOException {
        RrdNioBackend.setFileSyncTimer(timer);
        try {
            String defaultName = RrdBackendFactory.getDefaultFactory().getFactoryName();
            if (!defaultName.equals(RrdNioBackendFactory.FACTORY_NAME)) {
                RrdBackendFactory.registerAndSetAsDefaultFactory(new RrdNioBackendFactory());
            }
        } catch (final RrdException e) {
            // Surface the backend failure as an IOException for callers.
            throw createIOException(e);
        }
    } }
|
public class class_name {
    /**
     * Installs the NIO RRD backend as the default factory, wiring the given
     * timer into it for file syncing. Does nothing if NIO is already the
     * default factory.
     *
     * @param timer timer used by the NIO backend to schedule file syncs
     * @throws IOException if registering the factory fails
     */
    public static void initBackendFactory(Timer timer) throws IOException {
        RrdNioBackend.setFileSyncTimer(timer);
        try {
            if (!RrdBackendFactory.getDefaultFactory().getFactoryName()
                    .equals(RrdNioBackendFactory.FACTORY_NAME)) {
                RrdBackendFactory.registerAndSetAsDefaultFactory(new RrdNioBackendFactory());
                // depends on control dependency: [if], data = [none]
            }
        } catch (final RrdException e) {
            // Surface the backend failure as an IOException for callers.
            throw createIOException(e);
        }
    } }
|
public class class_name {
    /**
     * Handles a SIP dialog-terminated event: asynchronously cleans up the
     * dialog's application data (or its transaction's), tries to invalidate
     * the associated SIP session, and detaches the app data from the dialog.
     *
     * @param dialogTerminatedEvent the terminated-dialog event from the stack
     */
    public void processDialogTerminated(final DialogTerminatedEvent dialogTerminatedEvent) {
        final Dialog dialog = dialogTerminatedEvent.getDialog();
        if(logger.isDebugEnabled()) {
            logger.debug("Dialog Terminated => dialog Id : " + dialogTerminatedEvent.getDialog().getDialogId());
        }
        getAsynchronousExecutor().execute(new Runnable() {
            // https://github.com/RestComm/sip-servlets/issues/107 guard against NPEon concurrent cleanup
            final TransactionApplicationData dialogAppData = (TransactionApplicationData) dialog.getApplicationData();
            public void run() {
                try {
                    // NOTE(review): appDataFound is never set to true, so the
                    // "no application data" debug message below fires even when
                    // dialogAppData was present — confirm whether it should be
                    // set inside the non-null branch.
                    boolean appDataFound = false;
                    TransactionApplicationData txAppData = null;
                    if(dialogAppData != null) {
                        if(dialogAppData.getSipServletMessage() == null) {
                            // No message: clean up via the transaction's app data.
                            Transaction transaction = dialogAppData.getTransaction();
                            if(transaction != null && transaction.getApplicationData() != null) {
                                txAppData = (TransactionApplicationData) transaction.getApplicationData();
                                txAppData.cleanUp();
                            }
                        } else {
                            // A message exists: try to invalidate its SIP session.
                            MobicentsSipSessionKey sipSessionKey = dialogAppData.getSipSessionKey();
                            tryToInvalidateSession(sipSessionKey, false);
                        }
                        dialogAppData.cleanUp();
                        // since the stack doesn't nullify the app data, we need to do it to let go of the refs
                        dialog.setApplicationData(null);
                    }
                    if(!appDataFound && logger.isDebugEnabled()) {
                        logger.debug("no application data for this dialog " + dialog.getDialogId());
                    }
                } catch (Exception e) {
                    logger.error("Problem handling dialog termination", e);
                }
            }
        });
    } }
|
public class class_name {
    /**
     * Handles a SIP dialog-terminated event: asynchronously cleans up the
     * dialog's application data (or its transaction's), tries to invalidate
     * the associated SIP session, and detaches the app data from the dialog.
     *
     * @param dialogTerminatedEvent the terminated-dialog event from the stack
     */
    public void processDialogTerminated(final DialogTerminatedEvent dialogTerminatedEvent) {
        final Dialog dialog = dialogTerminatedEvent.getDialog();
        if(logger.isDebugEnabled()) {
            logger.debug("Dialog Terminated => dialog Id : " + dialogTerminatedEvent.getDialog().getDialogId()); // depends on control dependency: [if], data = [none]
        }
        getAsynchronousExecutor().execute(new Runnable() {
            // https://github.com/RestComm/sip-servlets/issues/107 guard against NPEon concurrent cleanup
            final TransactionApplicationData dialogAppData = (TransactionApplicationData) dialog.getApplicationData();
            public void run() {
                try {
                    // NOTE(review): appDataFound is never set to true, so the
                    // "no application data" debug message below always fires.
                    boolean appDataFound = false;
                    TransactionApplicationData txAppData = null;
                    if(dialogAppData != null) {
                        if(dialogAppData.getSipServletMessage() == null) {
                            Transaction transaction = dialogAppData.getTransaction();
                            if(transaction != null && transaction.getApplicationData() != null) {
                                txAppData = (TransactionApplicationData) transaction.getApplicationData(); // depends on control dependency: [if], data = [none]
                                txAppData.cleanUp(); // depends on control dependency: [if], data = [none]
                            }
                        } else {
                            MobicentsSipSessionKey sipSessionKey = dialogAppData.getSipSessionKey();
                            tryToInvalidateSession(sipSessionKey, false); // depends on control dependency: [if], data = [none]
                        }
                        dialogAppData.cleanUp(); // depends on control dependency: [if], data = [none]
                        // since the stack doesn't nullify the app data, we need to do it to let go of the refs
                        dialog.setApplicationData(null); // depends on control dependency: [if], data = [null)]
                    }
                    if(!appDataFound && logger.isDebugEnabled()) {
                        logger.debug("no application data for this dialog " + dialog.getDialogId()); // depends on control dependency: [if], data = [none]
                    }
                } catch (Exception e) {
                    logger.error("Problem handling dialog termination", e);
                } // depends on control dependency: [catch], data = [none]
            }
        });
    } }
|
public class class_name {
    /**
     * Returns the {@code EClass} for IfcPlanarExtent, resolving it lazily from the
     * registered Ifc2x3tc1 package on first access (classifier index 360) and caching
     * the result for subsequent calls.
     */
    public EClass getIfcPlanarExtent() {
        if (ifcPlanarExtentEClass != null) {
            return ifcPlanarExtentEClass;
        }
        EPackage ifcPackage = EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI);
        ifcPlanarExtentEClass = (EClass) ifcPackage.getEClassifiers().get(360);
        return ifcPlanarExtentEClass;
    } }
|
public class class_name {
    // NOTE(review): lazy accessor for the IfcPlanarExtent EClass. The stand-alone
    // "// depends on control dependency" comment below is a machine-generated
    // program-dependence annotation for the assignment above it; the code is kept
    // byte-identical so the annotation stays in place.
    public EClass getIfcPlanarExtent() {
        if (ifcPlanarExtentEClass == null) {
            ifcPlanarExtentEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI)
                    .getEClassifiers().get(360);
            // depends on control dependency: [if], data = [none]
        }
        return ifcPlanarExtentEClass;
    } }
|
public class class_name {
    /**
     * Checks whether the status word of the given response matches one of the
     * expected {@code SwEnum} values.
     *
     * @param pByte the raw response bytes (a {@code null} array yields no match)
     * @param pEnum the acceptable status-word values
     * @return {@code true} if the resolved status word is one of {@code pEnum}
     */
    public static boolean contains(final byte[] pByte, final SwEnum... pEnum) {
        SwEnum val = SwEnum.getSW(pByte);
        if (LOGGER.isDebugEnabled() && pByte != null) {
            // Log only the trailing (up to two) status-word bytes of the response.
            LOGGER.debug("Response Status <"
                    + BytesUtils.bytesToStringNoSpace(Arrays.copyOfRange(pByte, Math.max(pByte.length - 2, 0), pByte.length)) + "> : "
                    + (val != null ? val.getDetail() : "Unknown")); // FIX: log typo "Unknow" -> "Unknown"
        }
        return val != null && ArrayUtils.contains(pEnum, val);
    } }
|
public class class_name {
    // NOTE(review): status-word matcher. The trailing "// depends on control dependency"
    // comment is a machine-generated program-dependence annotation for the statement it
    // follows; code kept byte-identical (including the "Unknow" log typo) so the
    // annotation and the sample stay aligned.
    public static boolean contains(final byte[] pByte, final SwEnum... pEnum) {
        SwEnum val = SwEnum.getSW(pByte);
        if (LOGGER.isDebugEnabled() && pByte != null) {
            LOGGER.debug("Response Status <"
                    + BytesUtils.bytesToStringNoSpace(Arrays.copyOfRange(pByte, Math.max(pByte.length - 2, 0), pByte.length)) + "> : "
                    + (val != null ? val.getDetail() : "Unknow")); // depends on control dependency: [if], data = [none]
        }
        return val != null && ArrayUtils.contains(pEnum, val);
    } }
|
public class class_name {
    /**
     * Lazily creates and caches the PAIN generator for this job.
     *
     * @return the cached {@code PainGeneratorIf} instance
     * @throws HBCI_Exception if the generator cannot be created
     */
    protected final PainGeneratorIf getPainGenerator() {
        if (this.generator != null) {
            return this.generator;
        }
        try {
            this.generator = PainGeneratorFactory.get(this, this.getPainVersion());
        } catch (Exception e) {
            // Wrap any creation failure into an HBCI_Exception with a localized
            // message, preserving the original cause.
            throw new HBCI_Exception(HBCIUtils.getLocMsg("EXCMSG_JOB_CREATE_ERR", this.getPainJobName()), e);
        }
        return this.generator;
    } }
|
public class class_name {
    // NOTE(review): lazy accessor for the PAIN generator. The trailing
    // "// depends on control dependency" comments are machine-generated
    // program-dependence annotations for the statements they follow; code kept
    // byte-identical so the annotations stay aligned.
    protected final PainGeneratorIf getPainGenerator() {
        if (this.generator == null) {
            try {
                this.generator = PainGeneratorFactory.get(this, this.getPainVersion()); // depends on control dependency: [try], data = [none]
            } catch (Exception e) {
                String msg = HBCIUtils.getLocMsg("EXCMSG_JOB_CREATE_ERR", this.getPainJobName());
                throw new HBCI_Exception(msg, e);
            } // depends on control dependency: [catch], data = [none]
        }
        return this.generator;
    } }
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.