Dataset columns (dataset-viewer residue, reconstructed):

| code (string, length 130–281k) | code_dependency (string, length 182–306k) |
|---|---|
public class class_name {
/**
 * Reads the full character content of a {@link Clob} into a String.
 *
 * @param clob the CLOB whose character stream is consumed
 * @return the complete CLOB content as a String
 * @throws DbRuntimeException if obtaining the character stream fails
 */
public static String clobToStr(Clob clob) {
    Reader in = null;
    try {
        in = clob.getCharacterStream();
        return IoUtil.read(in);
    } catch (SQLException sqlEx) {
        // Surface the checked SQL failure as the project's unchecked DB exception.
        throw new DbRuntimeException(sqlEx);
    } finally {
        // Always release the reader, even when read() throws.
        IoUtil.close(in);
    }
} }
|
public class class_name {
// Dataset "code_dependency" variant: the "depends on control dependency" comments
// are machine-generated positional labels; the code must stay byte-identical.
// Purpose: read a CLOB's character stream fully into a String.
public static String clobToStr(Clob clob) {
Reader reader = null;
try {
reader = clob.getCharacterStream();
// depends on control dependency: [try], data = [none]
return IoUtil.read(reader);
// depends on control dependency: [try], data = [none]
} catch (SQLException e) {
throw new DbRuntimeException(e);
} finally {
IoUtil.close(reader);
}
} }
|
public class class_name {
/**
 * Removes every CommerceSubscriptionEntry that matches the given UUID.
 *
 * @param uuid the UUID whose matching entries are deleted
 */
@Override
public void removeByUuid(String uuid) {
    for (CommerceSubscriptionEntry match :
            findByUuid(uuid, QueryUtil.ALL_POS, QueryUtil.ALL_POS, null)) {
        remove(match);
    }
} }
|
public class class_name {
// Dataset "code_dependency" variant: the "depends on control dependency" comment
// is a machine-generated positional label; the code must stay byte-identical.
// Purpose: remove every entry returned by findByUuid for the given UUID.
@Override
public void removeByUuid(String uuid) {
for (CommerceSubscriptionEntry commerceSubscriptionEntry : findByUuid(
uuid, QueryUtil.ALL_POS, QueryUtil.ALL_POS, null)) {
remove(commerceSubscriptionEntry); // depends on control dependency: [for], data = [commerceSubscriptionEntry]
}
} }
|
public class class_name {
/**
 * Applies {@code op} to each node object and returns the first non-null
 * result, or {@code null} when no node object yields one.
 *
 * @param op the transformation applied to each NodeObject
 * @return the first non-null transformed value, or null
 */
public <T> T lookupNodeDatum (Function<NodeObject,T> op)
{
    Iterable<T> nonNullValues =
        Iterables.filter(Iterables.transform(getNodeObjects(), op), Predicates.notNull());
    for (T candidate : nonNullValues) {
        return candidate;
    }
    return null;
} }
|
public class class_name {
// Dataset "code_dependency" variant: the "depends on control dependency" comment
// is a machine-generated positional label; the code must stay byte-identical.
// Purpose: return the first non-null result of applying op to the node objects.
public <T> T lookupNodeDatum (Function<NodeObject,T> op)
{
for (T value :
Iterables.filter(Iterables.transform(getNodeObjects(), op), Predicates.notNull())) {
return value; // depends on control dependency: [for], data = [value]
}
return null;
} }
|
public class class_name {
/**
 * Fits the first row of an affine transform by linear least squares.
 * For each observation, p1 is mapped through Hzero and p2 through H; row i of
 * A holds (k.x, k.y, 1) and b holds the x-coordinate of the H-mapped point.
 * NOTE(review): only row 0 of the returned matrix is estimated; rows 1 and 2
 * remain the identity — confirm callers rely on that.
 *
 * @param observations point pairs used to build the linear system
 * @param H homography applied to p2 of each pair
 * @param Hzero homography applied to p1 of each pair
 * @return 3x3 matrix whose first row is the least-squares solution
 */
private SimpleMatrix computeAffineH( List<AssociatedPair> observations ,
DMatrixRMaj H , DMatrixRMaj Hzero ) {
SimpleMatrix A = new SimpleMatrix(observations.size(),3);
SimpleMatrix b = new SimpleMatrix(A.numRows(),1);
Point2D_F64 c = new Point2D_F64();
Point2D_F64 k = new Point2D_F64();
for( int i = 0; i < observations.size(); i++ ) {
AssociatedPair a = observations.get(i);
GeometryMath_F64.mult(Hzero, a.p1, k);
GeometryMath_F64.mult(H,a.p2,c);
A.setRow(i,0,k.x,k.y,1);
b.set(i,0,c.x);
}
// Least-squares solution of A x = b supplies the unknown first row.
SimpleMatrix x = A.solve(b);
SimpleMatrix Ha = SimpleMatrix.identity(3);
Ha.setRow(0,0,x.getDDRM().data);
return Ha;
} }
|
public class class_name {
// Dataset "code_dependency" variant: the "depends on control dependency" comments
// are machine-generated positional labels; the code must stay byte-identical.
// Purpose: least-squares fit of the first row of an affine transform.
private SimpleMatrix computeAffineH( List<AssociatedPair> observations ,
DMatrixRMaj H , DMatrixRMaj Hzero ) {
SimpleMatrix A = new SimpleMatrix(observations.size(),3);
SimpleMatrix b = new SimpleMatrix(A.numRows(),1);
Point2D_F64 c = new Point2D_F64();
Point2D_F64 k = new Point2D_F64();
for( int i = 0; i < observations.size(); i++ ) {
AssociatedPair a = observations.get(i);
GeometryMath_F64.mult(Hzero, a.p1, k); // depends on control dependency: [for], data = [none]
GeometryMath_F64.mult(H,a.p2,c); // depends on control dependency: [for], data = [none]
A.setRow(i,0,k.x,k.y,1); // depends on control dependency: [for], data = [i]
b.set(i,0,c.x); // depends on control dependency: [for], data = [i]
}
SimpleMatrix x = A.solve(b);
SimpleMatrix Ha = SimpleMatrix.identity(3);
Ha.setRow(0,0,x.getDDRM().data);
return Ha;
} }
|
public class class_name {
/**
 * Scans every entry of a discovered codebase: records each entry, parses class
 * names for application class resources, and queues nested archives for later
 * scanning.
 *
 * @param classPath the class path being assembled (not read directly here —
 *        NOTE(review): parameter appears unused in this method; confirm)
 * @param workList queue that receives nested-archive work items
 * @param discoveredCodeBase wrapper around the codebase being scanned
 * @throws InterruptedException if scanning is interrupted
 */
private void scanCodebase(IClassPath classPath, LinkedList<WorkListItem> workList, DiscoveredCodeBase discoveredCodeBase)
throws InterruptedException {
if (DEBUG) {
System.out.println("Scanning " + discoveredCodeBase.getCodeBase().getCodeBaseLocator());
}
IScannableCodeBase codeBase = (IScannableCodeBase) discoveredCodeBase.getCodeBase();
ICodeBaseIterator i = codeBase.iterator();
while (i.hasNext()) {
ICodeBaseEntry entry = i.next();
if (VERBOSE) {
System.out.println("Entry: " + entry.getResourceName());
}
// Parse class names only for application-codebase class resources,
// skipping single-file entries and honoring the NO_PARSE_CLASS_NAMES flag.
if (!NO_PARSE_CLASS_NAMES && codeBase.isApplicationCodeBase()
&& DescriptorFactory.isClassResource(entry.getResourceName()) && !(entry instanceof SingleFileCodeBaseEntry)) {
parseClassName(entry);
}
// Note the resource exists in this codebase
discoveredCodeBase.addCodeBaseEntry(entry);
// If resource is a nested archive, add it to the worklist
if (scanNestedArchives && (codeBase.isApplicationCodeBase() || codeBase instanceof DirectoryCodeBase)
&& Archive.isLibraryFileName(entry.getResourceName())) {
if (VERBOSE) {
System.out.println("Entry is an library!");
}
ICodeBaseLocator nestedArchiveLocator = classFactory.createNestedArchiveCodeBaseLocator(codeBase,
entry.getResourceName());
addToWorkList(workList,
new WorkListItem(nestedArchiveLocator, codeBase.isApplicationCodeBase(), ICodeBase.Discovered.NESTED));
}
}
} }
|
public class class_name {
// Dataset "code_dependency" variant: the "depends on control dependency" comment
// is a machine-generated positional label; the code must stay byte-identical.
// Purpose: scan codebase entries, record them, and queue nested archives.
private void scanCodebase(IClassPath classPath, LinkedList<WorkListItem> workList, DiscoveredCodeBase discoveredCodeBase)
throws InterruptedException {
if (DEBUG) {
System.out.println("Scanning " + discoveredCodeBase.getCodeBase().getCodeBaseLocator());
}
IScannableCodeBase codeBase = (IScannableCodeBase) discoveredCodeBase.getCodeBase();
ICodeBaseIterator i = codeBase.iterator();
while (i.hasNext()) {
ICodeBaseEntry entry = i.next();
if (VERBOSE) {
System.out.println("Entry: " + entry.getResourceName());
}
if (!NO_PARSE_CLASS_NAMES && codeBase.isApplicationCodeBase()
&& DescriptorFactory.isClassResource(entry.getResourceName()) && !(entry instanceof SingleFileCodeBaseEntry)) {
parseClassName(entry);
}
// Note the resource exists in this codebase
discoveredCodeBase.addCodeBaseEntry(entry);
// If resource is a nested archive, add it to the worklist
if (scanNestedArchives && (codeBase.isApplicationCodeBase() || codeBase instanceof DirectoryCodeBase)
&& Archive.isLibraryFileName(entry.getResourceName())) {
if (VERBOSE) {
System.out.println("Entry is an library!"); // depends on control dependency: [if], data = [none]
}
ICodeBaseLocator nestedArchiveLocator = classFactory.createNestedArchiveCodeBaseLocator(codeBase,
entry.getResourceName());
addToWorkList(workList,
new WorkListItem(nestedArchiveLocator, codeBase.isApplicationCodeBase(), ICodeBase.Discovered.NESTED));
}
}
} }
|
public class class_name {
/**
 * Scans the given base packages for classes carrying the annotation and
 * reports each candidate's class name to the handler.
 *
 * @param clas the annotation type to search for
 * @param basePackage packages to scan
 * @param handler callback invoked once per annotated class found
 */
public static void scan(final Class<? extends Annotation> clas, final String[] basePackage, final AnnotationHandler handler) {
    final ClassPathScanningCandidateComponentProvider scanner = new ClassPathScanningCandidateComponentProvider(false);
    scanner.setResourceLoader(new PathMatchingResourcePatternResolver(Thread.currentThread().getContextClassLoader()));
    scanner.addIncludeFilter(new AnnotationTypeFilter(clas));
    for (final String packageName : basePackage) {
        for (final BeanDefinition candidate : scanner.findCandidateComponents(packageName)) {
            handler.handleAnnotationFound(candidate.getBeanClassName());
        }
    }
} }
|
public class class_name {
// Dataset "code_dependency" variant: the "depends on control dependency" comment
// is a machine-generated positional label; the code must stay byte-identical.
// Purpose: scan base packages for the annotation and notify the handler.
public static void scan(final Class<? extends Annotation> clas, final String[] basePackage, final AnnotationHandler handler) {
final ClassPathScanningCandidateComponentProvider scanner = new ClassPathScanningCandidateComponentProvider(false);
scanner.setResourceLoader(new PathMatchingResourcePatternResolver(Thread.currentThread().getContextClassLoader()));
scanner.addIncludeFilter(new AnnotationTypeFilter(clas));
for (final String pck : basePackage) {
for (final BeanDefinition bd : scanner.findCandidateComponents(pck)) {
handler.handleAnnotationFound(bd.getBeanClassName()); // depends on control dependency: [for], data = [bd]
}
}
} }
|
public class class_name {
/**
 * Builds a newline-separated summary line per VersionInfo at the requested
 * level of detail.
 * NOTE(review): the switch intentionally falls through from FULL into GAVC so
 * both levels append the abbreviated commit id; the mid-case break then stops
 * GAVC before the FULL-only fields. The FULL-only fields are appended without
 * separators ("...buildTime=...branch=...commitMsg=...") — confirm that the
 * missing spaces are intended.
 *
 * @param detail how much information to include per line
 * @return the concatenated version report, one line per VersionInfo
 */
public static String versionInfoString(Detail detail) {
StringBuilder sb = new StringBuilder();
for(VersionInfo grp : getVersionInfos()){
sb.append(grp.getGroupId()).append(" : ").append(grp.getArtifactId()).append(" : ").append(grp.getBuildVersion());
switch (detail){
case FULL:
case GAVC:
sb.append(" - ").append(grp.getCommitIdAbbrev());
if(detail != Detail.FULL) break;
sb.append("buildTime=").append(grp.getBuildTime()).append("branch=").append(grp.getBranch())
.append("commitMsg=").append(grp.getCommitMessageShort());
}
sb.append("\n");
}
return sb.toString();
} }
|
public class class_name {
// Dataset "code_dependency" variant: the "depends on control dependency" comments
// are machine-generated positional labels; the code must stay byte-identical.
// Purpose: build a newline-separated version summary per VersionInfo.
public static String versionInfoString(Detail detail) {
StringBuilder sb = new StringBuilder();
for(VersionInfo grp : getVersionInfos()){
sb.append(grp.getGroupId()).append(" : ").append(grp.getArtifactId()).append(" : ").append(grp.getBuildVersion()); // depends on control dependency: [for], data = [grp]
switch (detail){
case FULL:
case GAVC:
sb.append(" - ").append(grp.getCommitIdAbbrev());
if(detail != Detail.FULL) break;
sb.append("buildTime=").append(grp.getBuildTime()).append("branch=").append(grp.getBranch())
.append("commitMsg=").append(grp.getCommitMessageShort());
}
sb.append("\n"); // depends on control dependency: [for], data = [none]
}
return sb.toString();
} }
|
public class class_name {
/**
 * Returns a NotImplemented error for the given cause.
 * With localized error messages enabled, delegates to the message-bearing
 * factory; otherwise records the cause on the shared payload and returns
 * the singleton instance.
 *
 * @param cause the underlying throwable
 * @return a NotImplemented instance carrying the cause
 */
public static NotImplemented of(Throwable cause) {
    if (!_localizedErrorMsg()) {
        touchPayload().cause(cause);
        return _INSTANCE;
    }
    return of(cause, defaultMessage(NOT_IMPLEMENTED));
} }
|
public class class_name {
// Dataset "code_dependency" variant: the "depends on control dependency" comments
// are machine-generated positional labels; the code must stay byte-identical.
// Purpose: produce a NotImplemented error, localized or singleton-based.
public static NotImplemented of(Throwable cause) {
if (_localizedErrorMsg()) {
return of(cause, defaultMessage(NOT_IMPLEMENTED)); // depends on control dependency: [if], data = [none]
} else {
touchPayload().cause(cause); // depends on control dependency: [if], data = [none]
return _INSTANCE; // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
/**
 * Stops and releases the service collection (if one exists) and clears the
 * current service reference.
 */
@Override
protected void onStop()
{
    if( null != m_serviceCollection )
    {
        m_serviceCollection.stop();
        // Drop the reference so a later start creates a fresh collection.
        m_serviceCollection = null;
    }
    setService( null );
} }
|
public class class_name {
// Dataset "code_dependency" variant: the "depends on control dependency" comments
// are machine-generated positional labels; the code must stay byte-identical.
// Purpose: stop and release the service collection, then clear the service.
@Override
protected void onStop()
{
if( m_serviceCollection != null )
{
m_serviceCollection.stop(); // depends on control dependency: [if], data = [none]
m_serviceCollection = null; // depends on control dependency: [if], data = [none]
}
setService( null );
} }
|
public class class_name {
/**
 * Sums the numeric values of all bond orders, skipping bonds whose order
 * is unset (null).
 *
 * @return the total of all defined bond orders
 */
@Override
public int getBondOrderSum() {
    int total = 0;
    for (int idx = 0; idx < getBondCount(); idx++) {
        IBond.Order order = getBond(idx).getOrder();
        if (order == null) {
            continue; // undefined order contributes nothing
        }
        total += order.numeric();
    }
    return total;
} }
|
public class class_name {
// Dataset "code_dependency" variant: the "depends on control dependency" comment
// is a machine-generated positional label; the code must stay byte-identical.
// Purpose: sum numeric bond orders, ignoring bonds with a null order.
@Override
public int getBondOrderSum() {
int sum = 0;
for (int i = 0; i < getBondCount(); i++) {
IBond.Order order = getBond(i).getOrder();
if (order != null) {
sum += order.numeric(); // depends on control dependency: [if], data = [none]
}
}
return sum;
} }
|
public class class_name {
/**
 * Records a newly discovered partition and reports whether this subtask is
 * responsible for it.
 *
 * @param partition the Kafka partition to register
 * @return true only if the partition was new AND is assigned to this subtask
 */
public boolean setAndCheckDiscoveredPartition(KafkaTopicPartition partition) {
    if (!isUndiscoveredPartition(partition)) {
        return false; // already known — nothing to do
    }
    discoveredPartitions.add(partition);
    return indexOfThisSubtask == KafkaTopicPartitionAssigner.assign(partition, numParallelSubtasks);
} }
|
public class class_name {
// Dataset "code_dependency" variant: the "depends on control dependency" comments
// are machine-generated positional labels; the code must stay byte-identical.
// Purpose: register a new partition and check if this subtask owns it.
public boolean setAndCheckDiscoveredPartition(KafkaTopicPartition partition) {
if (isUndiscoveredPartition(partition)) {
discoveredPartitions.add(partition); // depends on control dependency: [if], data = [none]
return KafkaTopicPartitionAssigner.assign(partition, numParallelSubtasks) == indexOfThisSubtask; // depends on control dependency: [if], data = [none]
}
return false;
} }
|
public class class_name {
/**
 * Initializes a MessageEndpointBase wrapper with container, bean, and
 * transaction state. The assignment order mirrors the superclass layout:
 * wrapper fields first, then this object's own fields.
 *
 * @param factory the endpoint factory that owns this endpoint
 * @param messageEndpointBase the endpoint instance being initialized
 * @param recoveryId transaction recovery identifier
 * @param container the owning EJB container
 * @param beanMetaData metadata for the message-driven bean
 * @param pmiBean PMI collaborator (may be null — TODO confirm)
 * @param wrapperManager wrapper manager for the container
 * @param rrsTransactional whether RRS transactions are enabled
 */
public static void construct(BaseMessageEndpointFactory factory,
MessageEndpointBase messageEndpointBase,
int recoveryId,
EJSContainer container,
BeanMetaData beanMetaData,
EJBPMICollaborator pmiBean,
WrapperManager wrapperManager,
boolean rrsTransactional) //d219252x
{
// Initialize superclass with the wrapper information.
messageEndpointBase.container = container;
messageEndpointBase.wrapperManager = wrapperManager;
messageEndpointBase.beanId = new BeanId(factory, null, false);
messageEndpointBase.bmd = beanMetaData;
messageEndpointBase.ivPmiBean = pmiBean;
messageEndpointBase.isolationAttrs = null;
messageEndpointBase.ivCommon = null; // Not a cached wrapper. d174057.2
messageEndpointBase.isManagedWrapper = false; // Not a managed wrapper. d174057.2
messageEndpointBase.methodInfos = messageEndpointBase.bmd.localMethodInfos;
messageEndpointBase.methodNames = messageEndpointBase.bmd.localMethodNames;
messageEndpointBase.ivInterface = WrapperInterface.MESSAGE_LISTENER; // d366807
// Initialize this objects instance variables.
messageEndpointBase.ivRecoveryId = recoveryId;
messageEndpointBase.ivMessageEndpointFactory = factory;
messageEndpointBase.ivTransactionManager = EmbeddableTransactionManagerFactory.getTransactionManager();
messageEndpointBase.ivRRSTransactional = rrsTransactional;
//d456256 start
// If there is only 1 method in the EJBMethodInfo for the
// message listener interface, then set ivSingleMethodInterface
// to true.
if (messageEndpointBase.methodInfos.length == 1)
{
messageEndpointBase.ivSingleMethodInterface = true;
}
// d456256 end
} }
|
public class class_name {
// Dataset "code_dependency" variant: the "depends on control dependency" comment
// is a machine-generated positional label; the code must stay byte-identical.
// Purpose: initialize a MessageEndpointBase wrapper with container/bean state.
public static void construct(BaseMessageEndpointFactory factory,
MessageEndpointBase messageEndpointBase,
int recoveryId,
EJSContainer container,
BeanMetaData beanMetaData,
EJBPMICollaborator pmiBean,
WrapperManager wrapperManager,
boolean rrsTransactional) //d219252x
{
// Initialize superclass with the wrapper information.
messageEndpointBase.container = container;
messageEndpointBase.wrapperManager = wrapperManager;
messageEndpointBase.beanId = new BeanId(factory, null, false);
messageEndpointBase.bmd = beanMetaData;
messageEndpointBase.ivPmiBean = pmiBean;
messageEndpointBase.isolationAttrs = null;
messageEndpointBase.ivCommon = null; // Not a cached wrapper. d174057.2
messageEndpointBase.isManagedWrapper = false; // Not a managed wrapper. d174057.2
messageEndpointBase.methodInfos = messageEndpointBase.bmd.localMethodInfos;
messageEndpointBase.methodNames = messageEndpointBase.bmd.localMethodNames;
messageEndpointBase.ivInterface = WrapperInterface.MESSAGE_LISTENER; // d366807
// Initialize this objects instance variables.
messageEndpointBase.ivRecoveryId = recoveryId;
messageEndpointBase.ivMessageEndpointFactory = factory;
messageEndpointBase.ivTransactionManager = EmbeddableTransactionManagerFactory.getTransactionManager();
messageEndpointBase.ivRRSTransactional = rrsTransactional;
//d456256 start
// If there is only 1 method in the EJBMethodInfo for the
// message listener interface, then set ivSingleMethodInterface
// to true.
if (messageEndpointBase.methodInfos.length == 1)
{
messageEndpointBase.ivSingleMethodInterface = true; // depends on control dependency: [if], data = [none]
}
// d456256 end
} }
|
public class class_name {
/**
 * Reads a target schema description (JSON) from a file, diffs it against the
 * live database, and either executes or only logs the resulting SQL.
 *
 * @param file JSON file describing the target database schema
 * @param ctx database context providing connection and helper
 * @param fDoDelete whether destructive statements may be generated
 * @param fReallyExecute true to execute the SQL, false to only log it
 * @return the list of SQL statements (possibly empty), or null when the file
 *         is missing or cannot be parsed
 * NOTE(review): " ... Your database schema is up to date" is logged even when
 * update statements were just produced/executed — confirm that wording is
 * intended.
 */
public static ArrayList<String> updateDatabaseSchemaFromFile( File file, DatabaseContext ctx, boolean fDoDelete, boolean fReallyExecute )
{
log.info( "Updating database schema from file " + file.getAbsolutePath() );
try
{
// Slurp the whole file; try/finally guarantees the scanner is closed.
StringBuilder targetJson = new StringBuilder();
Scanner scanner = new Scanner( new FileInputStream( file ) );
try
{
while( scanner.hasNextLine() )
targetJson.append( scanner.nextLine() );
}
finally
{
scanner.close();
}
Gson gson = new Gson();
DatabaseDescription targetDatabase = gson.fromJson( targetJson.toString(), DatabaseDescription.class );
if( targetDatabase == null )
{
log.error( "Cannot parse " + file.getAbsolutePath() + " to update DB, aborting schema update !" );
return null;
}
// Diff the live schema against the target to obtain migration SQL.
DatabaseDescriptionInspector inspector = new DatabaseDescriptionInspector();
DatabaseDescription dbDesc = inspector.getDatabaseDescription( ctx.db, ctx.dbh );
ArrayList<String> sqls = inspector.getSqlForUpdateDb( dbDesc, targetDatabase, fDoDelete, true/*
* table
* upper
* case
*/);
if( sqls != null && !sqls.isEmpty() )
{
log.info( " ... Needed to update database schema:" );
if( fReallyExecute )
{
for( String sql : sqls )
{
log.info( " ... Executing " + sql );
ctx.db.sqlUpdate( sql );
log.info( " --- ok" );
}
}
else
{
// Dry run: log the statements without touching the database.
for( String sql : sqls )
log.info( sql );
}
}
log.info( " ... Your database schema is up to date" );
return sqls;
}
catch( FileNotFoundException exception )
{
log.info( " ... " + file.getAbsolutePath() + " does not exist to update the database schema !" );
return null;
}
} }
|
public class class_name {
// Dataset "code_dependency" variant: the "depends on control dependency" comments
// are machine-generated positional labels; the code must stay byte-identical.
// Purpose: diff a JSON-described target schema against the live DB and
// execute or log the resulting SQL.
public static ArrayList<String> updateDatabaseSchemaFromFile( File file, DatabaseContext ctx, boolean fDoDelete, boolean fReallyExecute )
{
log.info( "Updating database schema from file " + file.getAbsolutePath() );
try
{
StringBuilder targetJson = new StringBuilder();
Scanner scanner = new Scanner( new FileInputStream( file ) );
try
{
while( scanner.hasNextLine() )
targetJson.append( scanner.nextLine() );
}
finally
{
scanner.close();
}
Gson gson = new Gson();
DatabaseDescription targetDatabase = gson.fromJson( targetJson.toString(), DatabaseDescription.class );
if( targetDatabase == null )
{
log.error( "Cannot parse " + file.getAbsolutePath() + " to update DB, aborting schema update !" ); // depends on control dependency: [if], data = [none]
return null; // depends on control dependency: [if], data = [none]
}
DatabaseDescriptionInspector inspector = new DatabaseDescriptionInspector();
DatabaseDescription dbDesc = inspector.getDatabaseDescription( ctx.db, ctx.dbh );
ArrayList<String> sqls = inspector.getSqlForUpdateDb( dbDesc, targetDatabase, fDoDelete, true/*
* table
* upper
* case
*/);
if( sqls != null && !sqls.isEmpty() )
{
log.info( " ... Needed to update database schema:" ); // depends on control dependency: [if], data = [none]
if( fReallyExecute )
{
for( String sql : sqls )
{
log.info( " ... Executing " + sql ); // depends on control dependency: [for], data = [sql]
ctx.db.sqlUpdate( sql ); // depends on control dependency: [for], data = [sql]
log.info( " --- ok" ); // depends on control dependency: [for], data = [none]
}
}
else
{
for( String sql : sqls )
log.info( sql );
}
}
log.info( " ... Your database schema is up to date" ); // depends on control dependency: [try], data = [none]
return sqls; // depends on control dependency: [try], data = [none]
}
catch( FileNotFoundException exception )
{
log.info( " ... " + file.getAbsolutePath() + " does not exist to update the database schema !" );
return null;
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
/**
 * Prints a periodic benchmark report: elapsed time, throughput,
 * abort/failure counts, optional latency, ad-request hit rate, and the top
 * recent advertisers fetched via the GetStats procedure.
 * Exits the JVM on a stats-procedure failure.
 * NOTE(review): if totalRequests is 0, percentMet becomes NaN — confirm
 * callers guarantee at least one request before this runs.
 */
public synchronized void printStatistics() {
ClientStats stats = m_periodicStatsContext.fetchAndResetBaseline().getStats();
// Elapsed seconds since benchmark start, formatted HH:MM:SS.
long time = Math.round((stats.getEndTimestamp() - m_benchmarkStartTS) / 1000.0);
System.out.printf("%02d:%02d:%02d ", time / 3600, (time / 60) % 60, time % 60);
System.out.printf("Throughput %d/s, ", stats.getTxnThroughput());
System.out.printf("Aborts/Failures %d/%d",
stats.getInvocationAborts(), stats.getInvocationErrors());
if(m_config.latencyreport) {
System.out.printf(", Avg/95%% Latency %.2f/%.2fms", stats.getAverageLatency(),
stats.kPercentileLatencyAsDouble(0.95));
}
System.out.printf("\n");
VoltTable[] tables = null;
try {
tables = m_client.callProcedure("GetStats", m_config.displayinterval).getResults();
} catch (IOException | ProcCallException e) {
// Stats are essential to the benchmark report; bail out entirely.
e.printStackTrace();
System.exit(1);
}
// Table 0: row 0 = unmet requests, row 1 = met requests (keyed by column 0).
VoltTable hits = tables[0];
hits.advanceRow();
assert(hits.getLong(0) == 0);
long unmetRequests = hits.getLong(1);
hits.advanceRow();
assert(hits.getLong(0) == 1);
long metRequests = hits.getLong(1);
long totalRequests = unmetRequests + metRequests;
double percentMet = (((double)metRequests) / totalRequests) * 100.0;
System.out.printf("Total number of ad requests: %d, %3.2f%% resulted in an ad being served\n",
totalRequests, percentMet);
// Table 1: recent top advertisers sorted by won-bid revenue.
VoltTable recentAdvertisers = tables[1];
System.out.println("\nTop 5 advertisers of the last " + m_config.displayinterval + " seconds, "
+ "by sorted on the sum of dollar amounts of bids won:");
System.out.println("Advertiser Revenue Count");
System.out.println("---------- ------- -----");
while (recentAdvertisers.advanceRow()) {
System.out.printf("%-40s %9.2f %d\n",
recentAdvertisers.getString(0),
recentAdvertisers.getDouble(1),
recentAdvertisers.getLong(2));
}
System.out.println();
} }
|
public class class_name {
// Dataset "code_dependency" variant: the "depends on control dependency" comments
// are machine-generated positional labels; the code must stay byte-identical.
// Purpose: print periodic benchmark statistics and the top recent advertisers.
public synchronized void printStatistics() {
ClientStats stats = m_periodicStatsContext.fetchAndResetBaseline().getStats();
long time = Math.round((stats.getEndTimestamp() - m_benchmarkStartTS) / 1000.0);
System.out.printf("%02d:%02d:%02d ", time / 3600, (time / 60) % 60, time % 60);
System.out.printf("Throughput %d/s, ", stats.getTxnThroughput());
System.out.printf("Aborts/Failures %d/%d",
stats.getInvocationAborts(), stats.getInvocationErrors());
if(m_config.latencyreport) {
System.out.printf(", Avg/95%% Latency %.2f/%.2fms", stats.getAverageLatency(),
stats.kPercentileLatencyAsDouble(0.95)); // depends on control dependency: [if], data = [none]
}
System.out.printf("\n");
VoltTable[] tables = null;
try {
tables = m_client.callProcedure("GetStats", m_config.displayinterval).getResults(); // depends on control dependency: [try], data = [none]
} catch (IOException | ProcCallException e) {
e.printStackTrace();
System.exit(1);
} // depends on control dependency: [catch], data = [none]
VoltTable hits = tables[0];
hits.advanceRow();
assert(hits.getLong(0) == 0);
long unmetRequests = hits.getLong(1);
hits.advanceRow();
assert(hits.getLong(0) == 1);
long metRequests = hits.getLong(1);
long totalRequests = unmetRequests + metRequests;
double percentMet = (((double)metRequests) / totalRequests) * 100.0;
System.out.printf("Total number of ad requests: %d, %3.2f%% resulted in an ad being served\n",
totalRequests, percentMet);
VoltTable recentAdvertisers = tables[1];
System.out.println("\nTop 5 advertisers of the last " + m_config.displayinterval + " seconds, "
+ "by sorted on the sum of dollar amounts of bids won:");
System.out.println("Advertiser Revenue Count");
System.out.println("---------- ------- -----");
while (recentAdvertisers.advanceRow()) {
System.out.printf("%-40s %9.2f %d\n",
recentAdvertisers.getString(0),
recentAdvertisers.getDouble(1),
recentAdvertisers.getLong(2));
}
System.out.println();
} }
|
public class class_name {
/**
 * Detaches and returns this node's first child.
 *
 * @return the removed first child, or null when there are no children
 */
@Nullable
public final Node removeFirstChild() {
    final Node firstChild = first;
    if (firstChild == null) {
        return null; // nothing to detach
    }
    removeChild(firstChild);
    return firstChild;
} }
|
public class class_name {
// Dataset "code_dependency" variant: the "depends on control dependency" comment
// is a machine-generated positional label; the code must stay byte-identical.
// Purpose: detach and return the first child node, or null if none.
@Nullable
public final Node removeFirstChild() {
Node child = first;
if (child != null) {
removeChild(child); // depends on control dependency: [if], data = [(child]
}
return child;
} }
|
public class class_name {
/**
 * Appends the given bulk deployments to this result, lazily creating the
 * backing list on first use, and returns {@code this} for chaining.
 *
 * @param bulkDeployments deployments to append
 * @return this result, for fluent chaining
 */
public ListBulkDeploymentsResult withBulkDeployments(BulkDeployment... bulkDeployments) {
    if (this.bulkDeployments == null) {
        // Presize to avoid resizes while appending below.
        setBulkDeployments(new java.util.ArrayList<BulkDeployment>(bulkDeployments.length));
    }
    for (int i = 0; i < bulkDeployments.length; i++) {
        this.bulkDeployments.add(bulkDeployments[i]);
    }
    return this;
} }
|
public class class_name {
// Dataset "code_dependency" variant: the "depends on control dependency" comments
// are machine-generated positional labels; the code must stay byte-identical.
// Purpose: append deployments to the lazily created list; fluent return.
public ListBulkDeploymentsResult withBulkDeployments(BulkDeployment... bulkDeployments) {
if (this.bulkDeployments == null) {
setBulkDeployments(new java.util.ArrayList<BulkDeployment>(bulkDeployments.length)); // depends on control dependency: [if], data = [none]
}
for (BulkDeployment ele : bulkDeployments) {
this.bulkDeployments.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} }
|
public class class_name {
public static String toJson(HppRequest hppRequest) {
try {
return hppRequestWriter.writeValueAsString(hppRequest);
} catch (JsonProcessingException ex) {
LOGGER.error("Error writing HppRequest to JSON.", ex);
throw new RealexException("Error writing HppRequest to JSON.", ex);
}
} }
|
public class class_name {
// Dataset "code_dependency" variant: the "depends on control dependency" comments
// are machine-generated positional labels; the code must stay byte-identical.
// Purpose: serialize an HppRequest to JSON, wrapping failures.
public static String toJson(HppRequest hppRequest) {
try {
return hppRequestWriter.writeValueAsString(hppRequest); // depends on control dependency: [try], data = [none]
} catch (JsonProcessingException ex) {
LOGGER.error("Error writing HppRequest to JSON.", ex);
throw new RealexException("Error writing HppRequest to JSON.", ex);
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
/**
 * Registers an image connection for the destination at the given transport
 * index, growing the connection array as needed. Multicast transports use
 * the remote-control address; explicit-control transports use the explicit
 * control address; other transports add no connection here.
 *
 * @param transportIndex slot for the new connection
 * @param transport the receive destination transport
 */
void addDestination(final int transportIndex, final ReceiveDestinationUdpTransport transport)
{
    imageConnections = ArrayUtil.ensureCapacity(imageConnections, transportIndex + 1);

    if (transport.isMulticast())
    {
        imageConnections[transportIndex] =
            new ImageConnection(cachedNanoClock.nanoTime(), transport.udpChannel().remoteControl());
    }
    else if (transport.hasExplicitControl())
    {
        imageConnections[transportIndex] =
            new ImageConnection(cachedNanoClock.nanoTime(), transport.explicitControlAddress());
    }
} }
|
public class class_name {
// Dataset "code_dependency" variant: the "depends on control dependency" comments
// are machine-generated positional labels; the code must stay byte-identical.
// Purpose: register an image connection for the transport at the given index.
void addDestination(final int transportIndex, final ReceiveDestinationUdpTransport transport)
{
imageConnections = ArrayUtil.ensureCapacity(imageConnections, transportIndex + 1);
if (transport.isMulticast())
{
imageConnections[transportIndex] = new ImageConnection(
cachedNanoClock.nanoTime(), transport.udpChannel().remoteControl()); // depends on control dependency: [if], data = [none]
}
else if (transport.hasExplicitControl())
{
imageConnections[transportIndex] = new ImageConnection(
cachedNanoClock.nanoTime(), transport.explicitControlAddress()); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
/**
 * Returns the DER-encoded OCTET STRING value of the certificate extension
 * identified by the given OID, or null when the extension is absent or any
 * error occurs.
 * Lookup order: known OIDs resolve through a named accessor; unknown OIDs
 * are searched linearly among all extensions; finally unparseable
 * extensions are consulted.
 * NOTE(review): the broad catch deliberately maps every failure to null —
 * callers cannot distinguish "absent" from "error".
 *
 * @param oid dotted-decimal OID of the extension
 * @return DER-encoded extension value, or null
 */
public byte[] getExtensionValue(String oid) {
try {
ObjectIdentifier findOID = new ObjectIdentifier(oid);
String extAlias = OIDMap.getName(findOID);
Extension certExt = null;
CertificateExtensions exts = (CertificateExtensions)info.get(
CertificateExtensions.NAME);
if (extAlias == null) { // may be unknown
// get the extensions, search thru' for this oid
if (exts == null) {
return null;
}
for (Extension ex : exts.getAllExtensions()) {
ObjectIdentifier inCertOID = ex.getExtensionId();
if (inCertOID.equals((Object)findOID)) {
certExt = ex;
break;
}
}
} else { // there's sub-class that can handle this extension
try {
certExt = (Extension)this.get(extAlias);
} catch (CertificateException e) {
// get() throws an Exception instead of returning null, ignore
}
}
// Fall back to unparseable extensions before giving up.
if (certExt == null) {
if (exts != null) {
certExt = exts.getUnparseableExtensions().get(oid);
}
if (certExt == null) {
return null;
}
}
byte[] extData = certExt.getExtensionValue();
if (extData == null) {
return null;
}
// Wrap the raw value in a DER OCTET STRING as required by the API contract.
DerOutputStream out = new DerOutputStream();
out.putOctetString(extData);
return out.toByteArray();
} catch (Exception e) {
return null;
}
} }
|
public class class_name {
// Dataset "code_dependency" variant: the "depends on control dependency" comments
// are machine-generated positional labels; the code must stay byte-identical.
// Purpose: return the DER-encoded value of the extension with the given OID.
public byte[] getExtensionValue(String oid) {
try {
ObjectIdentifier findOID = new ObjectIdentifier(oid);
String extAlias = OIDMap.getName(findOID);
Extension certExt = null;
CertificateExtensions exts = (CertificateExtensions)info.get(
CertificateExtensions.NAME);
if (extAlias == null) { // may be unknown
// get the extensions, search thru' for this oid
if (exts == null) {
return null; // depends on control dependency: [if], data = [none]
}
for (Extension ex : exts.getAllExtensions()) {
ObjectIdentifier inCertOID = ex.getExtensionId();
if (inCertOID.equals((Object)findOID)) {
certExt = ex; // depends on control dependency: [if], data = [none]
break;
}
}
} else { // there's sub-class that can handle this extension
try {
certExt = (Extension)this.get(extAlias); // depends on control dependency: [try], data = [none]
} catch (CertificateException e) {
// get() throws an Exception instead of returning null, ignore
} // depends on control dependency: [catch], data = [none]
}
if (certExt == null) {
if (exts != null) {
certExt = exts.getUnparseableExtensions().get(oid); // depends on control dependency: [if], data = [none]
}
if (certExt == null) {
return null; // depends on control dependency: [if], data = [none]
}
}
byte[] extData = certExt.getExtensionValue();
if (extData == null) {
return null; // depends on control dependency: [if], data = [none]
}
DerOutputStream out = new DerOutputStream();
out.putOctetString(extData); // depends on control dependency: [try], data = [none]
return out.toByteArray(); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
return null;
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
/**
 * Checks whether a device with the same name as the given camera is already
 * registered.
 *
 * @param ipcam the camera to look for; must not be null
 * @return true if a registered device shares its name
 * @throws IllegalArgumentException if ipcam is null
 */
public static boolean isRegistered(IpCamDevice ipcam) {
    if (ipcam == null) {
        throw new IllegalArgumentException("IP camera device cannot be null");
    }
    for (IpCamDevice registered : DEVICES) {
        if (registered.getName().equals(ipcam.getName())) {
            return true;
        }
    }
    return false;
} }
|
public class class_name {
// Dataset "code_dependency" variant: the "depends on control dependency" comment
// is a machine-generated positional label; the code must stay byte-identical.
// Purpose: check whether a device with the same name is already registered.
public static boolean isRegistered(IpCamDevice ipcam) {
if (ipcam == null) {
throw new IllegalArgumentException("IP camera device cannot be null");
}
Iterator<IpCamDevice> di = DEVICES.iterator();
while (di.hasNext()) {
if (di.next().getName().equals(ipcam.getName())) {
return true;
// depends on control dependency: [if], data = [none]
}
}
return false;
} }
|
public class class_name {
/**
 * Applies a parallax-style offset to the menu container while the drawer is
 * dragged, either via translation (when supported) or by offsetting the view,
 * with per-position (LEFT/RIGHT/TOP/BOTTOM) handling.
 * NOTE(review): sign is derived from mOffsetPixels / |mOffsetPixels|; if
 * mOffsetPixels is 0 here this is 0/0 — confirm callers never reach this
 * with a zero offset.
 *
 * @param offsetPixels current drag offset of the content, in pixels
 */
private void offsetMenu(int offsetPixels) {
// Parallax disabled or menu not measured yet — nothing to move.
if (!mOffsetMenu || mMenuSize == 0) {
return;
}
final int width = getWidth();
final int height = getHeight();
final int menuSize = mMenuSize;
final int sign = (int) (mOffsetPixels / Math.abs(mOffsetPixels));
final float openRatio = Math.abs(mOffsetPixels) / menuSize;
// Menu trails the content by up to a quarter of its closed portion.
final int offset = (int) (-0.25f * ((1.0f - openRatio) * menuSize) * sign);
switch (getPosition()) {
case LEFT: {
if (USE_TRANSLATIONS) {
if (offsetPixels > 0) {
mMenuContainer.setTranslationX(offset);
} else {
// Fully closed: park the menu off-screen.
mMenuContainer.setTranslationX(-menuSize);
}
} else {
mMenuContainer.offsetLeftAndRight(offset - mMenuContainer.getLeft());
mMenuContainer.setVisibility(offsetPixels == 0 ? INVISIBLE : VISIBLE);
}
break;
}
case RIGHT: {
if (USE_TRANSLATIONS) {
if (offsetPixels != 0) {
mMenuContainer.setTranslationX(offset);
} else {
mMenuContainer.setTranslationX(menuSize);
}
} else {
final int oldOffset = mMenuContainer.getRight() - width;
final int offsetBy = offset - oldOffset;
mMenuContainer.offsetLeftAndRight(offsetBy);
mMenuContainer.setVisibility(offsetPixels == 0 ? INVISIBLE : VISIBLE);
}
break;
}
case TOP: {
if (USE_TRANSLATIONS) {
if (offsetPixels > 0) {
mMenuContainer.setTranslationY(offset);
} else {
mMenuContainer.setTranslationY(-menuSize);
}
} else {
mMenuContainer.offsetTopAndBottom(offset - mMenuContainer.getTop());
mMenuContainer.setVisibility(offsetPixels == 0 ? INVISIBLE : VISIBLE);
}
break;
}
case BOTTOM: {
if (USE_TRANSLATIONS) {
if (offsetPixels != 0) {
mMenuContainer.setTranslationY(offset);
} else {
mMenuContainer.setTranslationY(menuSize);
}
} else {
final int oldOffset = mMenuContainer.getBottom() - height;
final int offsetBy = offset - oldOffset;
mMenuContainer.offsetTopAndBottom(offsetBy);
mMenuContainer.setVisibility(offsetPixels == 0 ? INVISIBLE : VISIBLE);
}
break;
}
}
} }
|
public class class_name {
// Dataset "code_dependency" variant: the "depends on control dependency" comments
// are machine-generated positional labels; the code must stay byte-identical.
// Purpose: apply a parallax offset to the menu container during drawer drags.
private void offsetMenu(int offsetPixels) {
if (!mOffsetMenu || mMenuSize == 0) {
return; // depends on control dependency: [if], data = [none]
}
final int width = getWidth();
final int height = getHeight();
final int menuSize = mMenuSize;
final int sign = (int) (mOffsetPixels / Math.abs(mOffsetPixels));
final float openRatio = Math.abs(mOffsetPixels) / menuSize;
final int offset = (int) (-0.25f * ((1.0f - openRatio) * menuSize) * sign);
switch (getPosition()) {
case LEFT: {
if (USE_TRANSLATIONS) {
if (offsetPixels > 0) {
mMenuContainer.setTranslationX(offset); // depends on control dependency: [if], data = [none]
} else {
mMenuContainer.setTranslationX(-menuSize); // depends on control dependency: [if], data = [none]
}
} else {
mMenuContainer.offsetLeftAndRight(offset - mMenuContainer.getLeft()); // depends on control dependency: [if], data = [none]
mMenuContainer.setVisibility(offsetPixels == 0 ? INVISIBLE : VISIBLE); // depends on control dependency: [if], data = [none]
}
break;
}
case RIGHT: {
if (USE_TRANSLATIONS) {
if (offsetPixels != 0) {
mMenuContainer.setTranslationX(offset); // depends on control dependency: [if], data = [none]
} else {
mMenuContainer.setTranslationX(menuSize); // depends on control dependency: [if], data = [none]
}
} else {
final int oldOffset = mMenuContainer.getRight() - width;
final int offsetBy = offset - oldOffset;
mMenuContainer.offsetLeftAndRight(offsetBy);
mMenuContainer.setVisibility(offsetPixels == 0 ? INVISIBLE : VISIBLE);
}
break;
}
case TOP: {
if (USE_TRANSLATIONS) {
if (offsetPixels > 0) {
mMenuContainer.setTranslationY(offset);
} else {
mMenuContainer.setTranslationY(-menuSize);
}
} else {
mMenuContainer.offsetTopAndBottom(offset - mMenuContainer.getTop());
mMenuContainer.setVisibility(offsetPixels == 0 ? INVISIBLE : VISIBLE);
}
break;
}
case BOTTOM: {
if (USE_TRANSLATIONS) {
if (offsetPixels != 0) {
mMenuContainer.setTranslationY(offset);
} else {
mMenuContainer.setTranslationY(menuSize);
}
} else {
final int oldOffset = mMenuContainer.getBottom() - height;
final int offsetBy = offset - oldOffset;
mMenuContainer.offsetTopAndBottom(offsetBy);
mMenuContainer.setVisibility(offsetPixels == 0 ? INVISIBLE : VISIBLE);
}
break;
}
}
} }
|
public class class_name {
    /**
     * Parses a four-part version string ("major.minor.micro.qualifier") into a
     * {@code LibertyVersion}.
     *
     * @param versionString the raw version text; may be {@code null} and may carry
     *                      surrounding whitespace
     * @return the parsed version, or {@code null} when the input is {@code null}
     *         or does not match the {@code VALID_VERSION} pattern
     */
    public static LibertyVersion valueOf(String versionString) {
        if (versionString == null) {
            return null;
        }
        final Matcher matched = VALID_VERSION.matcher(versionString.trim());
        if (!matched.matches()) {
            return null;
        }
        final int major = Integer.parseInt(matched.group(1));
        final int minor = Integer.parseInt(matched.group(2));
        final int micro = Integer.parseInt(matched.group(3));
        final int qualifier = Integer.parseInt(matched.group(4));
        return new LibertyVersion(major, minor, micro, qualifier);
    } }
|
public class class_name {
    /**
     * Parses a four-part version string ("major.minor.micro.qualifier") into a
     * {@code LibertyVersion}.
     *
     * @param versionString the raw version text; may be {@code null} and may carry
     *                      surrounding whitespace
     * @return the parsed version, or {@code null} when the input is {@code null}
     *         or does not match the {@code VALID_VERSION} pattern
     */
    public static LibertyVersion valueOf(String versionString) {
        if (versionString == null) {
            return null; // depends on control dependency: [if], data = [none]
        }
        versionString = versionString.trim();
        Matcher versionMatcher = VALID_VERSION.matcher(versionString);
        if (!versionMatcher.matches()) {
            return null; // depends on control dependency: [if], data = [none]
        }
        // All four capture groups are numeric by construction of VALID_VERSION.
        return new LibertyVersion(Integer.parseInt(versionMatcher.group(1)), Integer.parseInt(versionMatcher.group(2)), Integer.parseInt(versionMatcher.group(3)), Integer.parseInt(versionMatcher.group(4)));
    } }
|
public class class_name {
    /**
     * Appends the SQL representation of a parameter value: an inlined sub-query
     * when the value is a {@code Query}, otherwise a positional "?" placeholder.
     *
     * @param value the parameter value to render
     * @param buf   the buffer receiving the SQL text
     */
    private void appendParameter(Object value, StringBuffer buf)
    {
        if (!(value instanceof Query))
        {
            // Plain values become JDBC positional parameters.
            buf.append("?");
            return;
        }
        appendSubQuery((Query) value, buf);
    } }
|
public class class_name {
    /**
     * Appends the SQL representation of a parameter value: an inlined sub-query
     * when the value is a {@code Query}, otherwise a positional "?" placeholder.
     *
     * @param value the parameter value to render
     * @param buf   the buffer receiving the SQL text
     */
    private void appendParameter(Object value, StringBuffer buf)
    {
        if (value instanceof Query)
        {
            appendSubQuery((Query) value, buf);
            // depends on control dependency: [if], data = [none]
        }
        else
        {
            // Plain values become JDBC positional parameters.
            buf.append("?");
            // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Resolves a member of this component for the given key.
     *
     * @param pc          the current page context
     * @param key         the member name being looked up
     * @param dataMember  whether a data member (as opposed to a function) is requested
     * @param superAccess whether the lookup originates from a "super" scope
     * @return the resolved member, or {@code null} when none is accessible
     */
    protected Member getMember(PageContext pc, Collection.Key key, boolean dataMember, boolean superAccess) {
        // A "super" data-member access from a private context resolves to the base component.
        if (dataMember && key.equalsIgnoreCase(KeyConstants._super) && isPrivate(pc)) {
            final Component active = ComponentUtil.getActiveComponent(pc, this);
            return SuperComponent.superMember((ComponentImpl) active.getBaseComponent());
        }
        // Super access bypasses the data map and goes straight to the UDFs.
        if (superAccess) {
            return _udfs.get(key);
        }
        // Otherwise consult the data map, honouring access restrictions.
        final Member found = _data.get(key);
        return isAccessible(pc, found) ? found : null;
    } }
|
public class class_name {
    /**
     * Resolves a member of this component for the given key.
     *
     * @param pc          the current page context
     * @param key         the member name being looked up
     * @param dataMember  whether a data member (as opposed to a function) is requested
     * @param superAccess whether the lookup originates from a "super" scope
     * @return the resolved member, or {@code null} when none is accessible
     */
    protected Member getMember(PageContext pc, Collection.Key key, boolean dataMember, boolean superAccess) {
        // check super: a "super" data-member access from a private context
        // resolves to the base component.
        if (dataMember && key.equalsIgnoreCase(KeyConstants._super) && isPrivate(pc)) {
            Component ac = ComponentUtil.getActiveComponent(pc, this);
            return SuperComponent.superMember((ComponentImpl) ac.getBaseComponent()); // depends on control dependency: [if], data = [none]
        }
        // super access bypasses the data map and goes straight to the UDFs
        if (superAccess) return _udfs.get(key);
        // check data, honouring access restrictions
        Member member = _data.get(key);
        if (isAccessible(pc, member)) return member;
        return null;
    } }
|
public class class_name {
    /**
     * Creates (or retrieves) the named float-range type covering [min, max].
     * A name may only be registered once; re-registering with an identical
     * definition returns the existing instance, while a conflicting definition
     * is rejected.
     *
     * @param name the unique type name
     * @param min  the inclusive lower bound
     * @param max  the inclusive upper bound
     * @return the registered type instance for {@code name}
     * @throws IllegalArgumentException if {@code min > max}, or if {@code name}
     *         already exists with a different definition
     */
    public static Type createInstance(String name, float min, float max)
    {
        // Reject inverted ranges up front.
        if (min > max)
        {
            throw new IllegalArgumentException("'min' must be less than or equal to 'max'.");
        }
        synchronized (FLOAT_RANGE_TYPES)
        {
            final FloatRangeType candidate = new FloatRangeType(name, min, max);
            final FloatRangeType existing = FLOAT_RANGE_TYPES.get(name);
            if (existing == null)
            {
                // First registration under this name.
                FLOAT_RANGE_TYPES.put(name, candidate);
                return candidate;
            }
            if (existing.equals(candidate))
            {
                // Identical redefinition: reuse the old instance, discard the new one.
                return existing;
            }
            throw new IllegalArgumentException("The type '" + name + "' already exists and cannot be redefined.");
        }
    } }
|
public class class_name {
    /**
     * Creates (or retrieves) the named float-range type covering [min, max].
     * A name may only be registered once; re-registering with an identical
     * definition returns the existing instance, while a conflicting definition
     * is rejected.
     *
     * @param name the unique type name
     * @param min  the inclusive lower bound
     * @param max  the inclusive upper bound
     * @return the registered type instance for {@code name}
     * @throws IllegalArgumentException if {@code min > max}, or if {@code name}
     *         already exists with a different definition
     */
    public static Type createInstance(String name, float min, float max)
    {
        // Ensure that min is less than or equal to max.
        if (min > max)
        {
            throw new IllegalArgumentException("'min' must be less than or equal to 'max'.");
        }
        synchronized (FLOAT_RANGE_TYPES)
        {
            // Add the newly created type to the map of all types.
            FloatRangeType newType = new FloatRangeType(name, min, max);
            // Ensure that the named type does not already exist, unless it has an identical definition already, in which
            // case the old definition can be re-used and the new one discarded.
            FloatRangeType oldType = FLOAT_RANGE_TYPES.get(name);
            if ((oldType != null) && !oldType.equals(newType))
            {
                throw new IllegalArgumentException("The type '" + name + "' already exists and cannot be redefined.");
            }
            else if ((oldType != null) && oldType.equals(newType))
            {
                return oldType; // depends on control dependency: [if], data = [none]
            }
            else
            {
                FLOAT_RANGE_TYPES.put(name, newType); // depends on control dependency: [if], data = [none]
                return newType; // depends on control dependency: [if], data = [none]
            }
        }
    } }
|
public class class_name {
    /**
     * Sets the containment reference to the component type of this generic array
     * type reference. EMF-generated setter: the old value is inverse-removed, the
     * new value inverse-added, and the accumulated notification chain dispatched
     * in a single batch; the exact statement order is required by the EMF
     * notification protocol.
     *
     * @param newComponentType the new component type; may be {@code null}
     */
    public void setComponentType(JvmTypeReference newComponentType)
    {
        if (newComponentType != componentType)
        {
            NotificationChain msgs = null;
            // Detach the old value from the inverse reference, if any.
            if (componentType != null)
                msgs = ((InternalEObject)componentType).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - TypesPackage.JVM_GENERIC_ARRAY_TYPE_REFERENCE__COMPONENT_TYPE, null, msgs);
            // Attach the new value to the inverse reference, if any.
            if (newComponentType != null)
                msgs = ((InternalEObject)newComponentType).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - TypesPackage.JVM_GENERIC_ARRAY_TYPE_REFERENCE__COMPONENT_TYPE, null, msgs);
            msgs = basicSetComponentType(newComponentType, msgs);
            // Fire all accumulated notifications at once.
            if (msgs != null) msgs.dispatch();
        }
        // Value unchanged: still emit a SET notification when observers require it.
        else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, TypesPackage.JVM_GENERIC_ARRAY_TYPE_REFERENCE__COMPONENT_TYPE, newComponentType, newComponentType));
    } }
|
public class class_name {
    /**
     * Sets the containment reference to the component type of this generic array
     * type reference. EMF-generated setter: the old value is inverse-removed, the
     * new value inverse-added, and the accumulated notification chain dispatched
     * in a single batch; the exact statement order is required by the EMF
     * notification protocol.
     *
     * @param newComponentType the new component type; may be {@code null}
     */
    public void setComponentType(JvmTypeReference newComponentType)
    {
        if (newComponentType != componentType)
        {
            NotificationChain msgs = null;
            // Detach the old value from the inverse reference, if any.
            if (componentType != null)
                msgs = ((InternalEObject)componentType).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - TypesPackage.JVM_GENERIC_ARRAY_TYPE_REFERENCE__COMPONENT_TYPE, null, msgs);
            // Attach the new value to the inverse reference, if any.
            if (newComponentType != null)
                msgs = ((InternalEObject)newComponentType).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - TypesPackage.JVM_GENERIC_ARRAY_TYPE_REFERENCE__COMPONENT_TYPE, null, msgs);
            msgs = basicSetComponentType(newComponentType, msgs); // depends on control dependency: [if], data = [(newComponentType]
            // Fire all accumulated notifications at once.
            if (msgs != null) msgs.dispatch();
        }
        // Value unchanged: still emit a SET notification when observers require it.
        else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, TypesPackage.JVM_GENERIC_ARRAY_TYPE_REFERENCE__COMPONENT_TYPE, newComponentType, newComponentType));
    } }
|
public class class_name {
    /**
     * Resolves a possibly-relative URL against a base URL, following the
     * algorithm of RFC 1808, Section 4 ("Resolving Relative URLs").
     *
     * Fix: in the "<segment>/../" reduction loop below, the original code
     * executed {@code continue} when the segment preceding "/../" had no leading
     * slash (i.e. the path does not start with '/'). Since the loop body made no
     * progress on that branch, inputs such as a relative path of "../../x"
     * combined with a slash-less base path spun forever. The branch now breaks
     * out of the loop, leaving the irreducible path as-is; behavior on every
     * terminating input is unchanged.
     *
     * @param baseUrl     the already-parsed base URL; {@code null} means unknown,
     *                    in which case the embedded URL is taken as absolute
     * @param relativeUrl the embedded (possibly relative) URL text
     * @return the resolved URL; component values may be inherited from {@code baseUrl}
     */
    private static Url resolveUrl(final Url baseUrl, final String relativeUrl) {
        final Url url = parseUrl(relativeUrl);
        // Step 1: The base URL is established according to the rules of
        //         Section 3.  If the base URL is the empty string (unknown),
        //         the embedded URL is interpreted as an absolute URL and
        //         we are done.
        if (baseUrl == null) {
            return url;
        }
        // Step 2: Both the base and embedded URLs are parsed into their component parts as
        //         described in Section 2.4.
        //      a) If the embedded URL is entirely empty, it inherits the
        //         entire base URL (i.e., is set equal to the base URL) and we are done.
        if (relativeUrl.length() == 0) {
            return new Url(baseUrl);
        }
        //      b) If the embedded URL starts with a scheme name, it is interpreted as an absolute URL
        //         and we are done.
        if (url.scheme_ != null) {
            return url;
        }
        //      c) Otherwise, the embedded URL inherits the scheme of the base URL.
        url.scheme_ = baseUrl.scheme_;
        // Step 3: If the embedded URL's <net_loc> is non-empty, we skip to Step 7.  Otherwise, the
        //         embedded URL inherits the <net_loc> (if any) of the base URL.
        if (url.location_ != null) {
            return url;
        }
        url.location_ = baseUrl.location_;
        // Step 4: If the embedded URL path is preceded by a slash "/", the
        //         path is not relative and we skip to Step 7.
        if ((url.path_ != null) && ((url.path_.length() > 0) && ('/' == url.path_.charAt(0)))) {
            url.path_ = removeLeadingSlashPoints(url.path_);
            return url;
        }
        // Step 5: If the embedded URL path is empty (and not preceded by a
        //         slash), then the embedded URL inherits the base URL path, and
        if (url.path_ == null) {
            url.path_ = baseUrl.path_;
            //  a) if the embedded URL's <params> is non-empty, we skip to
            //     step 7; otherwise, it inherits the <params> of the base
            //     URL (if any) and
            if (url.parameters_ != null) {
                return url;
            }
            url.parameters_ = baseUrl.parameters_;
            //  b) if the embedded URL's <query> is non-empty, we skip to
            //     step 7; otherwise, it inherits the <query> of the base
            //     URL (if any) and we skip to step 7.
            if (url.query_ != null) {
                return url;
            }
            url.query_ = baseUrl.query_;
            return url;
        }
        // Step 6: The last segment of the base URL's path (anything
        //         following the rightmost slash "/", or the entire path if no
        //         slash is present) is removed and the embedded URL's path is
        //         appended in its place.
        final String basePath = baseUrl.path_;
        String path = "";
        if (basePath != null) {
            final int lastSlashIndex = basePath.lastIndexOf('/');
            if (lastSlashIndex >= 0) {
                path = basePath.substring(0, lastSlashIndex + 1);
            }
        } else {
            path = "/";
        }
        path = path.concat(url.path_);
        // The following operations are then applied, in order, to the new path:
        //  a) All occurrences of "./", where "." is a complete path segment, are removed.
        int pathSegmentIndex;
        while ((pathSegmentIndex = path.indexOf("/./")) >= 0) {
            path = path.substring(0, pathSegmentIndex + 1).concat(path.substring(pathSegmentIndex + 3));
        }
        //  b) If the path ends with "." as a complete path segment, that "." is removed.
        if (path.endsWith("/.")) {
            path = path.substring(0, path.length() - 1);
        }
        //  c) All occurrences of "<segment>/../", where <segment> is a complete path segment not
        //     equal to "..", are removed.  Removal of these path segments is performed iteratively,
        //     removing the leftmost matching pattern on each iteration, until no matching pattern
        //     remains.
        while ((pathSegmentIndex = path.indexOf("/../")) > 0) {
            final String pathSegment = path.substring(0, pathSegmentIndex);
            final int slashIndex = pathSegment.lastIndexOf('/');
            if (slashIndex < 0) {
                // The segment before "/../" starts the path with no leading slash,
                // so there is no parent directory to collapse into. The original
                // "continue" here never made progress and looped forever; stop
                // processing instead and leave the path unchanged.
                break;
            }
            if (!"..".equals(pathSegment.substring(slashIndex))) {
                path = path.substring(0, slashIndex + 1).concat(path.substring(pathSegmentIndex + 4));
            }
        }
        //  d) If the path ends with "<segment>/..", where <segment> is a complete path segment not
        //     equal to "..", that "<segment>/.." is removed.
        if (path.endsWith("/..")) {
            final String pathSegment = path.substring(0, path.length() - 3);
            final int slashIndex = pathSegment.lastIndexOf('/');
            if (slashIndex >= 0) {
                path = path.substring(0, slashIndex + 1);
            }
        }
        path = removeLeadingSlashPoints(path);
        url.path_ = path;
        // Step 7: The resulting URL components, including any inherited from
        //         the base URL, are recombined to give the absolute form of
        //         the embedded URL.
        return url;
    } }
|
public class class_name {
    /**
     * Resolves a possibly-relative URL against a base URL, following the
     * algorithm of RFC 1808, Section 4 ("Resolving Relative URLs").
     *
     * @param baseUrl     the already-parsed base URL; {@code null} means unknown,
     *                    in which case the embedded URL is taken as absolute
     * @param relativeUrl the embedded (possibly relative) URL text
     * @return the resolved URL; component values may be inherited from {@code baseUrl}
     */
    private static Url resolveUrl(final Url baseUrl, final String relativeUrl) {
        final Url url = parseUrl(relativeUrl);
        // Step 1: The base URL is established according to the rules of
        //         Section 3.  If the base URL is the empty string (unknown),
        //         the embedded URL is interpreted as an absolute URL and
        //         we are done.
        if (baseUrl == null) {
            return url; // depends on control dependency: [if], data = [none]
        }
        // Step 2: Both the base and embedded URLs are parsed into their component parts as
        //         described in Section 2.4.
        //      a) If the embedded URL is entirely empty, it inherits the
        //         entire base URL (i.e., is set equal to the base URL) and we are done.
        if (relativeUrl.length() == 0) {
            return new Url(baseUrl); // depends on control dependency: [if], data = [none]
        }
        //      b) If the embedded URL starts with a scheme name, it is interpreted as an absolute URL
        //         and we are done.
        if (url.scheme_ != null) {
            return url; // depends on control dependency: [if], data = [none]
        }
        //      c) Otherwise, the embedded URL inherits the scheme of the base URL.
        url.scheme_ = baseUrl.scheme_;
        // Step 3: If the embedded URL's <net_loc> is non-empty, we skip to Step 7.  Otherwise, the
        //         embedded URL inherits the <net_loc> (if any) of the base URL.
        if (url.location_ != null) {
            return url; // depends on control dependency: [if], data = [none]
        }
        url.location_ = baseUrl.location_;
        // Step 4: If the embedded URL path is preceded by a slash "/", the
        //         path is not relative and we skip to Step 7.
        if ((url.path_ != null) && ((url.path_.length() > 0) && ('/' == url.path_.charAt(0)))) {
            url.path_ = removeLeadingSlashPoints(url.path_); // depends on control dependency: [if], data = [none]
            return url; // depends on control dependency: [if], data = [none]
        }
        // Step 5: If the embedded URL path is empty (and not preceded by a
        //         slash), then the embedded URL inherits the base URL path, and
        if (url.path_ == null) {
            url.path_ = baseUrl.path_; // depends on control dependency: [if], data = [none]
            //  a) if the embedded URL's <params> is non-empty, we skip to
            //     step 7; otherwise, it inherits the <params> of the base
            //     URL (if any) and
            if (url.parameters_ != null) {
                return url; // depends on control dependency: [if], data = [none]
            }
            url.parameters_ = baseUrl.parameters_; // depends on control dependency: [if], data = [none]
            //  b) if the embedded URL's <query> is non-empty, we skip to
            //     step 7; otherwise, it inherits the <query> of the base
            //     URL (if any) and we skip to step 7.
            if (url.query_ != null) {
                return url; // depends on control dependency: [if], data = [none]
            }
            url.query_ = baseUrl.query_; // depends on control dependency: [if], data = [none]
            return url; // depends on control dependency: [if], data = [none]
        }
        // Step 6: The last segment of the base URL's path (anything
        //         following the rightmost slash "/", or the entire path if no
        //         slash is present) is removed and the embedded URL's path is
        //         appended in its place.
        final String basePath = baseUrl.path_;
        String path = "";
        if (basePath != null) {
            final int lastSlashIndex = basePath.lastIndexOf('/');
            if (lastSlashIndex >= 0) {
                path = basePath.substring(0, lastSlashIndex + 1); // depends on control dependency: [if], data = [none]
            }
        } else {
            path = "/"; // depends on control dependency: [if], data = [none]
        }
        path = path.concat(url.path_);
        // The following operations are then applied, in order, to the new path:
        //  a) All occurrences of "./", where "." is a complete path segment, are removed.
        int pathSegmentIndex;
        while ((pathSegmentIndex = path.indexOf("/./")) >= 0) {
            path = path.substring(0, pathSegmentIndex + 1).concat(path.substring(pathSegmentIndex + 3)); // depends on control dependency: [while], data = [none]
        }
        //  b) If the path ends with "." as a complete path segment, that "." is removed.
        if (path.endsWith("/.")) {
            path = path.substring(0, path.length() - 1); // depends on control dependency: [if], data = [none]
        }
        //  c) All occurrences of "<segment>/../", where <segment> is a complete path segment not
        //     equal to "..", are removed.  Removal of these path segments is performed iteratively,
        //     removing the leftmost matching pattern on each iteration, until no matching pattern
        //     remains.
        while ((pathSegmentIndex = path.indexOf("/../")) > 0) {
            final String pathSegment = path.substring(0, pathSegmentIndex);
            final int slashIndex = pathSegment.lastIndexOf('/');
            if (slashIndex < 0) {
                // FIXME(review): this "continue" makes no progress, so a path whose
                // first segment precedes "/../" without a leading slash (e.g. "a/../b")
                // loops forever. It should almost certainly be "break" — confirm and fix.
                continue;
            }
            if (!"..".equals(pathSegment.substring(slashIndex))) {
                path = path.substring(0, slashIndex + 1).concat(path.substring(pathSegmentIndex + 4)); // depends on control dependency: [if], data = [none]
            }
        }
        //  d) If the path ends with "<segment>/..", where <segment> is a complete path segment not
        //     equal to "..", that "<segment>/.." is removed.
        if (path.endsWith("/..")) {
            final String pathSegment = path.substring(0, path.length() - 3);
            final int slashIndex = pathSegment.lastIndexOf('/');
            if (slashIndex >= 0) {
                path = path.substring(0, slashIndex + 1); // depends on control dependency: [if], data = [none]
            }
        }
        path = removeLeadingSlashPoints(path);
        url.path_ = path;
        // Step 7: The resulting URL components, including any inherited from
        //         the base URL, are recombined to give the absolute form of
        //         the embedded URL.
        return url;
    } }
|
public class class_name {
    /**
     * Dispatches a single file-system watch event to the listener inside a
     * transaction. Newly created directories are additionally scanned recursively.
     *
     * @param registerWatchKey whether newly discovered directories should be registered
     * @param item             the queued event together with its root and parent path
     * @return {@code true} to keep the watch key alive, {@code false} to cancel it
     * @throws IOException if scanning a created directory fails
     */
    private boolean handleWatchEvent(final boolean registerWatchKey, final WatchEventItem item) throws IOException {
        final WatchEvent event = item.getEvent();
        final Path root = item.getRoot();
        final Path parent = item.getPath();
        // By default the watch key is kept alive.
        boolean keepWatching = true;
        try (final Tx tx = StructrApp.getInstance().tx()) {
            final Path resolved = parent.resolve((Path) event.context());
            final Kind kind = event.kind();
            if (StandardWatchEventKinds.ENTRY_CREATE.equals(kind)) {
                if (Files.isDirectory(resolved)) {
                    // A new directory may already contain entries we never saw events for.
                    scanDirectoryTree(registerWatchKey, root, resolved);
                }
                keepWatching = listener.onCreate(root, parent, resolved);
            } else if (StandardWatchEventKinds.ENTRY_DELETE.equals(kind)) {
                keepWatching = listener.onDelete(root, parent, resolved);
            } else if (StandardWatchEventKinds.ENTRY_MODIFY.equals(kind)) {
                keepWatching = listener.onModify(root, parent, resolved);
            }
            tx.success();
        } catch (FrameworkException fex) {
            fex.printStackTrace();
        }
        return keepWatching;
    } }
|
public class class_name {
    /**
     * Dispatches a single file-system watch event to the listener inside a
     * transaction. Newly created directories are additionally scanned recursively.
     *
     * @param registerWatchKey whether newly discovered directories should be registered
     * @param item             the queued event together with its root and parent path
     * @return {@code true} to keep the watch key alive, {@code false} to cancel it
     * @throws IOException if scanning a created directory fails
     */
    private boolean handleWatchEvent(final boolean registerWatchKey, final WatchEventItem item) throws IOException {
        final WatchEvent event = item.getEvent();
        final Path root = item.getRoot();
        final Path parent = item.getPath();
        boolean result = true; // default is "don't cancel watch key"
        try (final Tx tx = StructrApp.getInstance().tx()) {
            final Path path = parent.resolve((Path)event.context());
            final Kind kind = event.kind();
            if (StandardWatchEventKinds.ENTRY_CREATE.equals(kind)) {
                if (Files.isDirectory(path)) {
                    // A new directory may already contain entries we never saw events for.
                    scanDirectoryTree(registerWatchKey, root, path); // depends on control dependency: [if], data = [none]
                }
                result = listener.onCreate(root, parent, path);
            } else if (StandardWatchEventKinds.ENTRY_DELETE.equals(kind)) {
                result = listener.onDelete(root, parent, path);
            } else if (StandardWatchEventKinds.ENTRY_MODIFY.equals(kind)) {
                result = listener.onModify(root, parent, path);
            }
            tx.success();
        } catch (FrameworkException fex) {
            fex.printStackTrace();
        }
        return result;
    } }
|
public class class_name {
    /**
     * Lazily builds and returns the OK/Cancel button row. The OK button uses
     * {@code weightx = 1.0} with an EAST anchor so both buttons are pushed to
     * the right edge of the panel.
     *
     * @return the (cached) button panel
     */
    private JPanel getButtonPanel() {
        if (buttonPanel == null) {
            buttonPanel = new JPanel(new GridBagLayout());
            final GridBagConstraints okConstraints = new GridBagConstraints();
            okConstraints.anchor = GridBagConstraints.EAST;
            okConstraints.gridx = 0;
            okConstraints.gridy = 0;
            // All horizontal slack goes to the OK cell, right-aligning the pair.
            okConstraints.weightx = 1.0D;
            okConstraints.insets = new Insets(5, 5, 5, 5);
            buttonPanel.add(getOkButton(), okConstraints);
            final GridBagConstraints cancelConstraints = new GridBagConstraints();
            cancelConstraints.gridx = 1;
            cancelConstraints.gridy = 0;
            cancelConstraints.weightx = 0.0D;
            cancelConstraints.insets = new Insets(5, 5, 5, 5);
            buttonPanel.add(getCancelButton(), cancelConstraints);
        }
        return buttonPanel;
    } }
|
public class class_name {
    /**
     * Lazily builds and returns the OK/Cancel button row. The OK button uses
     * {@code weightx = 1.0} with an EAST anchor so both buttons are pushed to
     * the right edge of the panel.
     *
     * @return the (cached) button panel
     */
    private JPanel getButtonPanel() {
        if (buttonPanel == null) {
            buttonPanel = new JPanel(); // depends on control dependency: [if], data = [none]
            buttonPanel.setLayout(new GridBagLayout()); // depends on control dependency: [if], data = [none]
            GridBagConstraints c1 = new GridBagConstraints();
            c1.anchor = GridBagConstraints.EAST; // depends on control dependency: [if], data = [none]
            c1.gridx = 0; // depends on control dependency: [if], data = [none]
            c1.gridy = 0; // depends on control dependency: [if], data = [none]
            // All horizontal slack goes to the OK cell, right-aligning the pair.
            c1.weightx = 1.0D; // depends on control dependency: [if], data = [none]
            c1.insets = new Insets(5, 5, 5, 5); // depends on control dependency: [if], data = [none]
            buttonPanel.add(getOkButton(), c1); // depends on control dependency: [if], data = [none]
            GridBagConstraints c2 = new GridBagConstraints();
            c2.gridx = 1; // depends on control dependency: [if], data = [none]
            c2.gridy = 0; // depends on control dependency: [if], data = [none]
            c2.weightx = 0.0D; // depends on control dependency: [if], data = [none]
            c2.insets = new Insets(5, 5, 5, 5); // depends on control dependency: [if], data = [none]
            buttonPanel.add(getCancelButton(), c2); // depends on control dependency: [if], data = [none]
        }
        return buttonPanel;
    } }
|
public class class_name {
    /**
     * Updates this widget's state from a gallery search bean. Results are copied
     * out of the bean (it is reused across searches), and the last page — plus,
     * when available, the page before it — is loaded, with the earliest loaded
     * page recorded as the first shown page.
     *
     * @param searchBean the new search state; {@code null} clears the results
     */
    public void updateSearchBean(CmsGallerySearchBean searchBean) {
        m_searchBean = searchBean;
        if (searchBean == null) {
            m_resultBeans = null;
            return;
        }
        m_pageSize = searchBean.getMatchesPerPage();
        final int lastPage = searchBean.getLastPage();
        // We don't just store the search bean because it gets reused for multiple
        // searches and so the result list may change.
        m_resultBeans = searchBean.getResults();
        if (lastPage == -1) {
            return;
        }
        loadPage(lastPage);
        m_firstShownPage = lastPage;
        if (lastPage > 1) {
            // Also pre-load the preceding page so the user can scroll back.
            loadPage(lastPage - 1);
            m_firstShownPage = lastPage - 1;
        }
    } }
|
public class class_name {
    /**
     * Updates this widget's state from a gallery search bean. Results are copied
     * out of the bean (it is reused across searches), and the last page — plus,
     * when available, the page before it — is loaded, with the earliest loaded
     * page recorded as the first shown page.
     *
     * @param searchBean the new search state; {@code null} clears the results
     */
    public void updateSearchBean(CmsGallerySearchBean searchBean) {
        m_searchBean = searchBean;
        if (searchBean != null) {
            m_pageSize = searchBean.getMatchesPerPage();
            // depends on control dependency: [if], data = [none]
            int lastPage = searchBean.getLastPage();
            // we don't just store the search bean because it gets reused for multiple searches
            // and so the result list may change.
            m_resultBeans = searchBean.getResults();
            // depends on control dependency: [if], data = [none]
            if (lastPage != -1) {
                loadPage(lastPage);
                // depends on control dependency: [if], data = [(lastPage]
                m_firstShownPage = lastPage;
                // depends on control dependency: [if], data = [none]
                if (lastPage > 1) {
                    // also pre-load the preceding page so the user can scroll back
                    loadPage(lastPage - 1);
                    // depends on control dependency: [if], data = [(lastPage]
                    m_firstShownPage = lastPage - 1;
                    // depends on control dependency: [if], data = [none]
                }
            }
        } else {
            m_resultBeans = null;
            // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Post-processes the execution graph after construction: unifies properties
     * across vertices sharing the same cell address, links subgraphs to duplicate
     * cell vertices, propagates values into RANGE vertices, sets the effective
     * value of IF vertices from their taken branch, and finally removes
     * duplicates when a threshold is configured.
     *
     * @param multiRoot whether the graph has multiple roots
     *                  (NOTE(review): not referenced in this body — confirm whether
     *                  it is still needed by callers or overriding implementations)
     */
    public void runPostProcessing(boolean multiRoot) {
        // make identical vertices have the same set of properties
        // two vertices are identical if they have the same address value.
        // Id for every vertex is unique, so this is not a flag here
        for (Set<ExecutionGraphVertex> vs : this.addressToVertices.values()) {
            // the logic below is very fragile and based on some empirical model
            // and may not work for other type of graphs
            if (vs != null && vs.size() > 1) {
                for (ExecutionGraphVertex vertex : vs) {
                    // The first formula-typed (or aliased) vertex is taken as the
                    // canonical source of properties for the whole identical set.
                    if (CELL_WITH_FORMULA == vertex.properties().getType() || null != vertex.properties().getAlias()) {
                        copyProperties(vertex, vs);
                        break;
                    }
                }
            }
        }
        // copy or link subgraphs to identical vertices and modify Formula field with additional values
        Map<String, AtomicInteger> adressToCount = new HashMap<>();
        for (ExecutionGraphVertex vertex : this.graph.getVertices()) {
            // restore/add subgraphs to identical vertices
            Type type = vertex.properties().getType();
            if (isCell(type)) {
                String address = vertex.properties().getName();
                adressToCount.putIfAbsent(address, new AtomicInteger(0));
                if (adressToCount.get(address).incrementAndGet() > 1) { // count > 1
                    // need to link: collect the sources feeding every vertex with this
                    // address, then attach them to each duplicate as well
                    Set<ExecutionGraphVertex> subgraphTops = new HashSet<>();
                    this.graph.getVertices().stream()
                            .filter(v -> address.equals(v.properties.getName())) // check for subgraph
                            .forEach(v -> this.graph.getIncomingEdgesOf(v).forEach(e -> subgraphTops.add(this.graph.getEdgeSource(e))));
                    for (ExecutionGraphVertex subVertex : subgraphTops) {
                        if (!this.addressToVertices.containsKey(address)) { continue; }
                        this.addressToVertices.get(address).forEach(v -> this.graph.addEdge(subVertex, v));
                    }
                }
            }
            if (RANGE == type) { connectValuesToRange(vertex, this); }
            /* Adding IF Value: the IF vertex takes the value of its non-comparison
               incoming operand (the branch result, not the condition). */
            if (IF == type) {
                Collection<ExecutionGraphEdge> two = this.graph.getIncomingEdgesOf(vertex);
                if (two.size() != 2) { throw new CalculationEngineException("IF must have only two incoming edges."); }
                Object ifBranchValue = null;
                for (ExecutionGraphEdge e : two) {
                    ExecutionGraphVertex oneOfTwo = this.graph.getEdgeSource(e);
                    if (!isCompareOperand(oneOfTwo.getName())) {
                        ifBranchValue = oneOfTwo.properties().getValue();
                        break;
                    }
                }
                vertex.properties().setValue(ifBranchValue);
            }
        }
        // -1 disables duplicate removal entirely
        if (this.config.getDuplicatesNumberThreshold() != -1) { removeAllDuplicates(this); }
    } }
|
public class class_name {
    /**
     * Post-processes the execution graph after construction: unifies properties
     * across vertices sharing the same cell address, links subgraphs to duplicate
     * cell vertices, propagates values into RANGE vertices, sets the effective
     * value of IF vertices from their taken branch, and finally removes
     * duplicates when a threshold is configured.
     *
     * @param multiRoot whether the graph has multiple roots
     *                  (NOTE(review): not referenced in this body — confirm whether
     *                  it is still needed by callers or overriding implementations)
     */
    public void runPostProcessing(boolean multiRoot) {
        // make identical vertices have the same set of properties
        // two vertices are identical if they have the same address value.
        // Id for every vertex is unique, so this is not a flag here
        for (Set<ExecutionGraphVertex> vs : this.addressToVertices.values()) {
            // the logic below is very fragile and based on some empirical model
            // and may not work for other type of graphs
            if (vs != null && vs.size() > 1) {
                for (ExecutionGraphVertex vertex : vs) {
                    // The first formula-typed (or aliased) vertex is taken as the
                    // canonical source of properties for the whole identical set.
                    if (CELL_WITH_FORMULA == vertex.properties().getType() || null != vertex.properties().getAlias()) {
                        copyProperties(vertex, vs);
                        // depends on control dependency: [if], data = [none]
                        break;
                    }
                }
            }
        }
        // copy or link subgraphs to identical vertices and modify Formula field with additional values
        Map<String, AtomicInteger> adressToCount = new HashMap<>();
        for (ExecutionGraphVertex vertex : this.graph.getVertices()) {
            // restore/add subgraphs to identical vertices
            Type type = vertex.properties().getType();
            if (isCell(type)) {
                String address = vertex.properties().getName();
                adressToCount.putIfAbsent(address, new AtomicInteger(0));
                // depends on control dependency: [if], data = [none]
                if (adressToCount.get(address).incrementAndGet() > 1) { // count > 1
                    // need to link: collect the sources feeding every vertex with this
                    // address, then attach them to each duplicate as well
                    Set<ExecutionGraphVertex> subgraphTops = new HashSet<>();
                    this.graph.getVertices().stream()
                            .filter(v -> address.equals(v.properties.getName())) // check for subgraph
                            .forEach(v -> this.graph.getIncomingEdgesOf(v).forEach(e -> subgraphTops.add(this.graph.getEdgeSource(e))));
                    // depends on control dependency: [if], data = [none]
                    for (ExecutionGraphVertex subVertex : subgraphTops) {
                        if (!this.addressToVertices.containsKey(address)) { continue; }
                        this.addressToVertices.get(address).forEach(v -> this.graph.addEdge(subVertex, v));
                        // depends on control dependency: [for], data = [subVertex]
                    }
                }
            }
            if (RANGE == type) { connectValuesToRange(vertex, this); }
            // depends on control dependency: [if], data = [none]
            /* Adding IF Value: the IF vertex takes the value of its non-comparison
               incoming operand (the branch result, not the condition). */
            if (IF == type) {
                Collection<ExecutionGraphEdge> two = this.graph.getIncomingEdgesOf(vertex);
                if (two.size() != 2) { throw new CalculationEngineException("IF must have only two incoming edges."); }
                Object ifBranchValue = null;
                for (ExecutionGraphEdge e : two) {
                    ExecutionGraphVertex oneOfTwo = this.graph.getEdgeSource(e);
                    if (!isCompareOperand(oneOfTwo.getName())) {
                        ifBranchValue = oneOfTwo.properties().getValue();
                        // depends on control dependency: [if], data = [none]
                        break;
                    }
                }
                vertex.properties().setValue(ifBranchValue);
                // depends on control dependency: [if], data = [none]
            }
        }
        // -1 disables duplicate removal entirely
        if (this.config.getDuplicatesNumberThreshold() != -1) { removeAllDuplicates(this); }
        // depends on control dependency: [if], data = [none]
    } }
|
public class class_name {
    /**
     * Builds the table configuration for a CF "timeSeries" (station) dataset:
     * a station table with an observation child table whose shape depends on
     * the detected encoding (single, multidim, ragged-contiguous, ragged-index,
     * or flat).
     *
     * @param ds     the dataset being analysed
     * @param info   encoding details filled in by {@code identifyEncodingStation}
     * @param errlog sink for diagnostic messages
     * @return the assembled station table config, or {@code null} if the dataset
     *         cannot be identified or a required table cannot be built
     * @throws IOException on read errors while probing the dataset
     */
    protected TableConfig getStationConfig(NetcdfDataset ds, EncodingInfo info, Formatter errlog) throws IOException {
        if (!identifyEncodingStation(ds, info, CF.FeatureType.timeSeries, errlog))
            return null;
        // make station table
        TableConfig stnTable = makeStationTable(ds, FeatureType.STATION, info, errlog);
        if (stnTable == null) return null;
        Dimension obsDim = info.childDim;
        TableConfig obsTable = null;
        switch (info.encoding) {
            case single:
                obsTable = makeSingle(ds, obsDim, errlog);
                break;
            case multidim:
                obsTable = makeMultidimInner(ds, stnTable, info.childDim, info, errlog);
                if (info.time.getRank() == 1) { // join time(time)
                    obsTable.addJoin(new JoinArray(info.time, JoinArray.Type.raw, 0));
                    obsTable.time = info.time.getFullName();
                }
                break;
            case raggedContiguous:
                // row-size variable tells how many obs belong to each station
                stnTable.numRecords = info.ragged_rowSize.getFullName();
                obsTable = makeRaggedContiguousChildTable(ds, info.parentDim, info.childDim, info.childStruct, errlog);
                break;
            case raggedIndex:
                obsTable = makeRaggedIndexChildTable(ds, info.parentDim, info.childDim, info.ragged_parentIndex, errlog);
                break;
            case flat:
                // flat encoding: station metadata lives on the obs dimension itself
                info.set(Encoding.flat, obsDim);
                obsTable = makeStructTable(ds, FeatureType.STATION, info, errlog);
                obsTable.parentIndex = (info.instanceId == null) ? null : info.instanceId.getFullName();
                // station id: prefer cf_role, fall back to standard_name
                Variable stnIdVar = Evaluator.findVariableWithAttributeAndDimension(ds, CF.CF_ROLE, CF.STATION_ID, obsDim, errlog);
                if (stnIdVar == null)
                    stnIdVar = Evaluator.findVariableWithAttributeAndDimension(ds, CF.STANDARD_NAME, CF.STATION_ID, obsDim, errlog);
                obsTable.stnId = (stnIdVar == null) ? null : stnIdVar.getFullName();
                // station description: platform_name preferred over the legacy station_description
                obsTable.stnDesc = Evaluator.findNameOfVariableWithAttributeValue(ds, CF.STANDARD_NAME, CF.PLATFORM_NAME);
                if (obsTable.stnDesc == null)
                    obsTable.stnDesc = Evaluator.findNameOfVariableWithAttributeValue(ds, CF.STANDARD_NAME, CF.STATION_DESC);
                obsTable.stnWmoId = Evaluator.findNameVariableWithStandardNameAndDimension(ds, CF.STATION_WMOID, obsDim, errlog);
                // altitude: surface_altitude preferred over the legacy station_altitude
                obsTable.stnAlt = Evaluator.findNameVariableWithStandardNameAndDimension(ds, CF.SURFACE_ALTITUDE, obsDim, errlog);
                if (obsTable.stnAlt == null)
                    obsTable.stnAlt = Evaluator.findNameVariableWithStandardNameAndDimension(ds, CF.STATION_ALTITUDE, obsDim, errlog);
                break;
        }
        if (obsTable == null) return null;
        stnTable.addChild(obsTable);
        return stnTable;
    } }
|
public class class_name {
    /**
     * Builds the table configuration for a CF "timeSeries" (station) dataset:
     * a station table with an observation child table whose shape depends on
     * the detected encoding (single, multidim, ragged-contiguous, ragged-index,
     * or flat).
     *
     * @param ds     the dataset being analysed
     * @param info   encoding details filled in by {@code identifyEncodingStation}
     * @param errlog sink for diagnostic messages
     * @return the assembled station table config, or {@code null} if the dataset
     *         cannot be identified or a required table cannot be built
     * @throws IOException on read errors while probing the dataset
     */
    protected TableConfig getStationConfig(NetcdfDataset ds, EncodingInfo info, Formatter errlog) throws IOException {
        if (!identifyEncodingStation(ds, info, CF.FeatureType.timeSeries, errlog))
            return null;
        // make station table
        TableConfig stnTable = makeStationTable(ds, FeatureType.STATION, info, errlog);
        if (stnTable == null) return null;
        Dimension obsDim = info.childDim;
        TableConfig obsTable = null;
        switch (info.encoding) {
            case single:
                obsTable = makeSingle(ds, obsDim, errlog);
                break;
            case multidim:
                obsTable = makeMultidimInner(ds, stnTable, info.childDim, info, errlog);
                if (info.time.getRank() == 1) { // join time(time)
                    obsTable.addJoin(new JoinArray(info.time, JoinArray.Type.raw, 0));
                    // depends on control dependency: [if], data = [none]
                    obsTable.time = info.time.getFullName();
                    // depends on control dependency: [if], data = [none]
                }
                break;
            case raggedContiguous:
                // row-size variable tells how many obs belong to each station
                stnTable.numRecords = info.ragged_rowSize.getFullName();
                obsTable = makeRaggedContiguousChildTable(ds, info.parentDim, info.childDim, info.childStruct, errlog);
                break;
            case raggedIndex:
                obsTable = makeRaggedIndexChildTable(ds, info.parentDim, info.childDim, info.ragged_parentIndex, errlog);
                break;
            case flat:
                // flat encoding: station metadata lives on the obs dimension itself
                info.set(Encoding.flat, obsDim);
                obsTable = makeStructTable(ds, FeatureType.STATION, info, errlog);
                obsTable.parentIndex = (info.instanceId == null) ? null : info.instanceId.getFullName();
                // station id: prefer cf_role, fall back to standard_name
                Variable stnIdVar = Evaluator.findVariableWithAttributeAndDimension(ds, CF.CF_ROLE, CF.STATION_ID, obsDim, errlog);
                if (stnIdVar == null)
                    stnIdVar = Evaluator.findVariableWithAttributeAndDimension(ds, CF.STANDARD_NAME, CF.STATION_ID, obsDim, errlog);
                obsTable.stnId = (stnIdVar == null) ? null : stnIdVar.getFullName();
                // station description: platform_name preferred over the legacy station_description
                obsTable.stnDesc = Evaluator.findNameOfVariableWithAttributeValue(ds, CF.STANDARD_NAME, CF.PLATFORM_NAME);
                if (obsTable.stnDesc == null)
                    obsTable.stnDesc = Evaluator.findNameOfVariableWithAttributeValue(ds, CF.STANDARD_NAME, CF.STATION_DESC);
                obsTable.stnWmoId = Evaluator.findNameVariableWithStandardNameAndDimension(ds, CF.STATION_WMOID, obsDim, errlog);
                // altitude: surface_altitude preferred over the legacy station_altitude
                obsTable.stnAlt = Evaluator.findNameVariableWithStandardNameAndDimension(ds, CF.SURFACE_ALTITUDE, obsDim, errlog);
                if (obsTable.stnAlt == null)
                    obsTable.stnAlt = Evaluator.findNameVariableWithStandardNameAndDimension(ds, CF.STATION_ALTITUDE, obsDim, errlog);
                break;
        }
        if (obsTable == null) return null;
        stnTable.addChild(obsTable);
        return stnTable;
    } }
|
public class class_name {
    /**
     * Attempts to display a chat message immediately.
     *
     * Fix: the graphics context is now released in a {@code finally} block so it
     * is not leaked if the inner {@code displayMessage(message, gfx)} call throws.
     *
     * @param message          the message to display
     * @param alreadyDisplayed whether the message was displayed before
     *                         (NOTE(review): not referenced in this body — kept for
     *                         interface compatibility; confirm intended use)
     * @return {@code true} if the message was rendered, {@code false} if the
     *         component is not yet laid out or no graphics context is available
     */
    public boolean displayMessage (ChatMessage message, boolean alreadyDisplayed)
    {
        // nothing doing if we've not been laid out
        if (!isLaidOut()) {
            return false;
        }
        // possibly display it now
        Graphics2D gfx = getTargetGraphics();
        if (gfx == null) {
            return false;
        }
        try {
            displayMessage(message, gfx); // display it
            return true;
        } finally {
            gfx.dispose(); // always release the context, even if painting throws
        }
    } }
|
public class class_name {
    /**
     * Attempts to display a chat message immediately.
     *
     * @param message          the message to display
     * @param alreadyDisplayed whether the message was displayed before
     *                         (NOTE(review): not referenced in this body — confirm intended use)
     * @return {@code true} if the message was rendered, {@code false} if the
     *         component is not yet laid out or no graphics context is available
     */
    public boolean displayMessage (ChatMessage message, boolean alreadyDisplayed)
    {
        // nothing doing if we've not been laid out
        if (!isLaidOut()) {
            return false; // depends on control dependency: [if], data = [none]
        }
        // possibly display it now
        Graphics2D gfx = getTargetGraphics();
        if (gfx != null) {
            displayMessage(message, gfx); // display it // depends on control dependency: [if], data = [none]
            gfx.dispose(); // clean up // depends on control dependency: [if], data = [none]
            return true; // depends on control dependency: [if], data = [none]
        }
        return false;
    } }
|
public class class_name {
    /**
     * Returns the first direct child element with the given node name.
     *
     * @param name the required child element name (must not be null)
     * @return the matching child wrapped as a new {@code Xml}
     * @throws LionEngineException if no child with this name exists
     */
    public Xml getChild(String name)
    {
        Check.notNull(name);
        final NodeList children = root.getChildNodes();
        final int count = children.getLength();
        for (int index = 0; index < count; index++)
        {
            final Node current = children.item(index);
            if (!(current instanceof Element))
            {
                continue;
            }
            if (name.equals(current.getNodeName()))
            {
                return new Xml(document, (Element) current);
            }
        }
        throw new LionEngineException(ERROR_NODE + name);
    }
}
|
public class class_name {
public Xml getChild(String name)
{
Check.notNull(name);
final NodeList list = root.getChildNodes();
for (int i = 0; i < list.getLength(); i++)
{
final Node node = list.item(i);
if (node instanceof Element && node.getNodeName().equals(name))
{
return new Xml(document, (Element) node); // depends on control dependency: [if], data = [none]
}
}
throw new LionEngineException(ERROR_NODE + name);
} }
|
public class class_name {
    /**
     * Builds a column-name to (family, qualifier) mapping for every column.
     * The family is the column's locality group if one is configured,
     * otherwise the column name itself; the qualifier is always the column
     * name.
     */
    private static Map<String, Pair<String, String>> autoGenerateMapping(List<ColumnMetadata> columns, Optional<Map<String, Set<String>>> groups)
    {
        Map<String, Pair<String, String>> mapping = new HashMap<>();
        columns.forEach(column -> {
            String columnName = column.getName();
            String family = getColumnLocalityGroup(columnName, groups).orElse(columnName);
            mapping.put(columnName, Pair.of(family, columnName));
        });
        return mapping;
    }
}
|
public class class_name {
private static Map<String, Pair<String, String>> autoGenerateMapping(List<ColumnMetadata> columns, Optional<Map<String, Set<String>>> groups)
{
Map<String, Pair<String, String>> mapping = new HashMap<>();
for (ColumnMetadata column : columns) {
Optional<String> family = getColumnLocalityGroup(column.getName(), groups);
mapping.put(column.getName(), Pair.of(family.orElse(column.getName()), column.getName())); // depends on control dependency: [for], data = [column]
}
return mapping;
} }
|
public class class_name {
    /**
     * Recursively deletes everything inside {@code dir}, leaving the
     * directory itself in place. Failed deletions are ignored (best effort),
     * matching the original behavior of discarding {@code delete()}'s result.
     *
     * @param dir an existing directory whose contents are to be removed
     * @throws IllegalArgumentException if {@code dir} does not exist or is
     *         not a directory
     */
    public static void deleteDirContents(File dir) {
        if (!dir.isDirectory() || !dir.exists()) {
            throw new IllegalArgumentException(dir.getAbsolutePath()
                    + " is not a directory or does not exist.");
        }
        String[] children = dir.list();
        if (children == null) {
            // list() returns null on an I/O error or if dir vanished
            // concurrently; previously this caused a NullPointerException.
            return;
        }
        for (String childName : children) {
            File child = new File(dir, childName);
            if (child.isDirectory()) {
                deleteDirContents(child);
            }
            child.delete();
        }
    }
}
|
public class class_name {
public static void deleteDirContents(File dir) {
if (!dir.isDirectory() || !dir.exists()) {
throw new IllegalArgumentException(dir.getAbsolutePath()
+ " is not a directory or does not exist.");
}
String[] children = dir.list();
for (int i = 0; i < children.length; i++) {
File f = new File(dir, children[i]);
if (f.isDirectory()) {
deleteDirContents(f);
// depends on control dependency: [if], data = [none]
}
f.delete();
// depends on control dependency: [for], data = [none]
}
} }
|
public class class_name {
    /**
     * Closes the given JDBC connection, logging (never propagating) any
     * failure. A null connection is silently ignored.
     *
     * @param con the connection to close, may be null
     */
    public static void closeConnection(Connection con) {
        if (con == null) {
            return;
        }
        try {
            con.close();
        } catch (SQLException ex) {
            logger.debug("Could not close JDBC Connection", ex);
        } catch (Throwable ex) {
            logger.debug("Unexpected exception on closing JDBC Connection", ex);
        }
    }
}
|
public class class_name {
public static void closeConnection(Connection con) {
if (con != null) {
try {
con.close();
// depends on control dependency: [try], data = [none]
} catch (SQLException ex) {
logger.debug("Could not close JDBC Connection", ex);
} catch (Throwable ex) {
// depends on control dependency: [catch], data = [none]
logger.debug("Unexpected exception on closing JDBC Connection", ex);
}
// depends on control dependency: [catch], data = [none]
}
} }
|
public class class_name {
    /**
     * Appends the given provider ARNs to this request's provider list,
     * lazily creating the list on first use.
     *
     * @param providerArns the ARNs to append
     * @return this request, for call chaining
     */
    public UpdateAuthorizerRequest withProviderArns(String... providerArns) {
        if (this.providerArns == null) {
            setProviderArns(new java.util.ArrayList<String>(providerArns.length));
        }
        java.util.Collections.addAll(this.providerArns, providerArns);
        return this;
    }
}
|
public class class_name {
public UpdateAuthorizerRequest withProviderArns(String... providerArns) {
if (this.providerArns == null) {
setProviderArns(new java.util.ArrayList<String>(providerArns.length)); // depends on control dependency: [if], data = [none]
}
for (String ele : providerArns) {
this.providerArns.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} }
|
public class class_name {
    /**
     * Parses a multiplicative expression:
     * {@code unary ((MUL | DIV | MOD | mul-extension) unary)*},
     * building a left-associative tree of binary nodes.
     *
     * @param required if {@code true} a missing first operand is an error;
     *        if {@code false} and no operand is present, {@code null} is
     *        returned
     * @return the parsed subtree, or {@code null} when optional and absent
     * @throws ScanException propagated from the scanner
     * @throws ParseException propagated from operand parsing
     */
    protected AstNode mul(boolean required) throws ScanException, ParseException {
        AstNode v = unary(required);
        if (v == null) {
            return null;
        }
        // Keep folding multiplicative operators into the accumulated tree
        // until the next token is not one of them.
        while (true) {
            switch (token.getSymbol()) {
                case MUL:
                    consumeToken();
                    v = createAstBinary(v, unary(true), AstBinary.MUL);
                    break;
                case DIV:
                    consumeToken();
                    v = createAstBinary(v, unary(true), AstBinary.DIV);
                    break;
                case MOD:
                    consumeToken();
                    v = createAstBinary(v, unary(true), AstBinary.MOD);
                    break;
                case EXTENSION:
                    // Only consume the token when the extension binds at
                    // multiplicative precedence; otherwise this deliberately
                    // falls through to the default case and returns.
                    if (getExtensionHandler(token).getExtensionPoint() == ExtensionPoint.MUL) {
                        v = getExtensionHandler(consumeToken()).createAstNode(v, unary(true));
                        break;
                    }
                default:
                    return v;
            }
        }
    }
}
|
public class class_name {
protected AstNode mul(boolean required) throws ScanException, ParseException {
AstNode v = unary(required);
if (v == null) {
return null;
}
while (true) {
switch (token.getSymbol()) {
case MUL:
consumeToken();
v = createAstBinary(v, unary(true), AstBinary.MUL);
break;
case DIV:
consumeToken();
v = createAstBinary(v, unary(true), AstBinary.DIV);
break;
case MOD:
consumeToken();
v = createAstBinary(v, unary(true), AstBinary.MOD);
break;
case EXTENSION:
if (getExtensionHandler(token).getExtensionPoint() == ExtensionPoint.MUL) {
v = getExtensionHandler(consumeToken()).createAstNode(v, unary(true)); // depends on control dependency: [if], data = [none]
break;
}
default:
return v;
}
}
} }
|
public class class_name {
    /**
     * Persists the given commit in a single JPA transaction and returns the
     * commit timestamp.
     *
     * @param commit the commit whose changes are persisted
     * @return the wall-clock timestamp (ms) recorded for this commit
     * @throws EDBException if the commit fails; if the subsequent rollback
     *         also fails, the rollback failure is reported instead and the
     *         original commit failure is lost
     */
    private Long performCommit(JPACommit commit) throws EDBException {
        // Serialize all commits on the shared entity manager.
        synchronized (entityManager) {
            long timestamp = System.currentTimeMillis();
            try {
                beginTransaction();
                persistCommitChanges(commit, timestamp);
                commitTransaction();
            } catch (Exception ex) {
                // Roll back before reporting the commit failure.
                try {
                    rollbackTransaction();
                } catch (Exception e) {
                    throw new EDBException("Failed to rollback transaction to EDB", e);
                }
                throw new EDBException("Failed to commit transaction to EDB", ex);
            }
            return timestamp;
        }
    }
}
|
public class class_name {
private Long performCommit(JPACommit commit) throws EDBException {
synchronized (entityManager) {
long timestamp = System.currentTimeMillis();
try {
beginTransaction(); // depends on control dependency: [try], data = [none]
persistCommitChanges(commit, timestamp); // depends on control dependency: [try], data = [none]
commitTransaction(); // depends on control dependency: [try], data = [none]
} catch (Exception ex) {
try {
rollbackTransaction(); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new EDBException("Failed to rollback transaction to EDB", e);
} // depends on control dependency: [catch], data = [none]
throw new EDBException("Failed to commit transaction to EDB", ex);
} // depends on control dependency: [catch], data = [none]
return timestamp;
}
} }
|
public class class_name {
    /**
     * Inserts the string into this builder at the given index, substituting
     * the configured null text when {@code str} is null. Inserting null (with
     * no null text) or an empty string is a no-op.
     *
     * @param index the insertion point, validated first
     * @param str the string to insert, may be null
     * @return this builder, for chaining
     */
    public StrBuilder insert(final int index, String str) {
        validateIndex(index);
        if (str == null) {
            str = nullText;
        }
        if (str == null || str.isEmpty()) {
            return this;
        }
        final int strLen = str.length();
        final int newSize = size + strLen;
        ensureCapacity(newSize);
        // Shift the tail right to open a gap, then copy the string into it.
        System.arraycopy(buffer, index, buffer, index + strLen, size - index);
        size = newSize;
        str.getChars(0, strLen, buffer, index);
        return this;
    }
}
|
public class class_name {
public StrBuilder insert(final int index, String str) {
validateIndex(index);
if (str == null) {
str = nullText; // depends on control dependency: [if], data = [none]
}
if (str != null) {
final int strLen = str.length();
if (strLen > 0) {
final int newSize = size + strLen;
ensureCapacity(newSize); // depends on control dependency: [if], data = [none]
System.arraycopy(buffer, index, buffer, index + strLen, size - index); // depends on control dependency: [if], data = [none]
size = newSize; // depends on control dependency: [if], data = [none]
str.getChars(0, strLen, buffer, index); // depends on control dependency: [if], data = [none]
}
}
return this;
} }
|
public class class_name {
    /**
     * Percent-encodes then XML-encodes the given URL. Null/empty input is
     * returned unchanged.
     *
     * @param urlStr the URL to encode, may be null or empty
     * @return the encoded URL, or the input itself when empty
     */
    public static String encodeUrl(final String urlStr) {
        if (Util.empty(urlStr)) {
            return urlStr;
        }
        // Percent-encode first, then XML-encode the result.
        return encode(percentEncodeUrl(urlStr));
    }
}
|
public class class_name {
public static String encodeUrl(final String urlStr) {
if (Util.empty(urlStr)) {
return urlStr; // depends on control dependency: [if], data = [none]
}
// Percent Encode
String percentEncode = percentEncodeUrl(urlStr);
// XML Enocde
return encode(percentEncode);
} }
|
public class class_name {
    /**
     * Returns the lock object associated with the given site, creating and
     * registering one on first access. Lookup and creation happen under the
     * {@code _locks} monitor, so each site gets exactly one lock object.
     */
    protected Object getLock (int siteId)
    {
        synchronized (_locks) {
            Object lock = _locks.get(siteId);
            if (lock == null) {
                // first request for this site: create its lock object
                lock = new Object();
                _locks.put(siteId, lock);
            }
            return lock;
        }
    }
}
|
public class class_name {
protected Object getLock (int siteId)
{
Object lock = null;
synchronized (_locks) {
lock = _locks.get(siteId);
// create a lock object if we haven't one already
if (lock == null) {
_locks.put(siteId, lock = new Object()); // depends on control dependency: [if], data = [none]
}
}
return lock;
} }
|
public class class_name {
    /**
     * Creates and configures a {@link Kryo} instance from the topology
     * configuration: built-in serializers first, then user registrations
     * (from the map produced by {@code normalizeKryoRegister}), then kryo
     * decorators ({@code Config.TOPOLOGY_KRYO_DECORATORS}), with factory
     * callbacks (preRegister/postRegister/postDecorate) interleaved.
     *
     * @param conf the topology configuration map
     * @return the fully configured Kryo instance
     */
    @SuppressWarnings({"rawtypes", "unchecked"})
    public static Kryo getKryo(Map conf) {
        // The factory implementation is itself chosen via configuration.
        IKryoFactory kryoFactory =
                (IKryoFactory) Utils.newInstance((String) conf.get(Config.TOPOLOGY_KRYO_FACTORY));
        Kryo k = kryoFactory.getKryo(conf);
        // Built-in registrations used by the runtime itself.
        k.register(byte[].class);
        k.register(ListDelegate.class);
        k.register(ArrayList.class, new ArrayListSerializer());
        k.register(HashMap.class, new HashMapSerializer());
        k.register(HashSet.class, new HashSetSerializer());
        k.register(BigInteger.class, new BigIntegerSerializer());
        k.register(Values.class);

        Map<String, String> registrations = normalizeKryoRegister(conf);
        kryoFactory.preRegister(k, conf);
        // When true, unknown classes in the user's registration list are
        // logged and skipped instead of failing the whole setup.
        boolean skipMissing = (Boolean) conf.get(Config.TOPOLOGY_SKIP_MISSING_KRYO_REGISTRATIONS);
        for (String klassName : registrations.keySet()) {
            String serializerClassName = registrations.get(klassName);
            try {
                Class klass = Class.forName(klassName);
                Class serializerClass = null;
                if (serializerClassName != null) {
                    serializerClass = Class.forName(serializerClassName);
                }
                LOG.info("Doing kryo.register for class " + klass);
                if (serializerClass == null) {
                    // No custom serializer configured: use Kryo's default.
                    k.register(klass);
                } else {
                    k.register(klass, resolveSerializerInstance(k, klass, serializerClass));
                }
            } catch (ClassNotFoundException e) {
                if (skipMissing) {
                    LOG.info("Could not find serialization or class for "
                            + serializerClassName + ". Skipping registration...");
                } else {
                    throw new RuntimeException(e);
                }
            }
        }
        kryoFactory.postRegister(k, conf);
        if (conf.get(Config.TOPOLOGY_KRYO_DECORATORS) != null) {
            for (String klassName : (List<String>) conf.get(Config.TOPOLOGY_KRYO_DECORATORS)) {
                try {
                    Class klass = Class.forName(klassName);
                    IKryoDecorator decorator = (IKryoDecorator) klass.newInstance();
                    decorator.decorate(k);
                } catch (ClassNotFoundException e) {
                    if (skipMissing) {
                        LOG.info("Could not find kryo decorator named "
                                + klassName + ". Skipping registration...");
                    } else {
                        throw new RuntimeException(e);
                    }
                } catch (InstantiationException e) {
                    throw new RuntimeException(e);
                } catch (IllegalAccessException e) {
                    throw new RuntimeException(e);
                }
            }
        }
        kryoFactory.postDecorate(k, conf);
        return k;
    }
}
|
public class class_name {
@SuppressWarnings({"rawtypes", "unchecked"})
public static Kryo getKryo(Map conf) {
IKryoFactory kryoFactory =
(IKryoFactory) Utils.newInstance((String) conf.get(Config.TOPOLOGY_KRYO_FACTORY));
Kryo k = kryoFactory.getKryo(conf);
k.register(byte[].class);
k.register(ListDelegate.class);
k.register(ArrayList.class, new ArrayListSerializer());
k.register(HashMap.class, new HashMapSerializer());
k.register(HashSet.class, new HashSetSerializer());
k.register(BigInteger.class, new BigIntegerSerializer());
// k.register(TransactionAttempt.class);
k.register(Values.class);
// k.register(backtype.storm.metric.api.IMetricsConsumer.DataPoint.class);
// k.register(backtype.storm.metric.api.IMetricsConsumer.TaskInfo.class);
/*
try {
JavaBridge.registerPrimitives(k);
JavaBridge.registerCollections(k);
} catch(Exception e) {
throw new RuntimeException(e);
}
*/
Map<String, String> registrations = normalizeKryoRegister(conf);
kryoFactory.preRegister(k, conf);
boolean skipMissing = (Boolean) conf.get(Config.TOPOLOGY_SKIP_MISSING_KRYO_REGISTRATIONS);
for (String klassName : registrations.keySet()) {
String serializerClassName = registrations.get(klassName);
try {
Class klass = Class.forName(klassName);
Class serializerClass = null; // depends on control dependency: [try], data = [none]
if (serializerClassName != null) {
serializerClass = Class.forName(serializerClassName); // depends on control dependency: [if], data = [(serializerClassName]
}
LOG.info("Doing kryo.register for class " + klass); // depends on control dependency: [try], data = [none]
if (serializerClass == null) {
k.register(klass); // depends on control dependency: [if], data = [none]
} else {
k.register(klass, resolveSerializerInstance(k, klass, serializerClass)); // depends on control dependency: [if], data = [none]
}
} catch (ClassNotFoundException e) {
if (skipMissing) {
LOG.info("Could not find serialization or class for "
+ serializerClassName + ". Skipping registration..."); // depends on control dependency: [if], data = [none]
} else {
throw new RuntimeException(e);
}
} // depends on control dependency: [catch], data = [none]
}
kryoFactory.postRegister(k, conf);
if (conf.get(Config.TOPOLOGY_KRYO_DECORATORS) != null) {
for (String klassName : (List<String>) conf.get(Config.TOPOLOGY_KRYO_DECORATORS)) {
try {
Class klass = Class.forName(klassName);
IKryoDecorator decorator = (IKryoDecorator) klass.newInstance();
decorator.decorate(k);
} catch (ClassNotFoundException e) {
if (skipMissing) {
LOG.info("Could not find kryo decorator named "
+ klassName + ". Skipping registration..."); // depends on control dependency: [if], data = [none]
} else {
throw new RuntimeException(e);
}
} catch (InstantiationException e) { // depends on control dependency: [catch], data = [none]
throw new RuntimeException(e);
} catch (IllegalAccessException e) { // depends on control dependency: [catch], data = [none]
throw new RuntimeException(e);
} // depends on control dependency: [catch], data = [none]
}
}
kryoFactory.postDecorate(k, conf);
return k;
} }
|
public class class_name {
    /**
     * Collects, in depth-first order, the given node plus every descendant
     * whose rule matches the given rule name.
     *
     * @param node the subtree root to search
     * @param ruleName the rule to match
     * @return all matching nodes, possibly empty
     */
    public static Collection<ParserRuleContext> getAllDescendantNodes(final ParserRuleContext node, final RuleName ruleName) {
        Collection<ParserRuleContext> matched = new LinkedList<>();
        if (isMatchedNode(node, ruleName)) {
            matched.add(node);
        }
        getChildrenNodes(node).forEach(child -> matched.addAll(getAllDescendantNodes(child, ruleName)));
        return matched;
    }
}
|
public class class_name {
public static Collection<ParserRuleContext> getAllDescendantNodes(final ParserRuleContext node, final RuleName ruleName) {
Collection<ParserRuleContext> result = new LinkedList<>();
if (isMatchedNode(node, ruleName)) {
result.add(node); // depends on control dependency: [if], data = [none]
}
for (ParserRuleContext each : getChildrenNodes(node)) {
result.addAll(getAllDescendantNodes(each, ruleName)); // depends on control dependency: [for], data = [each]
}
return result;
} }
|
public class class_name {
    /**
     * Registers the given monitors, skipping any that are already
     * registered so no duplicates are added.
     */
    public void addMonitor(ThreadedMonitor... monitors) {
        for (ThreadedMonitor candidate : monitors) {
            if (this.monitors.contains(candidate)) {
                continue;
            }
            this.monitors.add(candidate);
        }
    }
}
|
public class class_name {
public void addMonitor(ThreadedMonitor... monitors) {
for (ThreadedMonitor monitor : monitors) {
if (!this.monitors.contains(monitor)) {
this.monitors.add(monitor); // depends on control dependency: [if], data = [none]
}
}
} }
|
public class class_name {
    /**
     * Fetches the commerce tax method matching the group and engine key, or
     * {@code null} if there is none.
     *
     * @param groupId the group ID
     * @param engineKey the engine key; null, empty and non-empty keys each
     *        select a different SQL fragment
     * @param retrieveFromCache whether to consult the finder cache first
     * @return the matching entity, or {@code null} when no match exists
     */
    @Override
    public CommerceTaxMethod fetchByG_E(long groupId, String engineKey,
        boolean retrieveFromCache) {
        Object[] finderArgs = new Object[] { groupId, engineKey };

        Object result = null;

        if (retrieveFromCache) {
            result = finderCache.getResult(FINDER_PATH_FETCH_BY_G_E,
                    finderArgs, this);
        }

        // A cached entity is only trusted if its fields still match the
        // arguments (guards against stale or colliding cache entries).
        if (result instanceof CommerceTaxMethod) {
            CommerceTaxMethod commerceTaxMethod = (CommerceTaxMethod)result;

            if ((groupId != commerceTaxMethod.getGroupId()) ||
                    !Objects.equals(engineKey, commerceTaxMethod.getEngineKey())) {
                result = null;
            }
        }

        if (result == null) {
            StringBundler query = new StringBundler(4);

            query.append(_SQL_SELECT_COMMERCETAXMETHOD_WHERE);

            query.append(_FINDER_COLUMN_G_E_GROUPID_2);

            boolean bindEngineKey = false;

            // Only the non-empty key case binds a query parameter.
            if (engineKey == null) {
                query.append(_FINDER_COLUMN_G_E_ENGINEKEY_1);
            }
            else if (engineKey.equals("")) {
                query.append(_FINDER_COLUMN_G_E_ENGINEKEY_3);
            }
            else {
                bindEngineKey = true;

                query.append(_FINDER_COLUMN_G_E_ENGINEKEY_2);
            }

            String sql = query.toString();

            Session session = null;

            try {
                session = openSession();

                Query q = session.createQuery(sql);

                QueryPos qPos = QueryPos.getInstance(q);

                qPos.add(groupId);

                if (bindEngineKey) {
                    qPos.add(engineKey);
                }

                List<CommerceTaxMethod> list = q.list();

                if (list.isEmpty()) {
                    // Cache the empty list so repeated misses skip the query.
                    finderCache.putResult(FINDER_PATH_FETCH_BY_G_E, finderArgs,
                        list);
                }
                else {
                    CommerceTaxMethod commerceTaxMethod = list.get(0);

                    result = commerceTaxMethod;

                    cacheResult(commerceTaxMethod);
                }
            }
            catch (Exception e) {
                finderCache.removeResult(FINDER_PATH_FETCH_BY_G_E, finderArgs);

                throw processException(e);
            }
            finally {
                closeSession(session);
            }
        }

        // A cached List marks a previous miss; translate it back to null.
        if (result instanceof List<?>) {
            return null;
        }
        else {
            return (CommerceTaxMethod)result;
        }
    }
}
|
public class class_name {
@Override
public CommerceTaxMethod fetchByG_E(long groupId, String engineKey,
boolean retrieveFromCache) {
Object[] finderArgs = new Object[] { groupId, engineKey };
Object result = null;
if (retrieveFromCache) {
result = finderCache.getResult(FINDER_PATH_FETCH_BY_G_E,
finderArgs, this); // depends on control dependency: [if], data = [none]
}
if (result instanceof CommerceTaxMethod) {
CommerceTaxMethod commerceTaxMethod = (CommerceTaxMethod)result;
if ((groupId != commerceTaxMethod.getGroupId()) ||
!Objects.equals(engineKey, commerceTaxMethod.getEngineKey())) {
result = null; // depends on control dependency: [if], data = [none]
}
}
if (result == null) {
StringBundler query = new StringBundler(4);
query.append(_SQL_SELECT_COMMERCETAXMETHOD_WHERE); // depends on control dependency: [if], data = [none]
query.append(_FINDER_COLUMN_G_E_GROUPID_2); // depends on control dependency: [if], data = [none]
boolean bindEngineKey = false;
if (engineKey == null) {
query.append(_FINDER_COLUMN_G_E_ENGINEKEY_1); // depends on control dependency: [if], data = [none]
}
else if (engineKey.equals("")) {
query.append(_FINDER_COLUMN_G_E_ENGINEKEY_3); // depends on control dependency: [if], data = [none]
}
else {
bindEngineKey = true; // depends on control dependency: [if], data = [none]
query.append(_FINDER_COLUMN_G_E_ENGINEKEY_2); // depends on control dependency: [if], data = [none]
}
String sql = query.toString();
Session session = null;
try {
session = openSession(); // depends on control dependency: [try], data = [none]
Query q = session.createQuery(sql);
QueryPos qPos = QueryPos.getInstance(q);
qPos.add(groupId); // depends on control dependency: [try], data = [none]
if (bindEngineKey) {
qPos.add(engineKey); // depends on control dependency: [if], data = [none]
}
List<CommerceTaxMethod> list = q.list();
if (list.isEmpty()) {
finderCache.putResult(FINDER_PATH_FETCH_BY_G_E, finderArgs,
list); // depends on control dependency: [if], data = [none]
}
else {
CommerceTaxMethod commerceTaxMethod = list.get(0);
result = commerceTaxMethod; // depends on control dependency: [if], data = [none]
cacheResult(commerceTaxMethod); // depends on control dependency: [if], data = [none]
}
}
catch (Exception e) {
finderCache.removeResult(FINDER_PATH_FETCH_BY_G_E, finderArgs);
throw processException(e);
} // depends on control dependency: [catch], data = [none]
finally {
closeSession(session);
}
}
if (result instanceof List<?>) {
return null; // depends on control dependency: [if], data = [none]
}
else {
return (CommerceTaxMethod)result; // depends on control dependency: [if], data = [)]
}
} }
|
public class class_name {
    /**
     * Asynchronously notifies every registered {@link ConsumerStateListener}
     * that the consumer has become unavailable. A listener failure is logged
     * and does not stop notification of the remaining listeners.
     */
    public void notifyStateChangeToUnavailable() {
        final List<ConsumerStateListener> onAvailable = consumerConfig.getOnAvailable();
        if (onAvailable != null) {
            AsyncRuntime.getAsyncThreadPool().execute(new Runnable() {
                @Override
                public void run() {
                    // Notify the listeners of the state change.
                    for (ConsumerStateListener listener : onAvailable) {
                        try {
                            listener.onUnavailable(consumerConfig.getConsumerBootstrap().getProxyIns());
                        } catch (Exception e) {
                            // NOTE(review): the caught exception is not passed to
                            // the logger, so the failure's stack trace is lost.
                            LOGGER.errorWithApp(consumerConfig.getAppName(),
                                "Failed to notify consumer state listener when state change to unavailable");
                        }
                    }
                }
            });
        }
    }
}
|
public class class_name {
public void notifyStateChangeToUnavailable() {
final List<ConsumerStateListener> onAvailable = consumerConfig.getOnAvailable();
if (onAvailable != null) {
AsyncRuntime.getAsyncThreadPool().execute(new Runnable() {
@Override
public void run() {
// 状态变化通知监听器
for (ConsumerStateListener listener : onAvailable) {
try {
listener.onUnavailable(consumerConfig.getConsumerBootstrap().getProxyIns()); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
LOGGER.errorWithApp(consumerConfig.getAppName(),
"Failed to notify consumer state listener when state change to unavailable");
} // depends on control dependency: [catch], data = [none]
}
}
}); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
    /**
     * Tracks whether more than one action writes to stdout. The first
     * stdout action sets {@code quiet}; any subsequent stdout action
     * triggers a warning.
     */
    private void checkDuplicateStdOutOutput(DumpProcessingAction newAction) {
        if (!newAction.useStdOut()) {
            return;
        }
        if (this.quiet) {
            logger.warn("Multiple actions are using stdout as output destination.");
        }
        this.quiet = true;
    }
}
|
public class class_name {
private void checkDuplicateStdOutOutput(DumpProcessingAction newAction) {
if (newAction.useStdOut()) {
if (this.quiet) {
logger.warn("Multiple actions are using stdout as output destination."); // depends on control dependency: [if], data = [none]
}
this.quiet = true; // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
    /**
     * Returns a random long in {@code [startInclusive, endExclusive)}; when
     * the bounds are equal, returns {@code startInclusive} itself.
     *
     * @throws IllegalArgumentException if start exceeds end (via checkArgument)
     */
    public static long randomLong(long startInclusive, long endExclusive) {
        checkArgument(startInclusive <= endExclusive, "End must be greater than or equal to start");
        return startInclusive == endExclusive
                ? startInclusive
                : RANDOM.longs(1, startInclusive, endExclusive).sum();
    }
}
|
public class class_name {
public static long randomLong(long startInclusive, long endExclusive) {
checkArgument(startInclusive <= endExclusive, "End must be greater than or equal to start");
if (startInclusive == endExclusive) {
return startInclusive; // depends on control dependency: [if], data = [none]
}
return RANDOM.longs(1, startInclusive, endExclusive).sum();
} }
|
public class class_name {
    /**
     * Merges the persisted child properties of {@code rootData} with the
     * transient (session-local) changes recorded in {@code changesLog}.
     *
     * @param rootData parent node whose properties are merged
     * @param childProperties the persisted child properties
     * @param dataManager not referenced in this implementation — presumably
     *        part of the overridable contract; confirm against subclasses
     * @return the merged property list; the input list itself when there are
     *         no transient changes
     * @throws RepositoryException declared for overrides
     */
    protected List<? extends ItemData> mergeProps(ItemData rootData, List<PropertyData> childProperties,
        DataManager dataManager) throws RepositoryException
    {
        // 1 get all transient descendants
        Collection<ItemState> transientDescendants = changesLog.getLastChildrenStates(rootData, false);

        if (!transientDescendants.isEmpty())
        {
            // LinkedHashMap keeps insertion order so persisted ordering
            // survives the merge.
            Map<String, ItemData> descendants = new LinkedHashMap<String, ItemData>();

            // Copy persisted properties, skipping any property shadowed (same
            // path depth and name) by a live, non-deleted transient property.
            outer : for (int i = 0, length = childProperties.size(); i < length; i++)
            {
                ItemData childProp = childProperties.get(i);
                for (ItemState transientState : transientDescendants)
                {
                    if (!transientState.isNode() && !transientState.isDeleted()
                        && transientState.getData().getQPath().getDepth() == childProp.getQPath().getDepth()
                        && transientState.getData().getQPath().getName().equals(childProp.getQPath().getName()))
                    {
                        continue outer;
                    }
                }
                descendants.put(childProp.getIdentifier(), childProp);
            }

            // merge data: transient adds/updates win, deletions remove.
            for (ItemState state : transientDescendants)
            {
                ItemData data = state.getData();
                if (!state.isDeleted())
                {
                    descendants.put(data.getIdentifier(), data);
                }
                else
                {
                    descendants.remove(data.getIdentifier());
                }
            }
            Collection<ItemData> desc = descendants.values();
            return new ArrayList<ItemData>(desc);
        }
        else
        {
            return childProperties;
        }
    }
}
|
public class class_name {
protected List<? extends ItemData> mergeProps(ItemData rootData, List<PropertyData> childProperties,
DataManager dataManager) throws RepositoryException
{
// 1 get all transient descendants
Collection<ItemState> transientDescendants = changesLog.getLastChildrenStates(rootData, false);
if (!transientDescendants.isEmpty())
{
Map<String, ItemData> descendants = new LinkedHashMap<String, ItemData>();
outer : for (int i = 0, length = childProperties.size(); i < length; i++)
{
ItemData childProp = childProperties.get(i);
for (ItemState transientState : transientDescendants)
{
if (!transientState.isNode() && !transientState.isDeleted()
&& transientState.getData().getQPath().getDepth() == childProp.getQPath().getDepth()
&& transientState.getData().getQPath().getName().equals(childProp.getQPath().getName()))
{
continue outer;
}
}
descendants.put(childProp.getIdentifier(), childProp);
}
// merge data
for (ItemState state : transientDescendants)
{
ItemData data = state.getData();
if (!state.isDeleted())
{
descendants.put(data.getIdentifier(), data); // depends on control dependency: [if], data = [none]
}
else
{
descendants.remove(data.getIdentifier()); // depends on control dependency: [if], data = [none]
}
}
Collection<ItemData> desc = descendants.values();
return new ArrayList<ItemData>(desc);
}
else
{
return childProperties;
}
} }
|
public class class_name {
    /**
     * Notifies every connection listener that a reconnection attempt failed.
     * Does nothing when reconnection is not allowed.
     *
     * @param exception the failure to report to the listeners
     */
    protected void notifyReconnectionFailed(Exception exception) {
        if (!isReconnectionAllowed()) {
            return;
        }
        for (ConnectionListener listener : connection.connectionListeners) {
            listener.reconnectionFailed(exception);
        }
    }
}
|
public class class_name {
protected void notifyReconnectionFailed(Exception exception) {
if (isReconnectionAllowed()) {
for (ConnectionListener listener : connection.connectionListeners) {
listener.reconnectionFailed(exception); // depends on control dependency: [for], data = [listener]
}
}
} }
|
public class class_name {
    /**
     * Emits a type declaration ({@code class}, {@code interface}, ...) and
     * opens its scope on the scope/type stacks.
     *
     * @param type fully qualified name of the declared type
     * @param kind declaration keyword, e.g. "class" or "interface"
     * @param modifiers modifiers emitted before the keyword
     * @param extendsType supertype to extend, or null for none
     * @param implementsTypes interfaces to implement, emitted comma-separated
     * @return this writer, for call chaining
     * @throws IOException if the underlying writer fails
     */
    public JavaWriter beginType(String type, String kind, Set<Modifier> modifiers, String extendsType,
            String... implementsTypes) throws IOException {
        indent();
        emitModifiers(modifiers);
        out.write(kind);
        out.write(" ");
        emitCompressedType(type);
        if (extendsType != null) {
            out.write(" extends ");
            emitCompressedType(extendsType);
        }
        if (implementsTypes.length > 0) {
            // The implements clause goes on its own indented line.
            out.write("\n");
            indent();
            out.write("    implements ");
            for (int i = 0; i < implementsTypes.length; i++) {
                if (i != 0) {
                    out.write(", ");
                }
                emitCompressedType(implementsTypes[i]);
            }
        }
        out.write(" {\n");
        // Interfaces get their own scope kind; everything else is a type scope.
        scopes.push("interface".equals(kind) ? Scope.INTERFACE_DECLARATION : Scope.TYPE_DECLARATION);
        types.push(type);
        return this;
    }
}
|
public class class_name {
public JavaWriter beginType(String type, String kind, Set<Modifier> modifiers, String extendsType,
String... implementsTypes) throws IOException {
indent();
emitModifiers(modifiers);
out.write(kind);
out.write(" ");
emitCompressedType(type);
if (extendsType != null) {
out.write(" extends "); // depends on control dependency: [if], data = [none]
emitCompressedType(extendsType); // depends on control dependency: [if], data = [(extendsType]
}
if (implementsTypes.length > 0) {
out.write("\n"); // depends on control dependency: [if], data = [none]
indent(); // depends on control dependency: [if], data = [none]
out.write(" implements "); // depends on control dependency: [if], data = [none]
for (int i = 0; i < implementsTypes.length; i++) {
if (i != 0) {
out.write(", "); // depends on control dependency: [if], data = [none]
}
emitCompressedType(implementsTypes[i]); // depends on control dependency: [for], data = [i]
}
}
out.write(" {\n");
scopes.push("interface".equals(kind) ? Scope.INTERFACE_DECLARATION : Scope.TYPE_DECLARATION);
types.push(type);
return this;
} }
|
public class class_name {
    /**
     * Marshals every field of the given {@link ScalingPolicy} through the
     * protocol marshaller using the per-field binding constants.
     *
     * @param scalingPolicy the value to marshall; must not be null
     * @param protocolMarshaller the target marshaller
     * @throws SdkClientException if {@code scalingPolicy} is null, or
     *         wrapping any failure raised while marshalling
     */
    public void marshall(ScalingPolicy scalingPolicy, ProtocolMarshaller protocolMarshaller) {
        if (scalingPolicy == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(scalingPolicy.getFleetId(), FLEETID_BINDING);
            protocolMarshaller.marshall(scalingPolicy.getName(), NAME_BINDING);
            protocolMarshaller.marshall(scalingPolicy.getStatus(), STATUS_BINDING);
            protocolMarshaller.marshall(scalingPolicy.getScalingAdjustment(), SCALINGADJUSTMENT_BINDING);
            protocolMarshaller.marshall(scalingPolicy.getScalingAdjustmentType(), SCALINGADJUSTMENTTYPE_BINDING);
            protocolMarshaller.marshall(scalingPolicy.getComparisonOperator(), COMPARISONOPERATOR_BINDING);
            protocolMarshaller.marshall(scalingPolicy.getThreshold(), THRESHOLD_BINDING);
            protocolMarshaller.marshall(scalingPolicy.getEvaluationPeriods(), EVALUATIONPERIODS_BINDING);
            protocolMarshaller.marshall(scalingPolicy.getMetricName(), METRICNAME_BINDING);
            protocolMarshaller.marshall(scalingPolicy.getPolicyType(), POLICYTYPE_BINDING);
            protocolMarshaller.marshall(scalingPolicy.getTargetConfiguration(), TARGETCONFIGURATION_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
|
public class class_name {
public void marshall(ScalingPolicy scalingPolicy, ProtocolMarshaller protocolMarshaller) {
if (scalingPolicy == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(scalingPolicy.getFleetId(), FLEETID_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(scalingPolicy.getName(), NAME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(scalingPolicy.getStatus(), STATUS_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(scalingPolicy.getScalingAdjustment(), SCALINGADJUSTMENT_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(scalingPolicy.getScalingAdjustmentType(), SCALINGADJUSTMENTTYPE_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(scalingPolicy.getComparisonOperator(), COMPARISONOPERATOR_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(scalingPolicy.getThreshold(), THRESHOLD_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(scalingPolicy.getEvaluationPeriods(), EVALUATIONPERIODS_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(scalingPolicy.getMetricName(), METRICNAME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(scalingPolicy.getPolicyType(), POLICYTYPE_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(scalingPolicy.getTargetConfiguration(), TARGETCONFIGURATION_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
    /**
     * Rebalances after a change in the right subtree of a red-black node and
     * returns the subtree root that replaces {@code node}.
     *
     * <p>Cases, as implemented below:
     * right child and right-right grandchild both red — single left rotation,
     * blackening the outer grandchild; right child and right-left grandchild
     * both red — double rotation lifting the inner grandchild; otherwise no
     * rotation, {@code node} is simply rebuilt as black.
     */
    private static <K, N extends Node<K, N>> N balanceRight(UpdateContext<? super N> currentContext, N node, N left, N right) {
        if (isRed(right) && isRed(right.right)) {
            // Outer (right-right) red-red violation: rotate left.
            N newLeft= node.edit(currentContext, BLACK, left, right.left);
            return right.edit(currentContext, RED, newLeft, right.right.blacken(currentContext));
        }
        else if (isRed(right) && isRed(right.left)) {
            // Inner (right-left) red-red violation: double rotation.
            N rightLeft = right.left;
            N newLeft = node.edit(currentContext, BLACK, left, rightLeft.left);
            N newRight = right.edit(currentContext, BLACK, rightLeft.right, right.right);
            return rightLeft.edit(currentContext, RED, newLeft, newRight);
        }
        else {
            return node.edit(currentContext, BLACK, left, right);
        }
    }
}
|
public class class_name {
private static <K, N extends Node<K, N>> N balanceRight(UpdateContext<? super N> currentContext, N node, N left, N right) {
if (isRed(right) && isRed(right.right)) {
N newLeft= node.edit(currentContext, BLACK, left, right.left);
return right.edit(currentContext, RED, newLeft, right.right.blacken(currentContext)); // depends on control dependency: [if], data = [none]
}
else if (isRed(right) && isRed(right.left)) {
N rightLeft = right.left;
N newLeft = node.edit(currentContext, BLACK, left, rightLeft.left);
N newRight = right.edit(currentContext, BLACK, rightLeft.right, right.right);
return rightLeft.edit(currentContext, RED, newLeft, newRight); // depends on control dependency: [if], data = [none]
}
else {
return node.edit(currentContext, BLACK, left, right); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
public static <T> Set<T> toSet(Iterable<T> iterable)
{
if (iterable == null)
{
return null;
}
else if (iterable instanceof Set)
{
return (Set<T>) iterable;
}
else
{
Set<T> list = new LinkedHashSet<>();
for (T obj : iterable)
{
list.add(obj);
}
return list;
}
} }
|
public class class_name {
public static <T> Set<T> toSet(Iterable<T> iterable)
{
if (iterable == null)
{
return null; // depends on control dependency: [if], data = [none]
}
else if (iterable instanceof Set)
{
return (Set<T>) iterable; // depends on control dependency: [if], data = [none]
}
else
{
Set<T> list = new LinkedHashSet<>();
for (T obj : iterable)
{
list.add(obj); // depends on control dependency: [for], data = [obj]
}
return list; // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
public void setMin(final T pmin) {
if (pmin == null) {
getInputElement().removeAttribute("min");
} else {
getInputElement().setMin(this.numberRenderer.render(pmin));
}
} }
|
public class class_name {
public void setMin(final T pmin) {
if (pmin == null) {
getInputElement().removeAttribute("min"); // depends on control dependency: [if], data = [none]
} else {
getInputElement().setMin(this.numberRenderer.render(pmin)); // depends on control dependency: [if], data = [(pmin]
}
} }
|
public class class_name {
public void swapColumns(int i, int j) {
if (i != j) {
Vector ii = getColumn(i);
Vector jj = getColumn(j);
setColumn(i, jj);
setColumn(j, ii);
}
} }
|
public class class_name {
public void swapColumns(int i, int j) {
if (i != j) {
Vector ii = getColumn(i);
Vector jj = getColumn(j);
setColumn(i, jj); // depends on control dependency: [if], data = [(i]
setColumn(j, ii); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
@FFDCIgnore({ IOException.class })
private void flushUpgradedOutputBuffers() throws IOException {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "flushUpgraded: Flushing buffers for Upgraded output: " + this);
}
final boolean writingBody = (hasBufferedContent());
// flip the last buffer for the write...
if (writingBody && null != this._output[this.outputIndex]) {
this._output[this.outputIndex].flip();
}
try {
WsByteBuffer[] content = (writingBody) ? this._output : null;
// write it out to TCP
if(content != null) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "flushUpgraded:: Now write the content ");
}
_tcpContext.getWriteInterface().setBuffers(content);
_tcpContext.getWriteInterface().write(TCPWriteRequestContext.WRITE_ALL_DATA, WCCustomProperties31.UPGRADE_WRITE_TIMEOUT);
}
else{
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "flushUpgraded: No more data to flush ");
}
}
} catch (IOException ioe) {
this.error = ioe;
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "flushUpgraded: Received exception during write: " + ioe);
}
throw ioe;
} finally {
this.bytesWritten += this.bufferedCount;
this.bufferedCount = 0;
this.outputIndex = 0;
// Note: this logic only works for sync writes
if (writingBody) {
if (null != this._output){
if (null != this._output[0]) {
this._output[0].clear();
}
for (int i = 1; i < this._output.length; i++) {
if (null != this._output[i]) {
// mark them empty so later writes don't mistake them
// as having content
this._output[i].position(0);
this._output[i].limit(0);
}
}
}
}
// disconnect write buffers in TCP when done
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "flushUpgraded: disconnect write buffers in TCP when done");
}
_tcpContext.getWriteInterface().setBuffers(null);
}
} }
|
public class class_name {
@FFDCIgnore({ IOException.class })
private void flushUpgradedOutputBuffers() throws IOException {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "flushUpgraded: Flushing buffers for Upgraded output: " + this);
}
final boolean writingBody = (hasBufferedContent());
// flip the last buffer for the write...
if (writingBody && null != this._output[this.outputIndex]) {
this._output[this.outputIndex].flip();
}
try {
WsByteBuffer[] content = (writingBody) ? this._output : null;
// write it out to TCP
if(content != null) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "flushUpgraded:: Now write the content "); // depends on control dependency: [if], data = [none]
}
_tcpContext.getWriteInterface().setBuffers(content); // depends on control dependency: [if], data = [(content]
_tcpContext.getWriteInterface().write(TCPWriteRequestContext.WRITE_ALL_DATA, WCCustomProperties31.UPGRADE_WRITE_TIMEOUT); // depends on control dependency: [if], data = [none]
}
else{
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "flushUpgraded: No more data to flush "); // depends on control dependency: [if], data = [none]
}
}
} catch (IOException ioe) {
this.error = ioe;
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "flushUpgraded: Received exception during write: " + ioe); // depends on control dependency: [if], data = [none]
}
throw ioe;
} finally {
this.bytesWritten += this.bufferedCount;
this.bufferedCount = 0;
this.outputIndex = 0;
// Note: this logic only works for sync writes
if (writingBody) {
if (null != this._output){
if (null != this._output[0]) {
this._output[0].clear(); // depends on control dependency: [if], data = [none]
}
for (int i = 1; i < this._output.length; i++) {
if (null != this._output[i]) {
// mark them empty so later writes don't mistake them
// as having content
this._output[i].position(0); // depends on control dependency: [if], data = [none]
this._output[i].limit(0); // depends on control dependency: [if], data = [none]
}
}
}
}
// disconnect write buffers in TCP when done
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "flushUpgraded: disconnect write buffers in TCP when done"); // depends on control dependency: [if], data = [none]
}
_tcpContext.getWriteInterface().setBuffers(null);
}
} }
|
public class class_name {
public static synchronized <T> T createNiceMockAndExpectNew(Class<T> type, Object... arguments) throws Exception {
T mock = createNiceMock(type);
IExpectationSetters<T> expectationSetters = expectNiceNew(type, arguments);
if (expectationSetters != null) {
expectationSetters.andReturn(mock);
}
return mock;
} }
|
public class class_name {
public static synchronized <T> T createNiceMockAndExpectNew(Class<T> type, Object... arguments) throws Exception {
T mock = createNiceMock(type);
IExpectationSetters<T> expectationSetters = expectNiceNew(type, arguments);
if (expectationSetters != null) {
expectationSetters.andReturn(mock); // depends on control dependency: [if], data = [none]
}
return mock;
} }
|
public class class_name {
public List<MetricBean> listCounterMetrics(String tenantId) {
try {
URL endpoint = serverUrl.toURI().resolve("counters").toURL(); //$NON-NLS-1$
Request request = new Request.Builder()
.url(endpoint)
.header("Accept", "application/json") //$NON-NLS-1$ //$NON-NLS-2$
.header("Hawkular-Tenant", tenantId) //$NON-NLS-1$
.build();
Response response = httpClient.newCall(request).execute();
if (response.code() >= 400) {
throw hawkularMetricsError(response);
}
if (response.code() == 204) {
return Collections.EMPTY_LIST;
}
String responseBody = response.body().string();
return readMapper.reader(new TypeReference<List<MetricBean>>() {}).readValue(responseBody);
} catch (URISyntaxException | IOException e) {
throw new RuntimeException(e);
}
} }
|
public class class_name {
public List<MetricBean> listCounterMetrics(String tenantId) {
try {
URL endpoint = serverUrl.toURI().resolve("counters").toURL(); //$NON-NLS-1$
Request request = new Request.Builder()
.url(endpoint)
.header("Accept", "application/json") //$NON-NLS-1$ //$NON-NLS-2$
.header("Hawkular-Tenant", tenantId) //$NON-NLS-1$
.build();
Response response = httpClient.newCall(request).execute();
if (response.code() >= 400) {
throw hawkularMetricsError(response);
}
if (response.code() == 204) {
return Collections.EMPTY_LIST; // depends on control dependency: [if], data = [none]
}
String responseBody = response.body().string();
return readMapper.reader(new TypeReference<List<MetricBean>>() {}).readValue(responseBody); // depends on control dependency: [try], data = [none]
} catch (URISyntaxException | IOException e) {
throw new RuntimeException(e);
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
public static boolean sameSubject(SubjectNode s1, SubjectNode s2) {
if (s1 instanceof URIReference && s2 instanceof URIReference) {
return sameResource((URIReference) s1, (URIReference) s2);
} else {
return false;
}
} }
|
public class class_name {
public static boolean sameSubject(SubjectNode s1, SubjectNode s2) {
if (s1 instanceof URIReference && s2 instanceof URIReference) {
return sameResource((URIReference) s1, (URIReference) s2); // depends on control dependency: [if], data = [none]
} else {
return false; // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
private Set<String> getTablePrimaryKeys(
String catalog, String schema, String table) {
TableKey tableKey = new TableKey(catalog, schema, table);
Set<String> primaryKeys = tablePrimaryKeysCache.get(tableKey);
if (primaryKeys == null) {
primaryKeys = loadAndCachePrimaryKeysForTable(tableKey);
}
return primaryKeys;
} }
|
public class class_name {
private Set<String> getTablePrimaryKeys(
String catalog, String schema, String table) {
TableKey tableKey = new TableKey(catalog, schema, table);
Set<String> primaryKeys = tablePrimaryKeysCache.get(tableKey);
if (primaryKeys == null) {
primaryKeys = loadAndCachePrimaryKeysForTable(tableKey); // depends on control dependency: [if], data = [none]
}
return primaryKeys;
} }
|
public class class_name {
private final void pollHeartbeat()
throws LockFile.FileSecurityException,
LockFile.LockHeldExternallyException,
LockFile.UnexpectedFileNotFoundException,
LockFile.UnexpectedEndOfFileException,
LockFile.UnexpectedFileIOException, LockFile.WrongLengthException,
LockFile.WrongMagicException {
boolean success = false;
int retries = getPollHeartbeatRetries();
long interval = getPollHeartbeatInterval();
LockFile.BaseException reason = null;
for (int i = retries; i > 0; i--) {
try {
checkHeartbeat(true); // withCreateNewFile == true
success = true;
break;
} catch (LockFile.BaseException ex) {
reason = ex;
}
// We get here if and only if success == false and reason != null,
// so its OK to 'break'
try {
Thread.sleep(interval);
} catch (InterruptedException ex) {
break;
}
}
/**
* @todo:
* Do not want to specify just BaseException in the throws clause.
* Is this really the cleanest way?
*/
if (!success) {
if (reason instanceof FileSecurityException) {
throw (FileSecurityException) reason;
} else if (reason instanceof LockHeldExternallyException) {
throw (LockHeldExternallyException) reason;
} else if (reason instanceof UnexpectedFileNotFoundException) {
throw (UnexpectedFileNotFoundException) reason;
} else if (reason instanceof UnexpectedEndOfFileException) {
throw (UnexpectedEndOfFileException) reason;
} else if (reason instanceof UnexpectedFileIOException) {
throw (UnexpectedFileIOException) reason;
} else if (reason instanceof WrongLengthException) {
throw (WrongLengthException) reason;
} else if (reason instanceof WrongMagicException) {
throw (WrongMagicException) reason;
}
}
} }
|
public class class_name {
private final void pollHeartbeat()
throws LockFile.FileSecurityException,
LockFile.LockHeldExternallyException,
LockFile.UnexpectedFileNotFoundException,
LockFile.UnexpectedEndOfFileException,
LockFile.UnexpectedFileIOException, LockFile.WrongLengthException,
LockFile.WrongMagicException {
boolean success = false;
int retries = getPollHeartbeatRetries();
long interval = getPollHeartbeatInterval();
LockFile.BaseException reason = null;
for (int i = retries; i > 0; i--) {
try {
checkHeartbeat(true); // withCreateNewFile == true // depends on control dependency: [try], data = [none]
success = true; // depends on control dependency: [try], data = [none]
break;
} catch (LockFile.BaseException ex) {
reason = ex;
} // depends on control dependency: [catch], data = [none]
// We get here if and only if success == false and reason != null,
// so its OK to 'break'
try {
Thread.sleep(interval); // depends on control dependency: [try], data = [none]
} catch (InterruptedException ex) {
break;
} // depends on control dependency: [catch], data = [none]
}
/**
* @todo:
* Do not want to specify just BaseException in the throws clause.
* Is this really the cleanest way?
*/
if (!success) {
if (reason instanceof FileSecurityException) {
throw (FileSecurityException) reason;
} else if (reason instanceof LockHeldExternallyException) {
throw (LockHeldExternallyException) reason;
} else if (reason instanceof UnexpectedFileNotFoundException) {
throw (UnexpectedFileNotFoundException) reason;
} else if (reason instanceof UnexpectedEndOfFileException) {
throw (UnexpectedEndOfFileException) reason;
} else if (reason instanceof UnexpectedFileIOException) {
throw (UnexpectedFileIOException) reason;
} else if (reason instanceof WrongLengthException) {
throw (WrongLengthException) reason;
} else if (reason instanceof WrongMagicException) {
throw (WrongMagicException) reason;
}
}
} }
|
public class class_name {
public List<Object> getSortedValues() {
List<Object> filtered = getFilteredValue();
if (filtered == null || filtered.isEmpty()) {
filtered = sortAndFilter();
}
return filtered;
} }
|
public class class_name {
public List<Object> getSortedValues() {
List<Object> filtered = getFilteredValue();
if (filtered == null || filtered.isEmpty()) {
filtered = sortAndFilter(); // depends on control dependency: [if], data = [none]
}
return filtered;
} }
|
public class class_name {
private void processEndTag() throws XMLStreamException, TTException {
mLevelInToShredder--;
if (mInserted) {
mInsertedEndTag = true;
}
if (mRemovedNode) {
mRemovedNode = false;
} else {
// Move cursor to parent.
if (mWtx.getNode().getDataKey() == mLastNodeKey) {
/*
* An end tag must have been parsed immediately before and it
* must have been an empty element at the end of a subtree, thus
* move this time to parent node.
*/
assert mWtx.getNode().hasParent() && mWtx.getNode().getKind() == IConstants.ELEMENT;
mWtx.moveTo(mWtx.getNode().getParentKey());
} else {
if (mWtx.getNode().getKind() == IConstants.ELEMENT) {
final ElementNode element = (ElementNode)mWtx.getNode();
if (element.hasFirstChild() && element.hasParent()) {
// It's not an empty element, thus move to parent.
mWtx.moveTo(mWtx.getNode().getParentKey());
}
// } else {
// checkIfLastNode(true);
// }
} else if (((ITreeStructData)mWtx.getNode()).hasParent()) {
if (((ITreeStructData)mWtx.getNode()).hasRightSibling()) {
mWtx.moveTo(((ITreeStructData)mWtx.getNode()).getRightSiblingKey());
/*
* Means next event is an end tag in StAX reader, but
* something different where the Treetank transaction
* points to, which also means it has to be deleted.
*/
mKeyMatches = -1;
mDelete = EDelete.ATBOTTOM;
deleteNode();
}
mWtx.moveTo(mWtx.getNode().getParentKey());
}
}
mLastNodeKey = mWtx.getNode().getDataKey();
// Move cursor to right sibling if it has one.
if (((ITreeStructData)mWtx.getNode()).hasRightSibling()) {
mWtx.moveTo(((ITreeStructData)mWtx.getNode()).getRightSiblingKey());
mMovedToRightSibling = true;
skipWhitespaces(mReader);
if (mReader.peek().getEventType() == XMLStreamConstants.END_ELEMENT) {
/*
* Means next event is an end tag in StAX reader, but
* something different where the Treetank transaction points
* to, which also means it has to be deleted.
*/
mKeyMatches = -1;
mDelete = EDelete.ATBOTTOM;
deleteNode();
}
} else {
mMovedToRightSibling = false;
}
}
} }
|
public class class_name {
private void processEndTag() throws XMLStreamException, TTException {
mLevelInToShredder--;
if (mInserted) {
mInsertedEndTag = true;
}
if (mRemovedNode) {
mRemovedNode = false;
} else {
// Move cursor to parent.
if (mWtx.getNode().getDataKey() == mLastNodeKey) {
/*
* An end tag must have been parsed immediately before and it
* must have been an empty element at the end of a subtree, thus
* move this time to parent node.
*/
assert mWtx.getNode().hasParent() && mWtx.getNode().getKind() == IConstants.ELEMENT;
mWtx.moveTo(mWtx.getNode().getParentKey()); // depends on control dependency: [if], data = [none]
} else {
if (mWtx.getNode().getKind() == IConstants.ELEMENT) {
final ElementNode element = (ElementNode)mWtx.getNode();
if (element.hasFirstChild() && element.hasParent()) {
// It's not an empty element, thus move to parent.
mWtx.moveTo(mWtx.getNode().getParentKey()); // depends on control dependency: [if], data = [none]
}
// } else {
// checkIfLastNode(true);
// }
} else if (((ITreeStructData)mWtx.getNode()).hasParent()) {
if (((ITreeStructData)mWtx.getNode()).hasRightSibling()) {
mWtx.moveTo(((ITreeStructData)mWtx.getNode()).getRightSiblingKey()); // depends on control dependency: [if], data = [none]
/*
* Means next event is an end tag in StAX reader, but
* something different where the Treetank transaction
* points to, which also means it has to be deleted.
*/
mKeyMatches = -1; // depends on control dependency: [if], data = [none]
mDelete = EDelete.ATBOTTOM; // depends on control dependency: [if], data = [none]
deleteNode(); // depends on control dependency: [if], data = [none]
}
mWtx.moveTo(mWtx.getNode().getParentKey()); // depends on control dependency: [if], data = [none]
}
}
mLastNodeKey = mWtx.getNode().getDataKey();
// Move cursor to right sibling if it has one.
if (((ITreeStructData)mWtx.getNode()).hasRightSibling()) {
mWtx.moveTo(((ITreeStructData)mWtx.getNode()).getRightSiblingKey()); // depends on control dependency: [if], data = [none]
mMovedToRightSibling = true; // depends on control dependency: [if], data = [none]
skipWhitespaces(mReader); // depends on control dependency: [if], data = [none]
if (mReader.peek().getEventType() == XMLStreamConstants.END_ELEMENT) {
/*
* Means next event is an end tag in StAX reader, but
* something different where the Treetank transaction points
* to, which also means it has to be deleted.
*/
mKeyMatches = -1; // depends on control dependency: [if], data = [none]
mDelete = EDelete.ATBOTTOM; // depends on control dependency: [if], data = [none]
deleteNode(); // depends on control dependency: [if], data = [none]
}
} else {
mMovedToRightSibling = false; // depends on control dependency: [if], data = [none]
}
}
} }
|
public class class_name {
private void fetchIfNeeded(long remainingBytes)
{
if ((fetchFutures.isEmpty() || fetchFutures.peekLast().isDone())
&& remainingBytes <= prefetchConfig.getPrefetchTriggerBytes()) {
Future<Void> fetchFuture = fetchExecutor.submit(() -> {
fetch();
return null;
});
fetchFutures.add(fetchFuture);
}
} }
|
public class class_name {
private void fetchIfNeeded(long remainingBytes)
{
if ((fetchFutures.isEmpty() || fetchFutures.peekLast().isDone())
&& remainingBytes <= prefetchConfig.getPrefetchTriggerBytes()) {
Future<Void> fetchFuture = fetchExecutor.submit(() -> {
fetch();
return null; // depends on control dependency: [if], data = [none]
});
fetchFutures.add(fetchFuture);
}
} }
|
public class class_name {
public final Flux<T> timeout(Duration timeout,
@Nullable Publisher<? extends T> fallback,
Scheduler timer) {
final Mono<Long> _timer = Mono.delay(timeout, timer).onErrorReturn(0L);
final Function<T, Publisher<Long>> rest = o -> _timer;
if(fallback == null) {
return timeout(_timer, rest, timeout.toMillis() + "ms");
}
return timeout(_timer, rest, fallback);
} }
|
public class class_name {
public final Flux<T> timeout(Duration timeout,
@Nullable Publisher<? extends T> fallback,
Scheduler timer) {
final Mono<Long> _timer = Mono.delay(timeout, timer).onErrorReturn(0L);
final Function<T, Publisher<Long>> rest = o -> _timer;
if(fallback == null) {
return timeout(_timer, rest, timeout.toMillis() + "ms"); // depends on control dependency: [if], data = [none]
}
return timeout(_timer, rest, fallback);
} }
|
public class class_name {
private ModelNode populateModuleInfo(ModuleInfo module) throws Exception {
ModelNode result = new ModelNode();
result.get("name").set(module.getName());
ModelNode value;
value = result.get("main-class");
if (module.getMainClass() != null) {
value.set(module.getMainClass());
}
value = result.get("fallback-loader");
if (module.getFallbackLoader() != null) {
value.set(module.getFallbackLoader());
}
ModelNode dependencies = result.get("dependencies").setEmptyList();
for (DependencyInfo dependencySpec : module.getDependencies()) {
if (dependencySpec.getModuleName() == null) {
continue; //todo check why it returns empty dependancy
}
ModelNode dependency = dependencies.add();
dependency.get("dependency-name").set(dependencySpec.getDependencyType());
dependency.get("module-name").set(dependencySpec.getModuleName());
dependency.get("export-filter").set(dependencySpec.getExportFilter());
dependency.get("import-filter").set(dependencySpec.getImportFilter());
dependency.get("optional").set(dependencySpec.isOptional());
value = result.get("local-loader-class");
if (dependencySpec.getLocalLoader() != null) {
value.set(dependencySpec.getLocalLoader());
}
if (dependencySpec.getLocalLoaderPaths() != null) {
ModelNode paths = dependency.get("local-loader-paths");
for (String path : dependencySpec.getLocalLoaderPaths()) {
paths.add(path);
}
}
}
ModelNode resourceLoaders = result.get("resource-loaders").setEmptyList();
for (ResourceLoaderInfo loaderInfo : module.getResourceLoaders()) {
ModelNode loader = resourceLoaders.add();
loader.get("type").set(loaderInfo.getType());
ModelNode paths = loader.get("paths");
for (String path : loaderInfo.getPaths()) {
paths.add(path);
}
}
return result;
} }
|
public class class_name {
private ModelNode populateModuleInfo(ModuleInfo module) throws Exception {
ModelNode result = new ModelNode();
result.get("name").set(module.getName());
ModelNode value;
value = result.get("main-class");
if (module.getMainClass() != null) {
value.set(module.getMainClass());
}
value = result.get("fallback-loader");
if (module.getFallbackLoader() != null) {
value.set(module.getFallbackLoader());
}
ModelNode dependencies = result.get("dependencies").setEmptyList();
for (DependencyInfo dependencySpec : module.getDependencies()) {
if (dependencySpec.getModuleName() == null) {
continue; //todo check why it returns empty dependancy
}
ModelNode dependency = dependencies.add();
dependency.get("dependency-name").set(dependencySpec.getDependencyType());
dependency.get("module-name").set(dependencySpec.getModuleName());
dependency.get("export-filter").set(dependencySpec.getExportFilter());
dependency.get("import-filter").set(dependencySpec.getImportFilter());
dependency.get("optional").set(dependencySpec.isOptional());
value = result.get("local-loader-class");
if (dependencySpec.getLocalLoader() != null) {
value.set(dependencySpec.getLocalLoader());
}
if (dependencySpec.getLocalLoaderPaths() != null) {
ModelNode paths = dependency.get("local-loader-paths");
for (String path : dependencySpec.getLocalLoaderPaths()) {
paths.add(path); // depends on control dependency: [for], data = [path]
}
}
}
ModelNode resourceLoaders = result.get("resource-loaders").setEmptyList();
for (ResourceLoaderInfo loaderInfo : module.getResourceLoaders()) {
ModelNode loader = resourceLoaders.add();
loader.get("type").set(loaderInfo.getType());
ModelNode paths = loader.get("paths");
for (String path : loaderInfo.getPaths()) {
paths.add(path);
}
}
return result;
} }
|
public class class_name {
public GetContentModerationResult withModerationLabels(ContentModerationDetection... moderationLabels) {
if (this.moderationLabels == null) {
setModerationLabels(new java.util.ArrayList<ContentModerationDetection>(moderationLabels.length));
}
for (ContentModerationDetection ele : moderationLabels) {
this.moderationLabels.add(ele);
}
return this;
} }
|
public class class_name {
public GetContentModerationResult withModerationLabels(ContentModerationDetection... moderationLabels) {
if (this.moderationLabels == null) {
setModerationLabels(new java.util.ArrayList<ContentModerationDetection>(moderationLabels.length)); // depends on control dependency: [if], data = [none]
}
for (ContentModerationDetection ele : moderationLabels) {
this.moderationLabels.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} }
|
public class class_name {
public static Date parseDate(String value, ParsePosition pos) {
Long timestamp = parseTimestamp(value);
if (timestamp != null) {
return new Date(timestamp);
}
if (value.contains(" ")) {
value = value.replace(" ", "+");
}
if (!(value.contains("-") || value.contains("+")) && !value.endsWith("Z")) {
value += SYS_TZ;
}
try {
return ISO8601Utils.parse(value, pos);
} catch (ParseException e) {
throw new ExtractorException(e);
}
} }
|
public class class_name {
public static Date parseDate(String value, ParsePosition pos) {
Long timestamp = parseTimestamp(value);
if (timestamp != null) {
return new Date(timestamp); // depends on control dependency: [if], data = [(timestamp]
}
if (value.contains(" ")) {
value = value.replace(" ", "+"); // depends on control dependency: [if], data = [none]
}
if (!(value.contains("-") || value.contains("+")) && !value.endsWith("Z")) {
value += SYS_TZ; // depends on control dependency: [if], data = [none]
}
try {
return ISO8601Utils.parse(value, pos); // depends on control dependency: [try], data = [none]
} catch (ParseException e) {
throw new ExtractorException(e);
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
public ListModelPackagesResult withModelPackageSummaryList(ModelPackageSummary... modelPackageSummaryList) {
if (this.modelPackageSummaryList == null) {
setModelPackageSummaryList(new java.util.ArrayList<ModelPackageSummary>(modelPackageSummaryList.length));
}
for (ModelPackageSummary ele : modelPackageSummaryList) {
this.modelPackageSummaryList.add(ele);
}
return this;
} }
|
public class class_name {
public ListModelPackagesResult withModelPackageSummaryList(ModelPackageSummary... modelPackageSummaryList) {
if (this.modelPackageSummaryList == null) {
setModelPackageSummaryList(new java.util.ArrayList<ModelPackageSummary>(modelPackageSummaryList.length)); // depends on control dependency: [if], data = [none]
}
for (ModelPackageSummary ele : modelPackageSummaryList) {
this.modelPackageSummaryList.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} }
|
public class class_name {
public void addMessageDestination(final String name, final String resolvedName, final VirtualFile deploymentRoot) {
List<MessageDestinationMapping> components = messageDestinationJndiMapping.get(name);
if (components == null) {
messageDestinationJndiMapping.put(name, components = new ArrayList<MessageDestinationMapping>(1));
}
components.add(new MessageDestinationMapping(resolvedName, deploymentRoot));
} }
|
public class class_name {
public void addMessageDestination(final String name, final String resolvedName, final VirtualFile deploymentRoot) {
List<MessageDestinationMapping> components = messageDestinationJndiMapping.get(name);
if (components == null) {
messageDestinationJndiMapping.put(name, components = new ArrayList<MessageDestinationMapping>(1)); // depends on control dependency: [if], data = [none]
}
components.add(new MessageDestinationMapping(resolvedName, deploymentRoot));
} }
|
public class class_name {
private Collection<?> getArrayPropertyAsCollection(FieldData field) {
Class<?> arrayType = field.metaData.getArrayTypeOfTypeParameter();
Object[] elements = (Object[]) resolvePropertyTypedValue(field, arrayType);
if (elements != null) {
@SuppressWarnings("unchecked")
Collection<Object> collection = ReflectionUtil.instantiateCollectionType((Class<Collection<Object>>) field.metaData.getType());
Collections.addAll(collection, elements);
return collection;
}
return null;
} }
|
public class class_name {
private Collection<?> getArrayPropertyAsCollection(FieldData field) {
Class<?> arrayType = field.metaData.getArrayTypeOfTypeParameter();
Object[] elements = (Object[]) resolvePropertyTypedValue(field, arrayType);
if (elements != null) {
@SuppressWarnings("unchecked")
Collection<Object> collection = ReflectionUtil.instantiateCollectionType((Class<Collection<Object>>) field.metaData.getType());
Collections.addAll(collection, elements); // depends on control dependency: [if], data = [none]
return collection; // depends on control dependency: [if], data = [none]
}
return null;
} }
|
public class class_name {
    /**
     * Commits the work held in the supplied transaction against the persistent
     * message store.
     *
     * <p>For a one-phase commit the transaction's task list is scanned and split
     * into synchronous work (written to the permanent store via a freshly created
     * batching context) and STORE_MAYBE "spill" work (handed to the spill
     * dispatcher for the temporary store). For the commit phase of a two-phase
     * transaction the batching context created at prepare time is reused; if none
     * exists the transaction may be a recovered indoubt, in which case a new
     * context is wrapped around the recovered ObjectManager transaction,
     * otherwise a {@code PersistenceException} is thrown.</p>
     *
     * @param transaction the transaction whose work list is to be committed
     * @param onePhase    {@code true} for a one-phase commit, {@code false} for
     *                    the second phase of a two-phase commit
     * @throws PersistenceException        if the store is unavailable, the spill
     *                                     dispatcher cannot accept work, no
     *                                     batching context can be found or
     *                                     created, or executing the batch fails
     * @throws SevereMessageStoreException on a severe message store failure
     */
    public void commit(PersistentTransaction transaction, boolean onePhase) throws PersistenceException, SevereMessageStoreException
    {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, "commit", new Object[]{"Transaction="+transaction, "OnePhase="+onePhase});
        // Defect 363755
        if (!_available)
        {
            MessageStoreUnavailableException msue = new MessageStoreUnavailableException("Operation not possible as MessageStore is unavailable!");
            if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) SibTr.event(this, tc, "Operation not possible as MessageStore is unavailable!");
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "commit");
            throw msue;
        }
        // Get the task list from the transaction
        TaskList taskList = (TaskList) transaction.getWorkList();
        PersistentTranId xid = transaction.getPersistentTranId();
        TransactionState tranState = transaction.getTransactionState();
        int numberOfTasks = taskList.countLinks();
        boolean requiresSyncPersistence = false;
        boolean requiresSpillPersistence = false;
        BatchingContext syncBatch = null;
        List syncList = null;
        List spillList = null;
        if (onePhase)
        {
            // In a one-phase commit we need to check for
            // STORE_ALWAYS,STORE_EVENTUALLY and STORE_MAYBE
            // all at the same time as everything is handled
            // in a single phase.
            if (taskList.hasStoreAlways() || taskList.hasStoreEventually() || taskList.hasStoreMaybe())
            {
                // Scan through the list finding out what kind of persistence we need.
                // This pass will build up lists of our synchronous work and possible
                // spill work.
                Iterator iterator = taskList.iterator();
                while (iterator.hasNext())
                {
                    Operation task = (Operation) iterator.next();
                    Persistable persistable = task.getPersistable();
                    // Defect 451518
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, "Committing Task(1PC): "+task);
                    if (persistable != null && persistable.requiresPersistence())
                    {
                        int storageStrategy = persistable.getStorageStrategy();
                        TupleTypeEnum tupleType = persistable.getTupleType();
                        // Feature SIB0112i.ms.1
                        // Are we a STORE_MAYBE Item? If so then we can be directly added to the
                        // temporary store. This allows the ObjectManager to manage the time at
                        // which the transaction data is written to the temporary store (usually
                        // at the next available checkpoint) and removes the need for the spill
                        // dispatcher.
                        if (tupleType != TupleTypeEnum.ITEM_STREAM &&
                            tupleType != TupleTypeEnum.REFERENCE_STREAM &&
                            storageStrategy == AbstractItem.STORE_MAYBE)
                        {
                            // Feature SIB0112i.ms.1
                            // Lazily create the spill list only once we know we have spill work.
                            if (requiresSpillPersistence == false)
                            {
                                spillList = new ArrayList(numberOfTasks);
                                requiresSpillPersistence = true;
                            }
                            // Defect 530772
                            // Add the task to the list we will pass
                            // to the dispatcher.
                            spillList.add(task);
                        }
                        // If not we are synchronous.
                        else
                        {
                            // Defect 298584
                            // Only create our batching context once we know for sure
                            // that we have some synchronous work to do.
                            if (requiresSyncPersistence == false)
                            {
                                syncList = new ArrayList(numberOfTasks);
                                syncBatch = new BatchingContextImpl(_objectManager, _permanentStore);
                                requiresSyncPersistence = true;
                            }
                            task.persist(syncBatch, tranState);
                            syncList.add(persistable);
                        }
                    }
                }
            }
        }
        //two-phase commit
        else
        {
            if (taskList.hasStoreAlways() || taskList.hasStoreEventually() || taskList.hasStoreMaybe())
            {
                Iterator iterator = taskList.iterator();
                while (iterator.hasNext())
                {
                    Operation task = (Operation) iterator.next();
                    Persistable persistable = task.getPersistable();
                    // Defect 451518
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, "Committing Task(2PC): "+task);
                    if (persistable != null && persistable.requiresPersistence())
                    {
                        int storageStrategy = persistable.getStorageStrategy();
                        TupleTypeEnum tupleType = persistable.getTupleType();
                        // Feature SIB0112i.ms.1
                        // Are we a STORE_MAYBE Item? If so then we can be directly added to the
                        // temporary store. This allows the ObjectManager to manage the time at
                        // which the transaction data is written to the temporary store (usually
                        // at the next available checkpoint) and removes the need for the spill
                        // dispatcher.
                        if (tupleType != TupleTypeEnum.ITEM_STREAM &&
                            tupleType != TupleTypeEnum.REFERENCE_STREAM &&
                            storageStrategy == AbstractItem.STORE_MAYBE)
                        {
                            // Feature SIB0112i.ms.1
                            // If we have some STORE_MAYBE work to do then
                            // we can now add it to the file store's
                            // temporary store.
                            if (requiresSpillPersistence == false)
                            {
                                spillList = new ArrayList(numberOfTasks);
                                requiresSpillPersistence = true;
                            }
                            // Defect 530772
                            // Add the task to the list we will pass
                            // to the dispatcher.
                            spillList.add(task);
                        }
                        // If not we are synchronous.
                        else
                        {
                            // Defect 298584
                            // We only need to check on our batching context once
                            // we know that we have some synchronous work from
                            // the first phase that needs completing.
                            if (requiresSyncPersistence == false)
                            {
                                // Get the batching context for this two-phase operation.
                                // One must exist from the prepare phase if it does not
                                // then we need to check if this transaction has been
                                // recovered or not.
                                syncBatch = transaction.getBatchingContext();
                                if (syncBatch == null)
                                {
                                    // We need to see if this is the second phase of
                                    // a recovered indoubt transaction if not we can
                                    // throw an exception.
                                    Transaction tran = null;
                                    try
                                    {
                                        tran = _objectManager.getTransactionByXID(xid.toByteArray());
                                    }
                                    catch (ObjectManagerException ome)
                                    {
                                        com.ibm.ws.ffdc.FFDCFilter.processException(ome, "com.ibm.ws.sib.msgstore.persistence.objectManager.PersistableMessageStoreImpl.commit", "1:1800:1.81.1.6", this);
                                        if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) SibTr.event(this, tc, "Exception caught requesting recovered transaction from object manager!", ome);
                                        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "commit");
                                        throw new PersistenceException("Exception caught requesting recovered transaction from object manager: "+ome.getMessage(), ome);
                                    }
                                    if (tran != null)
                                    {
                                        // We are completing an indoubt so we need to
                                        // create a batching context to use.
                                        syncBatch = new BatchingContextImpl(_objectManager, _permanentStore, tran);
                                        transaction.setBatchingContext(syncBatch);
                                    }
                                    else
                                    {
                                        PersistenceException pe = new PersistenceException("No existing batching context found during two-phase commit!");
                                        if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) SibTr.event(this, tc, "No existing batching context found during two-phase commit!", pe);
                                        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "commit");
                                        throw pe;
                                    }
                                }
                                requiresSyncPersistence = true;
                            }
                        }
                    }
                }
            }
        }
        // Defect 530772
        // Check the health of the spill dispatcher to minimise the risk
        // of splitting a transaction.
        if (requiresSpillPersistence)
        {
            if (!_spillDispatcher.isHealthy())
            {
                // Defect 345250
                // The dispatcher is not currently accepting new work as it has
                // hit a problem. Details of the problem should have been output
                // to the logs by the dispatcher thread at the point where it
                // occurred.
                PersistenceException pe = new PersistenceException("SPILL_DISPATCHER_CANNOT_ACCEPT_WORK_SIMS1578");
                if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) SibTr.event(this, tc, "The spill dispatcher cannot accept work!", pe);
                // FIX: this exit trace previously logged "prepare" (copy/paste
                // slip) even though we are exiting commit(); label corrected so
                // entry/exit trace pairs match up.
                if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "commit");
                throw pe;
            }
        }
        if (requiresSyncPersistence)
        {
            try
            {
                // Tell the batch that we need to commit during the
                // next executeBatch call.
                syncBatch.updateXIDToCommitted(null);
                //Submit the batch
                syncBatch.executeBatch();
                // If we are one-phase then we need to inform the cache
                // layer of the successful writes. If we are two-phase
                // this will have been done at prepare time.
                if (onePhase)
                {
                    //If we get to this point then the commit was successful, so we need to
                    //notify the cache layer
                    Iterator iterator = syncList.iterator();
                    while (iterator.hasNext())
                    {
                        Tuple tuple = (Tuple)iterator.next();
                        //Both these methods need to be called to indicate to the cache
                        //that it can soften its in-memory reference to the persistable
                        tuple.persistableOperationBegun();
                        tuple.persistableOperationCompleted();
                    }
                }
            }
            catch (PersistenceException pe)
            {
                com.ibm.ws.ffdc.FFDCFilter.processException(pe, "com.ibm.ws.sib.msgstore.persistence.objectManager.PersistableMessageStoreImpl.commit", "1:1880:1.81.1.6", this);
                if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) SibTr.event(this, tc, "Exception caught committing transaction work in permanent store!", pe);
                if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "commit");
                throw pe;
            }
        }
        // Defect 530772
        // Always submit any spill updates to the dispatcher for processing.
        if (requiresSpillPersistence)
        {
            _spillDispatcher.dispatch(spillList, transaction, false);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "commit");
    } }
|
public class class_name {
    // NOTE: annotated duplicate of the commit() implementation; the trailing
    // "// depends on control dependency" comments are machine-generated data
    // labels recording each statement's controlling construct. Apart from the
    // labels the code is identical to the unannotated record.
    public void commit(PersistentTransaction transaction, boolean onePhase) throws PersistenceException, SevereMessageStoreException
    {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, "commit", new Object[]{"Transaction="+transaction, "OnePhase="+onePhase});
        // Defect 363755
        if (!_available)
        {
            MessageStoreUnavailableException msue = new MessageStoreUnavailableException("Operation not possible as MessageStore is unavailable!");
            if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) SibTr.event(this, tc, "Operation not possible as MessageStore is unavailable!");
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "commit");
            throw msue;
        }
        // Get the task list from the transaction
        TaskList taskList = (TaskList) transaction.getWorkList();
        PersistentTranId xid = transaction.getPersistentTranId();
        TransactionState tranState = transaction.getTransactionState();
        int numberOfTasks = taskList.countLinks();
        boolean requiresSyncPersistence = false;
        boolean requiresSpillPersistence = false;
        BatchingContext syncBatch = null;
        List syncList = null;
        List spillList = null;
        if (onePhase)
        {
            // In a one-phase commit we need to check for
            // STORE_ALWAYS,STORE_EVENTUALLY and STORE_MAYBE
            // all at the same time as everything is handled
            // in a single phase.
            if (taskList.hasStoreAlways() || taskList.hasStoreEventually() || taskList.hasStoreMaybe())
            {
                // Scan through the list finding out what kind of persistence we need.
                // This pass will build up lists of our synchronous work and possible
                // spill work.
                Iterator iterator = taskList.iterator();
                while (iterator.hasNext())
                {
                    Operation task = (Operation) iterator.next();
                    Persistable persistable = task.getPersistable();
                    // Defect 451518
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, "Committing Task(1PC): "+task);
                    if (persistable != null && persistable.requiresPersistence())
                    {
                        int storageStrategy = persistable.getStorageStrategy();
                        TupleTypeEnum tupleType = persistable.getTupleType();
                        // Feature SIB0112i.ms.1
                        // Are we a STORE_MAYBE Item? If so then we can be directly added to the
                        // temporary store. This allows the ObjectManager to manage the time at
                        // which the transaction data is written to the temporary store (usually
                        // at the next available checkpoint) and removes the need for the spill
                        // dispatcher.
                        if (tupleType != TupleTypeEnum.ITEM_STREAM &&
                            tupleType != TupleTypeEnum.REFERENCE_STREAM &&
                            storageStrategy == AbstractItem.STORE_MAYBE)
                        {
                            // Feature SIB0112i.ms.1
                            if (requiresSpillPersistence == false)
                            {
                                spillList = new ArrayList(numberOfTasks); // depends on control dependency: [if], data = [none]
                                requiresSpillPersistence = true; // depends on control dependency: [if], data = [none]
                            }
                            // Defect 530772
                            // Add the task to the list we will pass
                            // to the dispatcher.
                            spillList.add(task); // depends on control dependency: [if], data = [none]
                        }
                        // If not we are synchronous.
                        else
                        {
                            // Defect 298584
                            // Only create our batching context once we know for sure
                            // that we have some synchronous work to do.
                            if (requiresSyncPersistence == false)
                            {
                                syncList = new ArrayList(numberOfTasks); // depends on control dependency: [if], data = [none]
                                syncBatch = new BatchingContextImpl(_objectManager, _permanentStore); // depends on control dependency: [if], data = [none]
                                requiresSyncPersistence = true; // depends on control dependency: [if], data = [none]
                            }
                            task.persist(syncBatch, tranState); // depends on control dependency: [if], data = [none]
                            syncList.add(persistable); // depends on control dependency: [if], data = [none]
                        }
                    }
                }
            }
        }
        //two-phase commit
        else
        {
            if (taskList.hasStoreAlways() || taskList.hasStoreEventually() || taskList.hasStoreMaybe())
            {
                Iterator iterator = taskList.iterator();
                while (iterator.hasNext())
                {
                    Operation task = (Operation) iterator.next();
                    Persistable persistable = task.getPersistable();
                    // Defect 451518
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, "Committing Task(2PC): "+task);
                    if (persistable != null && persistable.requiresPersistence())
                    {
                        int storageStrategy = persistable.getStorageStrategy();
                        TupleTypeEnum tupleType = persistable.getTupleType();
                        // Feature SIB0112i.ms.1
                        // Are we a STORE_MAYBE Item? If so then we can be directly added to the
                        // temporary store. This allows the ObjectManager to manage the time at
                        // which the transaction data is written to the temporary store (usually
                        // at the next available checkpoint) and removes the need for the spill
                        // dispatcher.
                        if (tupleType != TupleTypeEnum.ITEM_STREAM &&
                            tupleType != TupleTypeEnum.REFERENCE_STREAM &&
                            storageStrategy == AbstractItem.STORE_MAYBE)
                        {
                            // Feature SIB0112i.ms.1
                            // If we have some STORE_MAYBE work to do then
                            // we can now add it to the file store's
                            // temporary store.
                            if (requiresSpillPersistence == false)
                            {
                                spillList = new ArrayList(numberOfTasks); // depends on control dependency: [if], data = [none]
                                requiresSpillPersistence = true; // depends on control dependency: [if], data = [none]
                            }
                            // Defect 530772
                            // Add the task to the list we will pass
                            // to the dispatcher.
                            spillList.add(task);
                        }
                        // If not we are synchronous.
                        else
                        {
                            // Defect 298584
                            // We only need to check on our batching context once
                            // we know that we have some synchronous work from
                            // the first phase that needs completing.
                            if (requiresSyncPersistence == false)
                            {
                                // Get the batching context for this two-phase operation.
                                // One must exist from the prepare phase if it does not
                                // then we need to check if this transaction has been
                                // recovered or not.
                                syncBatch = transaction.getBatchingContext();
                                if (syncBatch == null)
                                {
                                    // We need to see if this is the second phase of
                                    // a recovered indoubt transaction if not we can
                                    // throw an exception.
                                    Transaction tran = null;
                                    try
                                    {
                                        tran = _objectManager.getTransactionByXID(xid.toByteArray());
                                    }
                                    catch (ObjectManagerException ome)
                                    {
                                        com.ibm.ws.ffdc.FFDCFilter.processException(ome, "com.ibm.ws.sib.msgstore.persistence.objectManager.PersistableMessageStoreImpl.commit", "1:1800:1.81.1.6", this);
                                        if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) SibTr.event(this, tc, "Exception caught requesting recovered transaction from object manager!", ome);
                                        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "commit");
                                        throw new PersistenceException("Exception caught requesting recovered transaction from object manager: "+ome.getMessage(), ome);
                                    }
                                    if (tran != null)
                                    {
                                        // We are completing an indoubt so we need to
                                        // create a batching context to use.
                                        syncBatch = new BatchingContextImpl(_objectManager, _permanentStore, tran);
                                        transaction.setBatchingContext(syncBatch);
                                    }
                                    else
                                    {
                                        PersistenceException pe = new PersistenceException("No existing batching context found during two-phase commit!");
                                        if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) SibTr.event(this, tc, "No existing batching context found during two-phase commit!", pe);
                                        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "commit");
                                        throw pe;
                                    }
                                }
                                requiresSyncPersistence = true;
                            }
                        }
                    }
                }
            }
        }
        // Defect 530772
        // Check the health of the spill dispatcher to minimise the risk
        // of splitting a transaction.
        if (requiresSpillPersistence)
        {
            if (!_spillDispatcher.isHealthy())
            {
                // Defect 345250
                // The dispatcher is not currently accepting new work as it has
                // hit a problem. Details of the problem should have been output
                // to the logs by the dispatcher thread at the point where it
                // occurred.
                PersistenceException pe = new PersistenceException("SPILL_DISPATCHER_CANNOT_ACCEPT_WORK_SIMS1578");
                if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) SibTr.event(this, tc, "The spill dispatcher cannot accept work!", pe);
                // NOTE(review): this exit trace is labelled "prepare" although we
                // are exiting commit() — looks like a copy/paste slip; confirm
                // against the tracing conventions before changing the label.
                if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "prepare");
                throw pe;
            }
        }
        if (requiresSyncPersistence)
        {
            try
            {
                // Tell the batch that we need to commit during the
                // next executeBatch call.
                syncBatch.updateXIDToCommitted(null);
                //Submit the batch
                syncBatch.executeBatch();
                // If we are one-phase then we need to inform the cache
                // layer of the successful writes. If we are two-phase
                // this will have been done at prepare time.
                if (onePhase)
                {
                    //If we get to this point then the commit was successful, so we need to
                    //notify the cache layer
                    Iterator iterator = syncList.iterator();
                    while (iterator.hasNext())
                    {
                        Tuple tuple = (Tuple)iterator.next();
                        //Both these methods need to be called to indicate to the cache
                        //that it can soften its in-memory reference to the persistable
                        tuple.persistableOperationBegun();
                        tuple.persistableOperationCompleted();
                    }
                }
            }
            catch (PersistenceException pe)
            {
                com.ibm.ws.ffdc.FFDCFilter.processException(pe, "com.ibm.ws.sib.msgstore.persistence.objectManager.PersistableMessageStoreImpl.commit", "1:1880:1.81.1.6", this);
                if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) SibTr.event(this, tc, "Exception caught committing transaction work in permanent store!", pe);
                if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "commit");
                throw pe;
            }
        }
        // Defect 530772
        // Always submit any spill updates to the dispatcher for processing.
        if (requiresSpillPersistence)
        {
            _spillDispatcher.dispatch(spillList, transaction, false);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "commit");
    } }
|
public class class_name {
    // Merges cluster2 into cluster_1: removes cluster2's pending event-queue
    // node, deletes any edges that directly connect the two clusters, re-homes
    // cluster2's surviving edges and vertices onto cluster_1, splices the two
    // circular per-cluster edge lists together, then deletes cluster2.
    // The statement order here is load-bearing — the do/while rings walk the
    // list while it is being mutated — so treat with care.
    void mergeClusters_(int cluster_1, int cluster2) {
        // dbg_check_cluster_(cluster_1);
        // dbg_check_cluster_(cluster2);
        // cluster2 is going away: drop its pending node from the event queue.
        int eventQnode = getClusterEventQNode(cluster2);
        if (eventQnode != -1) {
            m_event_q.deleteNode(eventQnode, -1);
            setClusterEventQNode_(cluster2, -1);
        }
        int firstEdge1 = getClusterFirstEdge(cluster_1);
        int firstEdge2 = getClusterFirstEdge(cluster2);
        if (firstEdge2 != -1) {// scope
            int edge2 = firstEdge2;
            int lastEdge = firstEdge2;
            boolean bForceContinue = false;
            // Delete edges that connect cluster_1 and cluster2.
            do {
                // dbg_check_edge_(edge2);
                bForceContinue = false;
                // assert(!StridedIndexTypeCollection.isValidElement(getEdgeSweepNode(edge2)));
                int end = getEdgeEnd(edge2, cluster2);
                int nextEdge2 = getNextEdgeEx(edge2, end);
                // Snapping clusters that are connected with an edge: the
                // connecting edge collapses to a point, so delete it.
                if (getEdgeCluster(edge2, (end + 1) & 1) == cluster_1) {
                    disconnectEdge_(edge2);
                    int edgeOrigins2 = getEdgeOriginVertices(edge2);
                    m_edge_vertices.deleteList(edgeOrigins2);
                    deleteEdge_(edge2);
                    if (edge2 == nextEdge2) {// deleted last edge connecting to
                        // the cluster2 (all connections
                        // are degenerate)
                        firstEdge2 = -1;
                        break;
                    }
                    if (firstEdge2 == edge2) {
                        // The ring's head was just deleted: re-read the new head
                        // and force at least one more pass around the ring.
                        firstEdge2 = getClusterFirstEdge(cluster2);
                        lastEdge = nextEdge2;
                        bForceContinue = true;
                    }
                } else {
                    assert (edge2 != getClusterFirstEdge(cluster_1));
                }
                edge2 = nextEdge2;
            } while (edge2 != lastEdge || bForceContinue);
            if (firstEdge2 != -1) {
                // set the cluster to the edge ends (re-home survivors onto
                // cluster_1)
                do {
                    int end = getEdgeEnd(edge2, cluster2);
                    int nextEdge2 = getNextEdgeEx(edge2, end);
                    assert (edge2 != getClusterFirstEdge(cluster_1));
                    setEdgeCluster_(edge2, end, cluster_1);
                    edge2 = nextEdge2;
                } while (edge2 != lastEdge);
                firstEdge1 = getClusterFirstEdge(cluster_1);
                if (firstEdge1 != -1) {
                    // Splice the two circular edge lists into a single ring.
                    int next1 = getNextEdge(firstEdge1, cluster_1);
                    int next2 = getNextEdge(firstEdge2, cluster_1);
                    if (next1 == firstEdge1) {
                        // cluster_1's ring held a single edge: re-link it via the
                        // standard add path so per-cluster bookkeeping stays
                        // consistent, then restore the original head.
                        setClusterFirstEdge_(cluster_1, firstEdge2);
                        addEdgeToClusterImpl_(firstEdge1, cluster_1);
                        setClusterFirstEdge_(cluster_1, firstEdge1);
                    } else if (next2 == firstEdge2) {
                        addEdgeToClusterImpl_(firstEdge2, cluster_1);
                    }
                    setNextEdge_(firstEdge2, cluster_1, next1);
                    setPrevEdge_(next1, cluster_1, firstEdge2);
                    setNextEdge_(firstEdge1, cluster_1, next2);
                    setPrevEdge_(next2, cluster_1, firstEdge1);
                } else {
                    // cluster_1 had no edges: simply adopt cluster2's ring.
                    setClusterFirstEdge_(cluster_1, firstEdge2);
                }
            }
        }
        int vertices1 = getClusterVertices(cluster_1);
        int vertices2 = getClusterVertices(cluster2);
        // Update cluster info on vertices.
        for (int vh = m_cluster_vertices.getFirst(vertices2); vh != -1; vh = m_cluster_vertices
                .getNext(vh)) {
            int v = m_cluster_vertices.getElement(vh);
            m_shape.setUserIndex(v, m_vertex_cluster_index, cluster_1);
        }
        m_cluster_vertices.concatenateLists(vertices1, vertices2);
        deleteCluster_(cluster2);
        // dbg_check_cluster_(cluster_1);
    } }
|
public class class_name {
    // NOTE: annotated duplicate of the mergeClusters_() implementation; the
    // trailing "// depends on control dependency" comments are machine-generated
    // data labels recording each statement's controlling construct. Apart from
    // the labels the code is identical to the unannotated record.
    void mergeClusters_(int cluster_1, int cluster2) {
        // dbg_check_cluster_(cluster_1);
        // dbg_check_cluster_(cluster2);
        int eventQnode = getClusterEventQNode(cluster2);
        if (eventQnode != -1) {
            m_event_q.deleteNode(eventQnode, -1); // depends on control dependency: [if], data = [(eventQnode]
            setClusterEventQNode_(cluster2, -1); // depends on control dependency: [if], data = [-1)]
        }
        int firstEdge1 = getClusterFirstEdge(cluster_1);
        int firstEdge2 = getClusterFirstEdge(cluster2);
        if (firstEdge2 != -1) {// scope
            int edge2 = firstEdge2;
            int lastEdge = firstEdge2;
            boolean bForceContinue = false;
            // Delete edges that connect cluster_1 and cluster2.
            do {
                // dbg_check_edge_(edge2);
                bForceContinue = false;
                // assert(!StridedIndexTypeCollection.isValidElement(getEdgeSweepNode(edge2)));
                int end = getEdgeEnd(edge2, cluster2);
                int nextEdge2 = getNextEdgeEx(edge2, end);
                if (getEdgeCluster(edge2, (end + 1) & 1) == cluster_1) { // Snapping
                    // clusters
                    // that
                    // are
                    // connected
                    // with
                    // an
                    // edge
                    // Delete
                    // the
                    // edge.
                    disconnectEdge_(edge2); // depends on control dependency: [if], data = [none]
                    int edgeOrigins2 = getEdgeOriginVertices(edge2);
                    m_edge_vertices.deleteList(edgeOrigins2); // depends on control dependency: [if], data = [none]
                    deleteEdge_(edge2); // depends on control dependency: [if], data = [none]
                    if (edge2 == nextEdge2) {// deleted last edge connecting to
                        // the cluster2 (all connections
                        // are degenerate)
                        firstEdge2 = -1; // depends on control dependency: [if], data = [none]
                        break;
                    }
                    if (firstEdge2 == edge2) {
                        firstEdge2 = getClusterFirstEdge(cluster2); // depends on control dependency: [if], data = [none]
                        lastEdge = nextEdge2; // depends on control dependency: [if], data = [none]
                        bForceContinue = true; // depends on control dependency: [if], data = [none]
                    }
                } else {
                    assert (edge2 != getClusterFirstEdge(cluster_1)); // depends on control dependency: [if], data = [cluster_1)]
                }
                edge2 = nextEdge2;
            } while (edge2 != lastEdge || bForceContinue);
            if (firstEdge2 != -1) {
                // set the cluster to the edge ends
                do {
                    int end = getEdgeEnd(edge2, cluster2);
                    int nextEdge2 = getNextEdgeEx(edge2, end);
                    assert (edge2 != getClusterFirstEdge(cluster_1));
                    setEdgeCluster_(edge2, end, cluster_1);
                    edge2 = nextEdge2;
                } while (edge2 != lastEdge);
                firstEdge1 = getClusterFirstEdge(cluster_1); // depends on control dependency: [if], data = [none]
                if (firstEdge1 != -1) {
                    int next1 = getNextEdge(firstEdge1, cluster_1);
                    int next2 = getNextEdge(firstEdge2, cluster_1);
                    if (next1 == firstEdge1) {
                        setClusterFirstEdge_(cluster_1, firstEdge2); // depends on control dependency: [if], data = [none]
                        addEdgeToClusterImpl_(firstEdge1, cluster_1); // depends on control dependency: [if], data = [none]
                        setClusterFirstEdge_(cluster_1, firstEdge1); // depends on control dependency: [if], data = [firstEdge1)]
                    } else if (next2 == firstEdge2) {
                        addEdgeToClusterImpl_(firstEdge2, cluster_1); // depends on control dependency: [if], data = [none]
                    }
                    setNextEdge_(firstEdge2, cluster_1, next1); // depends on control dependency: [if], data = [none]
                    setPrevEdge_(next1, cluster_1, firstEdge2); // depends on control dependency: [if], data = [none]
                    setNextEdge_(firstEdge1, cluster_1, next2); // depends on control dependency: [if], data = [(firstEdge1]
                    setPrevEdge_(next2, cluster_1, firstEdge1); // depends on control dependency: [if], data = [none]
                } else {
                    setClusterFirstEdge_(cluster_1, firstEdge2); // depends on control dependency: [if], data = [none]
                }
            }
        }
        int vertices1 = getClusterVertices(cluster_1);
        int vertices2 = getClusterVertices(cluster2);
        // Update cluster info on vertices.
        for (int vh = m_cluster_vertices.getFirst(vertices2); vh != -1; vh = m_cluster_vertices
                .getNext(vh)) {
            int v = m_cluster_vertices.getElement(vh);
            m_shape.setUserIndex(v, m_vertex_cluster_index, cluster_1); // depends on control dependency: [for], data = [none]
        }
        m_cluster_vertices.concatenateLists(vertices1, vertices2);
        deleteCluster_(cluster2);
        // dbg_check_cluster_(cluster_1);
    } }
|
public class class_name {
    /**
     * Builds the HTML page that POSTs a BasicLTI launch to the tool provider.
     *
     * <p>Renders every property as a hidden form field (HTML-escaped via
     * {@code htmlspecialchars}), adds a breakout-to-new-window script for
     * https-to-http transitions, and either dumps the parameters for debugging
     * or emits an auto-submit script.</p>
     *
     * @param cleanProperties the signed launch parameters; must be non-null and
     *                        non-empty
     * @param endpoint        the tool provider launch URL; must be non-null
     * @param debug           when {@code true}, the parameters are sorted and
     *                        printed instead of auto-submitting the form
     * @return the complete HTML/JavaScript launch page
     * @throws IllegalArgumentException if {@code cleanProperties} is null/empty
     *                                  or {@code endpoint} is null
     */
    public static String postLaunchHTML(
            final Map<String, String> cleanProperties, String endpoint, boolean debug) {
        if (cleanProperties == null || cleanProperties.isEmpty()) {
            throw new IllegalArgumentException(
                    "cleanProperties == null || cleanProperties.isEmpty()");
        }
        if (endpoint == null) {
            throw new IllegalArgumentException("endpoint == null");
        }
        Map<String, String> newMap = null;
        if (debug) {
            // sort the properties for readability
            newMap = new TreeMap<String, String>(cleanProperties);
        } else {
            newMap = cleanProperties;
        }
        StringBuilder text = new StringBuilder();
        // paint form
        // NOTE(review): endpoint is inserted into the action attribute without
        // HTML escaping — presumably callers validate it as a URL; confirm.
        text.append("<div id=\"ltiLaunchFormSubmitArea\">\n");
        text.append("<form action=\"");
        text.append(endpoint);
        text.append("\" name=\"ltiLaunchForm\" id=\"ltiLaunchForm\" method=\"post\" ");
        text.append(" encType=\"application/x-www-form-urlencoded\" accept-charset=\"utf-8\">\n");
        for (Entry<String, String> entry : newMap.entrySet()) {
            String key = entry.getKey();
            String value = entry.getValue();
            if (value == null) {
                continue;
            }
            // This will escape the contents pretty much - at least
            // we will be safe and not generate dangerous HTML
            key = htmlspecialchars(key);
            value = htmlspecialchars(value);
            if (key.equals(BASICLTI_SUBMIT)) {
                text.append("<input type=\"submit\" name=\"");
            } else {
                text.append("<input type=\"hidden\" name=\"");
            }
            text.append(key);
            text.append("\" value=\"");
            text.append(value);
            text.append("\"/>\n");
        }
        text.append("</form>\n");
        text.append("</div>\n");
        // Paint the auto-pop up if we are transitioning from https: to http:
        // and are not already the top frame...
        text.append("<script type=\"text/javascript\">\n");
        text.append("if (window.top!=window.self) {\n");
        text.append("  theform = document.getElementById('ltiLaunchForm');\n");
        text.append("  if ( theform && theform.action ) {\n");
        text.append("   formAction = theform.action;\n");
        text.append("   ourUrl = window.location.href;\n");
        text.append("   if ( formAction.indexOf('http://') == 0 && ourUrl.indexOf('https://') == 0 ) {\n");
        text.append("      theform.target = '_blank';\n");
        text.append("      window.console && console.log('Launching http from https in new window!');\n");
        text.append("    }\n");
        text.append("  }\n");
        text.append("}\n");
        text.append("</script>\n");
        // paint debug output
        if (debug) {
            text.append("<pre>\n");
            text.append("<b>BasicLTI Endpoint</b>\n");
            text.append(endpoint);
            text.append("\n\n");
            text.append("<b>BasicLTI Parameters:</b>\n");
            for (Entry<String, String> entry : newMap.entrySet()) {
                String key = entry.getKey();
                String value = entry.getValue();
                if (value == null) {
                    continue;
                }
                text.append(htmlspecialchars(key));
                text.append("=");
                text.append(htmlspecialchars(value));
                text.append("\n");
            }
            text.append("</pre>\n");
        } else {
            // SECURITY FIX: the submit-button label comes straight from the
            // property map and was previously concatenated raw into a
            // single-quoted JavaScript string, allowing it to break out of the
            // script (and a missing entry produced the literal text 'null').
            // Escape it for the JS string context and default a missing entry
            // to the empty string.
            String submitText = newMap.get(BASICLTI_SUBMIT);
            if (submitText == null) {
                submitText = "";
            }
            String jsSafeSubmit = submitText
                    .replace("\\", "\\\\")
                    .replace("'", "\\'")
                    .replace("</", "<\\/");
            // paint auto submit script
            text
                    .append(" <script language=\"javascript\"> \n"
                            + "    document.getElementById(\"ltiLaunchFormSubmitArea\").style.display = \"none\";\n"
                            + "    nei = document.createElement('input');\n"
                            + "    nei.setAttribute('type', 'hidden');\n"
                            + "    nei.setAttribute('name', '"
                            + BASICLTI_SUBMIT
                            + "');\n"
                            + "    nei.setAttribute('value', '"
                            + jsSafeSubmit
                            + "');\n"
                            + "    document.getElementById(\"ltiLaunchForm\").appendChild(nei);\n"
                            + "    document.ltiLaunchForm.submit(); \n" + " </script> \n");
        }
        String htmltext = text.toString();
        return htmltext;
    } }
|
public class class_name {
    // NOTE: annotated duplicate of the postLaunchHTML() implementation; the
    // trailing "// depends on control dependency" comments are machine-generated
    // data labels recording each statement's controlling construct. Apart from
    // the labels the code is identical to the unannotated record.
    public static String postLaunchHTML(
            final Map<String, String> cleanProperties, String endpoint, boolean debug) {
        if (cleanProperties == null || cleanProperties.isEmpty()) {
            throw new IllegalArgumentException(
                    "cleanProperties == null || cleanProperties.isEmpty()");
        }
        if (endpoint == null) {
            throw new IllegalArgumentException("endpoint == null");
        }
        Map<String, String> newMap = null;
        if (debug) {
            // sort the properties for readability
            newMap = new TreeMap<String, String>(cleanProperties); // depends on control dependency: [if], data = [none]
        } else {
            newMap = cleanProperties; // depends on control dependency: [if], data = [none]
        }
        StringBuilder text = new StringBuilder();
        // paint form
        text.append("<div id=\"ltiLaunchFormSubmitArea\">\n");
        text.append("<form action=\"");
        text.append(endpoint);
        text.append("\" name=\"ltiLaunchForm\" id=\"ltiLaunchForm\" method=\"post\" ");
        text.append(" encType=\"application/x-www-form-urlencoded\" accept-charset=\"utf-8\">\n");
        for (Entry<String, String> entry : newMap.entrySet()) {
            String key = entry.getKey();
            String value = entry.getValue();
            if (value == null) {
                continue;
            }
            // This will escape the contents pretty much - at least
            // we will be safe and not generate dangerous HTML
            key = htmlspecialchars(key); // depends on control dependency: [for], data = [none]
            value = htmlspecialchars(value); // depends on control dependency: [for], data = [none]
            if (key.equals(BASICLTI_SUBMIT)) {
                text.append("<input type=\"submit\" name=\""); // depends on control dependency: [if], data = [none]
            } else {
                text.append("<input type=\"hidden\" name=\""); // depends on control dependency: [if], data = [none]
            }
            text.append(key); // depends on control dependency: [for], data = [none]
            text.append("\" value=\""); // depends on control dependency: [for], data = [none]
            text.append(value); // depends on control dependency: [for], data = [none]
            text.append("\"/>\n"); // depends on control dependency: [for], data = [none]
        }
        text.append("</form>\n");
        text.append("</div>\n");
        // Paint the auto-pop up if we are transitioning from https: to http:
        // and are not already the top frame...
        text.append("<script type=\"text/javascript\">\n");
        text.append("if (window.top!=window.self) {\n");
        text.append("  theform = document.getElementById('ltiLaunchForm');\n");
        text.append("  if ( theform && theform.action ) {\n");
        text.append("   formAction = theform.action;\n");
        text.append("   ourUrl = window.location.href;\n");
        text.append("   if ( formAction.indexOf('http://') == 0 && ourUrl.indexOf('https://') == 0 ) {\n");
        text.append("      theform.target = '_blank';\n");
        text.append("      window.console && console.log('Launching http from https in new window!');\n");
        text.append("    }\n");
        text.append("  }\n");
        text.append("}\n");
        text.append("</script>\n");
        // paint debug output
        if (debug) {
            text.append("<pre>\n"); // depends on control dependency: [if], data = [none]
            text.append("<b>BasicLTI Endpoint</b>\n"); // depends on control dependency: [if], data = [none]
            text.append(endpoint); // depends on control dependency: [if], data = [none]
            text.append("\n\n"); // depends on control dependency: [if], data = [none]
            text.append("<b>BasicLTI Parameters:</b>\n"); // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none]
            for (Entry<String, String> entry : newMap.entrySet()) {
                String key = entry.getKey();
                String value = entry.getValue();
                if (value == null) {
                    continue;
                }
                text.append(htmlspecialchars(key)); // depends on control dependency: [for], data = [none]
                text.append("="); // depends on control dependency: [for], data = [none]
                text.append(htmlspecialchars(value)); // depends on control dependency: [for], data = [none]
                text.append("\n"); // depends on control dependency: [for], data = [none]
            }
            text.append("</pre>\n"); // depends on control dependency: [if], data = [none]
        } else {
            // paint auto submit script
            text
                    .append(" <script language=\"javascript\"> \n"
                            + "    document.getElementById(\"ltiLaunchFormSubmitArea\").style.display = \"none\";\n" // depends on control dependency: [if], data = [none]
                            + "    nei = document.createElement('input');\n" // depends on control dependency: [if], data = [none]
                            + "    nei.setAttribute('type', 'hidden');\n" // depends on control dependency: [if], data = [none]
                            + "    nei.setAttribute('name', '"
                            + BASICLTI_SUBMIT
                            + "');\n" // depends on control dependency: [if], data = [none]
                            + "    nei.setAttribute('value', '"
                            + newMap.get(BASICLTI_SUBMIT)
                            + "');\n" // depends on control dependency: [if], data = [none]
                            + "    document.getElementById(\"ltiLaunchForm\").appendChild(nei);\n" // depends on control dependency: [if], data = [none]
                            + "    document.ltiLaunchForm.submit(); \n" + " </script> \n"); // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none]
        }
        String htmltext = text.toString();
        return htmltext;
    } }
|
public class class_name {
    /**
     * Copies HTTP, error, address and RPC-method information from the request
     * log onto the given tracing span.
     *
     * <p>Tags written: {@code http.host}, {@code http.method},
     * {@code http.path}, {@code http.url}, {@code http.status_code}, plus
     * {@code error}, {@code address.remote} and {@code address.local} when
     * available; the span is renamed after the RPC method when the request
     * content is an {@code RpcRequest}.</p>
     *
     * @param span the span to tag
     * @param log  the request log to read from; its authority must be set
     */
    public static void addTags(Span span, RequestLog log) {
        final String authority = log.requestHeaders().authority();
        assert authority != null;
        span.tag("http.host", authority);

        // Reconstruct the full request URL: scheme://authority/path[?query].
        final StringBuilder url = new StringBuilder();
        url.append(log.scheme().uriText());
        url.append("://");
        url.append(authority);
        url.append(log.path());
        if (log.query() != null) {
            url.append('?');
            url.append(log.query());
        }

        span.tag("http.method", log.method().name());
        span.tag("http.path", log.path());
        span.tag("http.url", url.toString());
        span.tag("http.status_code", log.status().codeAsText());

        final Throwable cause = log.responseCause();
        if (cause != null) {
            span.tag("error", cause.toString());
        }

        final SocketAddress remote = log.context().remoteAddress();
        if (remote != null) {
            span.tag("address.remote", remote.toString());
        }
        final SocketAddress local = log.context().localAddress();
        if (local != null) {
            span.tag("address.local", local.toString());
        }

        // For RPC-style requests, name the span after the invoked method.
        final Object content = log.requestContent();
        if (content instanceof RpcRequest) {
            span.name(((RpcRequest) content).method());
        }
    } }
|
public class class_name {
    // Control-dependency-annotated dataset variant of addTags: the inline
    // "// depends on control dependency: ..." comments are data markers that
    // record which construct guards each statement — they are not ordinary
    // documentation. The executable code is identical to the plain variant.
    public static void addTags(Span span, RequestLog log) {
        final String host = log.requestHeaders().authority();
        assert host != null;
        span.tag("http.host", host);
        final StringBuilder uriBuilder = new StringBuilder()
                .append(log.scheme().uriText())
                .append("://")
                .append(host)
                .append(log.path());
        if (log.query() != null) {
            uriBuilder.append('?').append(log.query()); // depends on control dependency: [if], data = [(log.query()]
        }
        span.tag("http.method", log.method().name())
            .tag("http.path", log.path())
            .tag("http.url", uriBuilder.toString())
            .tag("http.status_code", log.status().codeAsText());
        final Throwable responseCause = log.responseCause();
        if (responseCause != null) {
            span.tag("error", responseCause.toString()); // depends on control dependency: [if], data = [none]
        }
        final SocketAddress raddr = log.context().remoteAddress();
        if (raddr != null) {
            span.tag("address.remote", raddr.toString()); // depends on control dependency: [if], data = [none]
        }
        final SocketAddress laddr = log.context().localAddress();
        if (laddr != null) {
            span.tag("address.local", laddr.toString()); // depends on control dependency: [if], data = [none]
        }
        final Object requestContent = log.requestContent();
        if (requestContent instanceof RpcRequest) {
            span.name(((RpcRequest) requestContent).method()); // depends on control dependency: [if], data = [none]
        }
    }
}
|
public class class_name {
    /**
     * Serializes this object into its backing {@link ODocument}: the class
     * name, each index definition's serialized form, and each definition's
     * concrete class name. The document status is set to UNMARSHALLING for
     * the duration of the write and restored to LOADED in all cases.
     *
     * @return the populated backing document
     */
    @Override
    public ODocument toStream() {
        document.setInternalStatus(ORecordElement.STATUS.UNMARSHALLING);
        final List<ODocument> serializedDefinitions = new ArrayList<ODocument>(indexDefinitions.size());
        final List<String> definitionClassNames = new ArrayList<String>(indexDefinitions.size());
        try {
            document.field("className", className);
            for (final OIndexDefinition definition : indexDefinitions) {
                serializedDefinitions.add(definition.toStream());
                definitionClassNames.add(definition.getClass().getName());
            }
            document.field("indexDefinitions", serializedDefinitions, OType.EMBEDDEDLIST);
            document.field("indClasses", definitionClassNames, OType.EMBEDDEDLIST);
        } finally {
            // Always restore the record status, even when serialization fails.
            document.setInternalStatus(ORecordElement.STATUS.LOADED);
        }
        return document;
    }
}
|
public class class_name {
    // Control-dependency-annotated dataset variant of toStream: the
    // "// depends on control dependency: ..." comment lines are data markers
    // (not documentation) recording each statement's controlling construct.
    // The executable code is identical to the plain variant.
    @Override
    public ODocument toStream() {
        document.setInternalStatus(ORecordElement.STATUS.UNMARSHALLING);
        final List<ODocument> inds = new ArrayList<ODocument>(indexDefinitions.size());
        final List<String> indClasses = new ArrayList<String>(indexDefinitions.size());
        try {
            document.field("className", className);
            // depends on control dependency: [try], data = [none]
            for (final OIndexDefinition indexDefinition : indexDefinitions) {
                final ODocument indexDocument = indexDefinition.toStream();
                inds.add(indexDocument);
                // depends on control dependency: [for], data = [none]
                indClasses.add(indexDefinition.getClass().getName());
                // depends on control dependency: [for], data = [indexDefinition]
            }
            document.field("indexDefinitions", inds, OType.EMBEDDEDLIST);
            // depends on control dependency: [try], data = [none]
            document.field("indClasses", indClasses, OType.EMBEDDEDLIST);
            // depends on control dependency: [try], data = [none]
        } finally {
            document.setInternalStatus(ORecordElement.STATUS.LOADED);
        }
        return document;
    }
}
|
public class class_name {
    /**
     * Prints {@code msg} as a block at most once: later calls whose message
     * hashCode was already cached are ignored.
     * NOTE(review): the cache is keyed by hashCode, so two *different*
     * messages that collide on hashCode are also suppressed — confirm this
     * is acceptable for the call sites.
     */
    public static void printOnce(Object msg) {
        final int key = msg.hashCode();
        if (messagesCache.get(key) != null) {
            return;
        }
        JK.printBlock(msg);
        messagesCache.put(key, msg);
    }
}
|
public class class_name {
    // Control-dependency-annotated dataset variant of printOnce: the
    // "// depends on control dependency: ..." comment lines are data markers;
    // the executable code is identical to the plain variant.
    public static void printOnce(Object msg) {
        if (messagesCache.get(msg.hashCode()) == null) {
            JK.printBlock(msg);
            // depends on control dependency: [if], data = [none]
            messagesCache.put(msg.hashCode(), msg);
            // depends on control dependency: [if], data = [none]
        }
    }
}
|
public class class_name {
    /**
     * Converts a list of {@code Annotation}s sharing one definition into a
     * single {@code BELAnnotation}. The first element's definition is taken
     * as the common one; any later element with a different definition causes
     * an {@link IllegalStateException}. Returns null for a null input.
     */
    @Override
    public BELAnnotation convert(List<Annotation> a) {
        if (a == null) {
            return null;
        }
        final List<String> values = new ArrayList<String>();
        AnnotationDefinition definition = null;
        boolean first = true;
        for (final Annotation annotation : a) {
            if (first) {
                definition = annotation.getDefinition();
                first = false;
            } else if (definition != null && !definition.equals(annotation.getDefinition())) {
                throw new IllegalStateException(
                        "annotations must have equal definitions.");
            }
            values.add(annotation.getValue());
        }
        return new BELAnnotation(badc.convert(definition), values);
    }
}
|
public class class_name {
    // Control-dependency-annotated dataset variant of convert: the trailing
    // "// depends on control dependency: ..." comments are data markers;
    // the executable code is identical to the plain variant.
    @Override
    public BELAnnotation convert(List<Annotation> a) {
        if (a == null) {
            return null; // depends on control dependency: [if], data = [none]
        }
        List<String> values = new ArrayList<String>();
        AnnotationDefinition ad = null;
        for (int i = 0; i < a.size(); i++) {
            Annotation ann = a.get(i);
            if (i == 0) {
                ad = ann.getDefinition(); // depends on control dependency: [if], data = [none]
            } else if (ad != null && !ad.equals(ann.getDefinition())) {
                throw new IllegalStateException(
                        "annotations must have equal definitions.");
            }
            values.add(ann.getValue()); // depends on control dependency: [for], data = [none]
        }
        return new BELAnnotation(badc.convert(ad), values);
    }
}
|
public class class_name {
    /**
     * Persists the edited user: creates a new user when none exists yet,
     * otherwise saves the existing one, then stores the user settings and —
     * when the "send email" checkbox is both checked and enabled — sends the
     * notification mail. CmsExceptions are logged, not rethrown.
     */
    protected void save() {
        boolean newUser = false;
        try {
            if (m_user == null) {
                createNewUser();
                newUser = true;
            } else {
                saveUser();
            }
            saveUserSettings();
            // FIX: use short-circuit && instead of the non-short-circuit
            // bitwise & the original used on boolean operands.
            if (m_sendEmail.getValue().booleanValue() && m_sendEmail.isEnabled()) {
                sendMail(m_cms, m_pw.getPassword1(), m_user, newUser, m_forceResetPassword.getValue().booleanValue());
            }
        } catch (CmsException e) {
            LOG.error("Unable to save user", e);
        }
    }
}
|
public class class_name {
    // Control-dependency-annotated dataset variant of save: the trailing
    // "// depends on control dependency: ..." comments are data markers;
    // the executable code is identical to the plain variant (including the
    // non-short-circuit & — preserved here because this block is data).
    protected void save() {
        boolean newUser = false;
        try {
            if (m_user == null) {
                createNewUser(); // depends on control dependency: [if], data = [none]
                newUser = true; // depends on control dependency: [if], data = [none]
            } else {
                saveUser(); // depends on control dependency: [if], data = [none]
            }
            saveUserSettings(); // depends on control dependency: [try], data = [none]
            if (m_sendEmail.getValue().booleanValue() & m_sendEmail.isEnabled()) {
                sendMail(m_cms, m_pw.getPassword1(), m_user, newUser, m_forceResetPassword.getValue().booleanValue()); // depends on control dependency: [if], data = [none]
            }
        } catch (CmsException e) {
            LOG.error("Unable to save user", e);
        } // depends on control dependency: [catch], data = [none]
    }
}
|
public class class_name {
    /**
     * Tries to complete the WebSocket opening handshake from freshly received
     * bytes, accumulating partial data in {@code tmpHandshakeBytes} across
     * calls until a full handshake can be parsed.
     *
     * Server side: when no draft is negotiated yet, each known draft is tried
     * in turn against the buffered bytes; the first MATCHED draft writes the
     * handshake response and opens the connection. When a draft is already
     * set, the next handshake step is validated against it. Client side: the
     * server's response is validated against the stored
     * {@code handshakerequest}.
     *
     * @param socketBufferNew the newly read bytes (consumed by this call)
     * @return true once the handshake completed and the connection opened;
     *         false while more data is needed or after closing on an error
     */
    private boolean decodeHandshake( ByteBuffer socketBufferNew ) {
        ByteBuffer socketBuffer;
        // Either work directly on the new buffer (nothing carried over) or
        // append the new bytes to the buffered partial handshake, growing the
        // temporary buffer first when the new data would not fit.
        if( tmpHandshakeBytes.capacity() == 0 ) {
            socketBuffer = socketBufferNew;
        } else {
            if( tmpHandshakeBytes.remaining() < socketBufferNew.remaining() ) {
                ByteBuffer buf = ByteBuffer.allocate( tmpHandshakeBytes.capacity() + socketBufferNew.remaining() );
                tmpHandshakeBytes.flip();
                buf.put( tmpHandshakeBytes );
                tmpHandshakeBytes = buf;
            }
            tmpHandshakeBytes.put( socketBufferNew );
            tmpHandshakeBytes.flip();
            socketBuffer = tmpHandshakeBytes;
        }
        // Mark the start so each draft attempt can re-read from the beginning.
        socketBuffer.mark();
        try {
            HandshakeState handshakestate;
            try {
                if( role == Role.SERVER ) {
                    if( draft == null ) {
                        // No draft negotiated yet: try every known draft on a
                        // fresh copy; reset() rewinds to the mark each time.
                        for( Draft d : knownDrafts ) {
                            d = d.copyInstance();
                            try {
                                d.setParseMode( role );
                                socketBuffer.reset();
                                Handshakedata tmphandshake = d.translateHandshake( socketBuffer );
                                if( !( tmphandshake instanceof ClientHandshake ) ) {
                                    log.trace("Closing due to wrong handshake");
                                    closeConnectionDueToWrongHandshake( new InvalidDataException( CloseFrame.PROTOCOL_ERROR, "wrong http function" ) );
                                    return false;
                                }
                                ClientHandshake handshake = ( ClientHandshake ) tmphandshake;
                                handshakestate = d.acceptHandshakeAsServer( handshake );
                                if( handshakestate == HandshakeState.MATCHED ) {
                                    resourceDescriptor = handshake.getResourceDescriptor();
                                    ServerHandshakeBuilder response;
                                    try {
                                        response = wsl.onWebsocketHandshakeReceivedAsServer( this, d, handshake );
                                    } catch ( InvalidDataException e ) {
                                        log.trace("Closing due to wrong handshake. Possible handshake rejection", e);
                                        closeConnectionDueToWrongHandshake( e );
                                        return false;
                                    } catch ( RuntimeException e ) {
                                        log.error("Closing due to internal server error", e);
                                        wsl.onWebsocketError( this, e );
                                        closeConnectionDueToInternalServerError( e );
                                        return false;
                                    }
                                    write( d.createHandshake( d.postProcessHandshakeResponseAsServer( handshake, response ) ) );
                                    draft = d;
                                    open( handshake );
                                    return true;
                                }
                            } catch ( InvalidHandshakeException e ) {
                                // go on with an other draft
                            }
                        }
                        if( draft == null ) {
                            log.trace("Closing due to protocol error: no draft matches");
                            closeConnectionDueToWrongHandshake( new InvalidDataException( CloseFrame.PROTOCOL_ERROR, "no draft matches" ) );
                        }
                        return false;
                    } else {
                        // special case for multiple step handshakes
                        Handshakedata tmphandshake = draft.translateHandshake( socketBuffer );
                        if( !( tmphandshake instanceof ClientHandshake ) ) {
                            log.trace("Closing due to protocol error: wrong http function");
                            flushAndClose( CloseFrame.PROTOCOL_ERROR, "wrong http function", false );
                            return false;
                        }
                        ClientHandshake handshake = ( ClientHandshake ) tmphandshake;
                        handshakestate = draft.acceptHandshakeAsServer( handshake );
                        if( handshakestate == HandshakeState.MATCHED ) {
                            open( handshake );
                            return true;
                        } else {
                            log.trace("Closing due to protocol error: the handshake did finally not match");
                            close( CloseFrame.PROTOCOL_ERROR, "the handshake did finally not match" );
                        }
                        return false;
                    }
                } else if( role == Role.CLIENT ) {
                    // Client: validate the server's handshake response against
                    // the request this client originally sent.
                    draft.setParseMode( role );
                    Handshakedata tmphandshake = draft.translateHandshake( socketBuffer );
                    if( !( tmphandshake instanceof ServerHandshake ) ) {
                        log.trace("Closing due to protocol error: wrong http function");
                        flushAndClose( CloseFrame.PROTOCOL_ERROR, "wrong http function", false );
                        return false;
                    }
                    ServerHandshake handshake = ( ServerHandshake ) tmphandshake;
                    handshakestate = draft.acceptHandshakeAsClient( handshakerequest, handshake );
                    if( handshakestate == HandshakeState.MATCHED ) {
                        try {
                            wsl.onWebsocketHandshakeReceivedAsClient( this, handshakerequest, handshake );
                        } catch ( InvalidDataException e ) {
                            log.trace("Closing due to invalid data exception. Possible handshake rejection", e);
                            flushAndClose( e.getCloseCode(), e.getMessage(), false );
                            return false;
                        } catch ( RuntimeException e ) {
                            log.error("Closing since client was never connected", e);
                            wsl.onWebsocketError( this, e );
                            flushAndClose( CloseFrame.NEVER_CONNECTED, e.getMessage(), false );
                            return false;
                        }
                        open( handshake );
                        return true;
                    } else {
                        log.trace("Closing due to protocol error: draft {} refuses handshake", draft );
                        close( CloseFrame.PROTOCOL_ERROR, "draft " + draft + " refuses handshake" );
                    }
                }
            } catch ( InvalidHandshakeException e ) {
                log.trace("Closing due to invalid handshake", e);
                close( e );
            }
        } catch ( IncompleteHandshakeException e ) {
            // Not enough data yet: stash the bytes for the next call, sizing
            // the buffer from the parser's preferred-size hint when given.
            if( tmpHandshakeBytes.capacity() == 0 ) {
                socketBuffer.reset();
                int newsize = e.getPreferredSize();
                if( newsize == 0 ) {
                    newsize = socketBuffer.capacity() + 16;
                } else {
                    assert ( e.getPreferredSize() >= socketBuffer.remaining() );
                }
                tmpHandshakeBytes = ByteBuffer.allocate( newsize );
                tmpHandshakeBytes.put( socketBufferNew );
                // tmpHandshakeBytes.flip();
            } else {
                // Reopen the already-used temp buffer for further appends.
                tmpHandshakeBytes.position( tmpHandshakeBytes.limit() );
                tmpHandshakeBytes.limit( tmpHandshakeBytes.capacity() );
            }
        }
        return false;
    }
}
|
public class class_name {
    // Control-dependency-annotated dataset variant of decodeHandshake: the
    // trailing "// depends on control dependency: ..." comments are data
    // markers recording each statement's controlling construct; the
    // executable code is identical to the plain variant.
    private boolean decodeHandshake( ByteBuffer socketBufferNew ) {
        ByteBuffer socketBuffer;
        if( tmpHandshakeBytes.capacity() == 0 ) {
            socketBuffer = socketBufferNew; // depends on control dependency: [if], data = [none]
        } else {
            if( tmpHandshakeBytes.remaining() < socketBufferNew.remaining() ) {
                ByteBuffer buf = ByteBuffer.allocate( tmpHandshakeBytes.capacity() + socketBufferNew.remaining() );
                tmpHandshakeBytes.flip(); // depends on control dependency: [if], data = [none]
                buf.put( tmpHandshakeBytes ); // depends on control dependency: [if], data = [none]
                tmpHandshakeBytes = buf; // depends on control dependency: [if], data = [none]
            }
            tmpHandshakeBytes.put( socketBufferNew ); // depends on control dependency: [if], data = [none]
            tmpHandshakeBytes.flip(); // depends on control dependency: [if], data = [none]
            socketBuffer = tmpHandshakeBytes; // depends on control dependency: [if], data = [none]
        }
        socketBuffer.mark();
        try {
            HandshakeState handshakestate;
            try {
                if( role == Role.SERVER ) {
                    if( draft == null ) {
                        for( Draft d : knownDrafts ) {
                            d = d.copyInstance(); // depends on control dependency: [for], data = [d]
                            try {
                                d.setParseMode( role ); // depends on control dependency: [try], data = [none]
                                socketBuffer.reset(); // depends on control dependency: [try], data = [none]
                                Handshakedata tmphandshake = d.translateHandshake( socketBuffer );
                                if( !( tmphandshake instanceof ClientHandshake ) ) {
                                    log.trace("Closing due to wrong handshake"); // depends on control dependency: [if], data = [none]
                                    closeConnectionDueToWrongHandshake( new InvalidDataException( CloseFrame.PROTOCOL_ERROR, "wrong http function" ) ); // depends on control dependency: [if], data = [none]
                                    return false; // depends on control dependency: [if], data = [none]
                                }
                                ClientHandshake handshake = ( ClientHandshake ) tmphandshake;
                                handshakestate = d.acceptHandshakeAsServer( handshake ); // depends on control dependency: [try], data = [none]
                                if( handshakestate == HandshakeState.MATCHED ) {
                                    resourceDescriptor = handshake.getResourceDescriptor(); // depends on control dependency: [if], data = [none]
                                    ServerHandshakeBuilder response;
                                    try {
                                        response = wsl.onWebsocketHandshakeReceivedAsServer( this, d, handshake ); // depends on control dependency: [try], data = [none]
                                    } catch ( InvalidDataException e ) {
                                        log.trace("Closing due to wrong handshake. Possible handshake rejection", e);
                                        closeConnectionDueToWrongHandshake( e );
                                        return false;
                                    } catch ( RuntimeException e ) { // depends on control dependency: [catch], data = [none]
                                        log.error("Closing due to internal server error", e);
                                        wsl.onWebsocketError( this, e );
                                        closeConnectionDueToInternalServerError( e );
                                        return false;
                                    } // depends on control dependency: [catch], data = [none]
                                    write( d.createHandshake( d.postProcessHandshakeResponseAsServer( handshake, response ) ) ); // depends on control dependency: [if], data = [none]
                                    draft = d; // depends on control dependency: [if], data = [none]
                                    open( handshake ); // depends on control dependency: [if], data = [none]
                                    return true; // depends on control dependency: [if], data = [none]
                                }
                            } catch ( InvalidHandshakeException e ) {
                                // go on with an other draft
                            } // depends on control dependency: [catch], data = [none]
                        }
                        if( draft == null ) {
                            log.trace("Closing due to protocol error: no draft matches"); // depends on control dependency: [if], data = [none]
                            closeConnectionDueToWrongHandshake( new InvalidDataException( CloseFrame.PROTOCOL_ERROR, "no draft matches" ) ); // depends on control dependency: [if], data = [none]
                        }
                        return false; // depends on control dependency: [if], data = [none]
                    } else {
                        // special case for multiple step handshakes
                        Handshakedata tmphandshake = draft.translateHandshake( socketBuffer );
                        if( !( tmphandshake instanceof ClientHandshake ) ) {
                            log.trace("Closing due to protocol error: wrong http function"); // depends on control dependency: [if], data = [none]
                            flushAndClose( CloseFrame.PROTOCOL_ERROR, "wrong http function", false ); // depends on control dependency: [if], data = [none]
                            return false; // depends on control dependency: [if], data = [none]
                        }
                        ClientHandshake handshake = ( ClientHandshake ) tmphandshake;
                        handshakestate = draft.acceptHandshakeAsServer( handshake ); // depends on control dependency: [if], data = [none]
                        if( handshakestate == HandshakeState.MATCHED ) {
                            open( handshake ); // depends on control dependency: [if], data = [none]
                            return true; // depends on control dependency: [if], data = [none]
                        } else {
                            log.trace("Closing due to protocol error: the handshake did finally not match"); // depends on control dependency: [if], data = [none]
                            close( CloseFrame.PROTOCOL_ERROR, "the handshake did finally not match" ); // depends on control dependency: [if], data = [none]
                        }
                        return false; // depends on control dependency: [if], data = [none]
                    }
                } else if( role == Role.CLIENT ) {
                    draft.setParseMode( role ); // depends on control dependency: [if], data = [( role]
                    Handshakedata tmphandshake = draft.translateHandshake( socketBuffer );
                    if( !( tmphandshake instanceof ServerHandshake ) ) {
                        log.trace("Closing due to protocol error: wrong http function"); // depends on control dependency: [if], data = [none]
                        flushAndClose( CloseFrame.PROTOCOL_ERROR, "wrong http function", false ); // depends on control dependency: [if], data = [none]
                        return false; // depends on control dependency: [if], data = [none]
                    }
                    ServerHandshake handshake = ( ServerHandshake ) tmphandshake;
                    handshakestate = draft.acceptHandshakeAsClient( handshakerequest, handshake ); // depends on control dependency: [if], data = [none]
                    if( handshakestate == HandshakeState.MATCHED ) {
                        try {
                            wsl.onWebsocketHandshakeReceivedAsClient( this, handshakerequest, handshake ); // depends on control dependency: [try], data = [none]
                        } catch ( InvalidDataException e ) {
                            log.trace("Closing due to invalid data exception. Possible handshake rejection", e);
                            flushAndClose( e.getCloseCode(), e.getMessage(), false );
                            return false;
                        } catch ( RuntimeException e ) { // depends on control dependency: [catch], data = [none]
                            log.error("Closing since client was never connected", e);
                            wsl.onWebsocketError( this, e );
                            flushAndClose( CloseFrame.NEVER_CONNECTED, e.getMessage(), false );
                            return false;
                        } // depends on control dependency: [catch], data = [none]
                        open( handshake ); // depends on control dependency: [if], data = [none]
                        return true; // depends on control dependency: [if], data = [none]
                    } else {
                        log.trace("Closing due to protocol error: draft {} refuses handshake", draft ); // depends on control dependency: [if], data = [none]
                        close( CloseFrame.PROTOCOL_ERROR, "draft " + draft + " refuses handshake" ); // depends on control dependency: [if], data = [none]
                    }
                }
            } catch ( InvalidHandshakeException e ) {
                log.trace("Closing due to invalid handshake", e);
                close( e );
            } // depends on control dependency: [catch], data = [none]
        } catch ( IncompleteHandshakeException e ) {
            if( tmpHandshakeBytes.capacity() == 0 ) {
                socketBuffer.reset(); // depends on control dependency: [if], data = [none]
                int newsize = e.getPreferredSize();
                if( newsize == 0 ) {
                    newsize = socketBuffer.capacity() + 16; // depends on control dependency: [if], data = [none]
                } else {
                    assert ( e.getPreferredSize() >= socketBuffer.remaining() ); // depends on control dependency: [if], data = [none]
                }
                tmpHandshakeBytes = ByteBuffer.allocate( newsize ); // depends on control dependency: [if], data = [none]
                tmpHandshakeBytes.put( socketBufferNew ); // depends on control dependency: [if], data = [none]
                // tmpHandshakeBytes.flip();
            } else {
                tmpHandshakeBytes.position( tmpHandshakeBytes.limit() ); // depends on control dependency: [if], data = [none]
                tmpHandshakeBytes.limit( tmpHandshakeBytes.capacity() ); // depends on control dependency: [if], data = [( tmpHandshakeBytes.capacity()]
            }
        } // depends on control dependency: [catch], data = [none]
        return false;
    }
}
|
public class class_name {
    /**
     * Adds each element of {@code values} to the value set stored under
     * {@code key} in {@code map}, delegating to {@code putSet} per element.
     */
    public static <K, V> void putSetMult(Map<K, Set<V>> map, K key,
            Collection<? extends V> values) {
        for (final V element : values) {
            putSet(map, key, element);
        }
    }
}
|
public class class_name {
    // Control-dependency-annotated dataset variant of putSetMult: the trailing
    // "// depends on control dependency: ..." comment is a data marker;
    // the executable code is identical to the plain variant.
    public static <K, V> void putSetMult(Map<K, Set<V>> map, K key,
            Collection<? extends V> values) {
        for (V value : values) {
            putSet(map, key, value); // depends on control dependency: [for], data = [value]
        }
    }
}
|
public class class_name {
    /**
     * Removes {@code group_id} from the group-id list of the given path
     * profile and persists the updated comma-separated string. The stored
     * format keeps a comma after every id (trailing comma included), exactly
     * as the original implementation produced.
     *
     * Improvement: builds the string with a StringBuilder instead of O(n^2)
     * String concatenation in the loop.
     */
    public void removeGroupFromPathProfile(int group_id, int pathId, int profileId) {
        final int[] groupIds = Utils.arrayFromStringOfIntegers(getGroupIdsInPathProfile(profileId,
                pathId));
        final StringBuilder newGroupIds = new StringBuilder();
        for (final int id : groupIds) {
            if (id != group_id) {
                newGroupIds.append(id).append(',');
            }
        }
        EditService.updatePathTable(Constants.PATH_PROFILE_GROUP_IDS, newGroupIds.toString(), pathId);
    }
}
|
public class class_name {
    // Control-dependency-annotated dataset variant of
    // removeGroupFromPathProfile: the trailing "// depends on ..." comment is
    // a data marker; the executable code is identical to the plain variant.
    public void removeGroupFromPathProfile(int group_id, int pathId, int profileId) {
        int[] groupIds = Utils.arrayFromStringOfIntegers(getGroupIdsInPathProfile(profileId,
                pathId));
        String newGroupIds = "";
        for (int i = 0; i < groupIds.length; i++) {
            if (groupIds[i] != group_id) {
                newGroupIds += (groupIds[i] + ","); // depends on control dependency: [if], data = [(groupIds[i]]
            }
        }
        EditService.updatePathTable(Constants.PATH_PROFILE_GROUP_IDS, newGroupIds, pathId);
    }
}
|
public class class_name {
    /**
     * Returns the l-diversity configuration when the type case is 4
     * (presumably the generated protobuf oneof case for this field — the
     * typeCase_/type_ naming suggests generated code); otherwise the default
     * (empty) instance.
     */
    public com.google.privacy.dlp.v2.PrivacyMetric.LDiversityConfig getLDiversityConfig() {
        return (typeCase_ == 4)
                ? (com.google.privacy.dlp.v2.PrivacyMetric.LDiversityConfig) type_
                : com.google.privacy.dlp.v2.PrivacyMetric.LDiversityConfig.getDefaultInstance();
    }
}
|
public class class_name {
    // Control-dependency-annotated dataset variant of getLDiversityConfig:
    // the trailing "// depends on ..." comment is a data marker; the
    // executable code is identical to the plain variant.
    public com.google.privacy.dlp.v2.PrivacyMetric.LDiversityConfig getLDiversityConfig() {
        if (typeCase_ == 4) {
            return (com.google.privacy.dlp.v2.PrivacyMetric.LDiversityConfig) type_; // depends on control dependency: [if], data = [none]
        }
        return com.google.privacy.dlp.v2.PrivacyMetric.LDiversityConfig.getDefaultInstance();
    }
}
|
public class class_name {
    /**
     * Returns a localized reason why this resource cannot be edited, or the
     * empty string when editing is allowed. Checks, in order: historical
     * resource, missing write permission / not editable, and lock held by
     * another user (publish lock vs. user lock).
     *
     * @param locale           the locale for the reason message
     * @param ignoreExpiration whether to ignore resource release/expiration
     * @throws CmsException if the permission check fails
     */
    public String getNoEditReason(Locale locale, boolean ignoreExpiration) throws CmsException {
        if (m_resource instanceof I_CmsHistoryResource) {
            return Messages.get().getBundle(locale).key(Messages.GUI_NO_EDIT_REASON_HISTORY_0);
        }
        final CmsResourceFilter filter =
            ignoreExpiration ? CmsResourceFilter.IGNORE_EXPIRATION : CmsResourceFilter.DEFAULT;
        if (!m_cms.hasPermissions(m_resource, CmsPermissionSet.ACCESS_WRITE, false, filter) || !isEditable()) {
            return Messages.get().getBundle(locale).key(Messages.GUI_NO_EDIT_REASON_PERMISSION_0);
        }
        if (!getLock().isLockableBy(m_cms.getRequestContext().getCurrentUser())) {
            if (getLock().getSystemLock().isPublish()) {
                return Messages.get().getBundle(locale).key(Messages.GUI_PUBLISH_TOOLTIP_0);
            }
            return Messages.get().getBundle(locale).key(Messages.GUI_NO_EDIT_REASON_LOCK_1, getLockedByName());
        }
        return "";
    }
}
|
public class class_name {
    // Control-dependency-annotated dataset variant of getNoEditReason: the
    // trailing "// depends on ..." comments are data markers; the executable
    // code is identical to the plain variant.
    public String getNoEditReason(Locale locale, boolean ignoreExpiration) throws CmsException {
        String reason = "";
        if (m_resource instanceof I_CmsHistoryResource) {
            reason = Messages.get().getBundle(locale).key(Messages.GUI_NO_EDIT_REASON_HISTORY_0);
        } else if (!m_cms.hasPermissions(
            m_resource,
            CmsPermissionSet.ACCESS_WRITE,
            false,
            ignoreExpiration ? CmsResourceFilter.IGNORE_EXPIRATION : CmsResourceFilter.DEFAULT) || !isEditable()) {
            reason = Messages.get().getBundle(locale).key(Messages.GUI_NO_EDIT_REASON_PERMISSION_0);
        } else if (!getLock().isLockableBy(m_cms.getRequestContext().getCurrentUser())) {
            if (getLock().getSystemLock().isPublish()) {
                reason = Messages.get().getBundle(locale).key(Messages.GUI_PUBLISH_TOOLTIP_0); // depends on control dependency: [if], data = [none]
            } else {
                reason = Messages.get().getBundle(locale).key(Messages.GUI_NO_EDIT_REASON_LOCK_1, getLockedByName()); // depends on control dependency: [if], data = [none]
            }
        }
        return reason;
    }
}
|
public class class_name {
    /**
     * Clears all dynamically added bundle text fields from the form, then
     * shows/checks the "keep template" control only when a site template
     * value is present.
     */
    void resetFields() {
        if (m_bundleComponentKeyMap != null) {
            for (final TextField bundleField : m_bundleComponentKeyMap.keySet()) {
                m_bundleValues.removeComponent(bundleField);
            }
            m_bundleComponentKeyMap.clear();
        }
        final boolean hasTemplate =
            !CmsStringUtil.isEmptyOrWhitespaceOnly(m_fieldLoadSiteTemplate.getValue());
        m_fieldKeepTemplate.setVisible(hasTemplate);
        m_fieldKeepTemplate.setValue(Boolean.valueOf(hasTemplate));
    }
}
|
public class class_name {
    // Control-dependency-annotated dataset variant of resetFields: the
    // trailing "// depends on ..." comments are data markers; the executable
    // code is identical to the plain variant.
    void resetFields() {
        if (m_bundleComponentKeyMap != null) {
            Set<TextField> setBundles = m_bundleComponentKeyMap.keySet();
            for (TextField field : setBundles) {
                m_bundleValues.removeComponent(field); // depends on control dependency: [for], data = [field]
            }
            m_bundleComponentKeyMap.clear(); // depends on control dependency: [if], data = [none]
        }
        m_fieldKeepTemplate.setVisible(!CmsStringUtil.isEmptyOrWhitespaceOnly(m_fieldLoadSiteTemplate.getValue()));
        m_fieldKeepTemplate.setValue(
            Boolean.valueOf(!CmsStringUtil.isEmptyOrWhitespaceOnly(m_fieldLoadSiteTemplate.getValue())));
    }
}
|
public class class_name {
    /**
     * Paints the button background: first the outer and inner focus rings
     * (when focused), then — outside tool bars, or always for textured
     * buttons — the drop shadow (only when unfocused), the border fill, and
     * the interior fill. Paint order is significant and preserved.
     */
    public void doPaint(Graphics2D g, JComponent c, int width, int height, Object[] extendedCacheKeys) {
        final SegmentType segmentStatus = getSegmentType(c);
        // Vertically center the (possibly smaller) effective button height.
        final int buttonHeight = getButtonHeight(c, height);
        final int yOffset = (height - buttonHeight) / 2;
        height = buttonHeight;

        g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);

        final int x = focusInsets.left;
        final int y = focusInsets.top + yOffset;
        width -= focusInsets.left + focusInsets.right;
        height -= focusInsets.top + focusInsets.bottom;

        final boolean useToolBarFocus = isInToolBar(c);
        Shape shape;
        if (focused) {
            shape = createOuterFocus(segmentStatus, x, y, width, height);
            g.setPaint(getFocusPaint(shape, FocusType.OUTER_FOCUS, useToolBarFocus));
            g.draw(shape);
            shape = createInnerFocus(segmentStatus, x, y, width, height);
            g.setPaint(getFocusPaint(shape, FocusType.INNER_FOCUS, useToolBarFocus));
            g.draw(shape);
        }
        if (!isInToolBar(c) || this instanceof TexturedButtonPainter) {
            shape = createBorder(segmentStatus, x, y, width, height);
            if (!focused) {
                dropShadow.fill(g, shape);
            }
            g.setPaint(getCommonBorderPaint(shape, type));
            g.fill(shape);
            shape = createInterior(segmentStatus, x, y, width, height);
            g.setPaint(getCommonInteriorPaint(shape, type));
            g.fill(shape);
        }
    }
}
|
public class class_name {
    // Control-dependency-annotated dataset variant of doPaint: the trailing
    // "// depends on ..." comments are data markers; the executable code is
    // identical to the plain variant.
    public void doPaint(Graphics2D g, JComponent c, int width, int height, Object[] extendedCacheKeys) {
        SegmentType segmentStatus = getSegmentType(c);
        int newHeight = getButtonHeight(c, height);
        int yOffset = (height - newHeight) / 2;
        height = newHeight;
        g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
        int x = focusInsets.left;
        int y = focusInsets.top + yOffset;
        width -= focusInsets.left + focusInsets.right;
        height -= focusInsets.top + focusInsets.bottom;
        boolean useToolBarFocus = isInToolBar(c);
        Shape s;
        if (focused) {
            s = createOuterFocus(segmentStatus, x, y, width, height); // depends on control dependency: [if], data = [none]
            g.setPaint(getFocusPaint(s, FocusType.OUTER_FOCUS, useToolBarFocus)); // depends on control dependency: [if], data = [none]
            g.draw(s); // depends on control dependency: [if], data = [none]
            s = createInnerFocus(segmentStatus, x, y, width, height); // depends on control dependency: [if], data = [none]
            g.setPaint(getFocusPaint(s, FocusType.INNER_FOCUS, useToolBarFocus)); // depends on control dependency: [if], data = [none]
            g.draw(s); // depends on control dependency: [if], data = [none]
        }
        if (!isInToolBar(c) || this instanceof TexturedButtonPainter) {
            s = createBorder(segmentStatus, x, y, width, height); // depends on control dependency: [if], data = [none]
            if (!focused) {
                dropShadow.fill(g, s); // depends on control dependency: [if], data = [none]
            }
            g.setPaint(getCommonBorderPaint(s, type)); // depends on control dependency: [if], data = [none]
            g.fill(s); // depends on control dependency: [if], data = [none]
            s = createInterior(segmentStatus, x, y, width, height); // depends on control dependency: [if], data = [none]
            g.setPaint(getCommonInteriorPaint(s, type)); // depends on control dependency: [if], data = [none]
            g.fill(s); // depends on control dependency: [if], data = [none]
        }
    }
}
|
public class class_name {
    /**
     * State factory: builds an {@code InfinispanKmeansState} for the given
     * partition, applies the optional merge-interval and lifespan settings
     * (only when positive), initializes the state and returns it.
     */
    @SuppressWarnings("rawtypes")
    @Override
    public State makeState(Map conf, IMetricsContext metrics, int partitionIndex, int numPartitions)
    {
        final InfinispanKmeansState state =
            new InfinispanKmeansState(partitionIndex, numPartitions, this.servers, this.cacheName);
        if (this.mergeInterval > 0) {
            state.setMergeInterval(this.mergeInterval);
        }
        if (this.lifespan > 0) {
            state.setLifespan(this.lifespan);
        }
        state.initialize();
        return state;
    }
}
|
public class class_name {
    // Control-dependency-annotated dataset variant of makeState: the trailing
    // "// depends on ..." comments are data markers; the executable code is
    // identical to the plain variant.
    @SuppressWarnings("rawtypes")
    @Override
    public State makeState(Map conf, IMetricsContext metrics, int partitionIndex, int numPartitions)
    {
        InfinispanKmeansState resultState = new InfinispanKmeansState(partitionIndex,
            numPartitions, this.servers, this.cacheName);
        if (this.mergeInterval > 0)
        {
            resultState.setMergeInterval(this.mergeInterval); // depends on control dependency: [if], data = [(this.mergeInterval]
        }
        if (this.lifespan > 0)
        {
            resultState.setLifespan(this.lifespan); // depends on control dependency: [if], data = [(this.lifespan]
        }
        resultState.initialize();
        return resultState;
    }
}
|
public class class_name {
    /**
     * ANTLR-generated lexer rule for RULE_OPEN: matches a '(' followed by any
     * run of RULE_WS whitespace (per the grammar-position comments emitted by
     * the generator), then records the token type and channel on the lexer
     * state. Generated code — the matching logic should not be hand-edited.
     */
    public final void mRULE_OPEN() throws RecognitionException {
        try {
            int _type = RULE_OPEN;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // InternalSimpleAntlr.g:1730:11: ( '(' ( RULE_WS )* )
            // InternalSimpleAntlr.g:1730:13: '(' ( RULE_WS )*
            {
            match('(');
            // InternalSimpleAntlr.g:1730:17: ( RULE_WS )*
            // Greedy loop: keep consuming RULE_WS while the lookahead is
            // whitespace (tab, newline, carriage return, or space).
            loop1:
            do {
                int alt1=2;
                int LA1_0 = input.LA(1);
                if ( ((LA1_0>='\t' && LA1_0<='\n')||LA1_0=='\r'||LA1_0==' ') ) {
                    alt1=1;
                }
                switch (alt1) {
                    case 1 :
                        // InternalSimpleAntlr.g:1730:17: RULE_WS
                        {
                        mRULE_WS();
                        }
                        break;
                    default :
                        break loop1;
                }
            } while (true);
            }
            state.type = _type;
            state.channel = _channel;
        }
        // Empty finally block is emitted by the ANTLR code generator.
        finally {
        }
    }
}
|
public class class_name {
    // Control-dependency-annotated dataset variant of mRULE_OPEN (ANTLR-
    // generated lexer rule): the trailing "// depends on ..." comment is a
    // data marker; the executable code is identical to the plain variant.
    public final void mRULE_OPEN() throws RecognitionException {
        try {
            int _type = RULE_OPEN;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // InternalSimpleAntlr.g:1730:11: ( '(' ( RULE_WS )* )
            // InternalSimpleAntlr.g:1730:13: '(' ( RULE_WS )*
            {
            match('(');
            // InternalSimpleAntlr.g:1730:17: ( RULE_WS )*
            loop1:
            do {
                int alt1=2;
                int LA1_0 = input.LA(1);
                if ( ((LA1_0>='\t' && LA1_0<='\n')||LA1_0=='\r'||LA1_0==' ') ) {
                    alt1=1; // depends on control dependency: [if], data = [none]
                }
                switch (alt1) {
                    case 1 :
                        // InternalSimpleAntlr.g:1730:17: RULE_WS
                        {
                        mRULE_WS();
                        }
                        break;
                    default :
                        break loop1;
                }
            } while (true);
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }
}
|
public class class_name {
    /**
     * Logs an error built from a message pattern and a single substitution
     * argument, formatting only when the delegate logger has the error level
     * enabled.
     *
     * Improvement: the original performed an unchecked {@code (String)} cast
     * on the {@code Object} pattern, throwing ClassCastException for any
     * non-String pattern; {@code String.valueOf} accepts any Object (and
     * renders null as "null").
     *
     * @param messagePattern the message pattern (any Object; stringified)
     * @param arg            the single substitution argument
     */
    public void error( Object messagePattern, Object arg )
    {
        if( m_delegate.isErrorEnabled() )
        {
            String msgStr = String.valueOf( messagePattern );
            msgStr = MessageFormatter.format( msgStr, arg );
            m_delegate.error( msgStr, null );
        }
    }
}
|
public class class_name {
    // Control-dependency-annotated dataset variant of error: the trailing
    // "// depends on ..." comments are data markers; the executable code is
    // identical to the plain variant (the unchecked (String) cast is kept
    // because this block is data).
    public void error( Object messagePattern, Object arg )
    {
        if( m_delegate.isErrorEnabled() )
        {
            String msgStr = (String) messagePattern;
            msgStr = MessageFormatter.format( msgStr, arg ); // depends on control dependency: [if], data = [none]
            m_delegate.error( msgStr, null ); // depends on control dependency: [if], data = [none]
        }
    }
}
|
public class class_name {
    /**
     * Returns the value stored for {@code key} in the task map, or the empty
     * string when no value is present.
     *
     * Improvement: the map is consulted once instead of twice (the original
     * called get() for both the null check and the return). The raw
     * {@code Enum} parameter type is kept for caller compatibility.
     */
    public String getStringValue(Enum key) {
        final String value = this._task.get(key);
        return (value == null) ? "" : value;
    }
}
|
public class class_name {
    // Control-dependency-annotated dataset variant of getStringValue: the
    // trailing "// depends on ..." comments are data markers; the executable
    // code is identical to the plain variant.
    public String getStringValue(Enum key) {
        if (this._task.get(key) == null) {
            return ""; // depends on control dependency: [if], data = [none]
        } else {
            return this._task.get(key); // depends on control dependency: [if], data = [none]
        }
    }
}
|
public class class_name {
    /** Returns the name of the current phase id, or null when none is set. */
    public String getPhase() {
        final PhaseId phaseId = getPhaseId();
        return (phaseId == null) ? null : phaseId.getName();
    }
}
|
public class class_name {
    // Control-dependency-annotated dataset variant of getPhase: the trailing
    // "// depends on ..." comment is a data marker; the executable code is
    // identical to the plain variant.
    public String getPhase() {
        PhaseId myPhaseId = getPhaseId();
        String result = null;
        if (null != myPhaseId) {
            result = myPhaseId.getName(); // depends on control dependency: [if], data = [none]
        }
        return result;
    }
}
|
public class class_name {
    /**
     * Loads the Diameter dictionary (vendors, type definitions, AVPs and
     * commands) from the given XML input stream, replacing any previously
     * parsed state. On any failure the validator is disabled and the error is
     * logged; the stream is always closed.
     *
     * Fix: the name-map comparator now returns 0 for compare(null, null) —
     * the original returned 1, violating the Comparator contract
     * (reflexivity) that TreeMap relies on for null keys.
     *
     * @param is the dictionary XML stream; when null the method logs and returns
     */
    @Override
    public void configure(InputStream is) {
        if (is == null) {
            logger.error("No input stream to configure dictionary from?");
            return;
        }
        try {
            long startTime = System.currentTimeMillis();
            // Name index; nulls sort after all non-null keys.
            this.avpByNameMap = new TreeMap<String, AvpRepresentation>(new Comparator<String>() {
                @Override
                public int compare(String o1, String o2) {
                    if (o1 == null) {
                        return (o2 == null) ? 0 : 1; // FIX: null==null must compare equal
                    }
                    return (o2 == null) ? -1 : o1.compareTo(o2);
                }
            });
            this.vendorMap = new HashMap<String, String>();
            this.typedefMap = new HashMap<String, String>();
            this.avpMap = new HashMap<AvpRepresentation, AvpRepresentation>();
            this.commandMap = new HashMap<MessageRepresentation, MessageRepresentation>();
            // NOTE(review): no XXE hardening on the XML parser; acceptable for
            // a trusted local dictionary file — confirm the stream is never
            // attacker-supplied.
            DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
            dbf.setValidating(false);
            DocumentBuilder db = dbf.newDocumentBuilder();
            Document doc = db.parse(is);
            doc.getDocumentElement().normalize();
            this.parseVendors(doc);
            this.parseTypeDefs(doc);
            this.parseAvps(doc);
            this.parseCommands(doc);
            this.configured = true;
            long endTime = System.currentTimeMillis();
            if (logger.isInfoEnabled()) {
                logger.info("Mobicents Diameter Dictionary loaded in {}ms -- Vendors[{}] Commands[{}] Types[{}] AVPs[{}]",
                    new Object[] { (endTime - startTime), vendorMap.size(), commandMap.size(), typedefMap.size(), avpMap.size() });
            }
            if (logger.isInfoEnabled()) {
                // Report any AVPs whose definitions were only partially resolved.
                StringBuffer sb = new StringBuffer();
                int c = 0;
                for (AvpRepresentation key : this.avpMap.keySet()) {
                    if (this.avpMap.get(key).isWeak()) {
                        c++;
                        sb.append("---------------------------------\n").append("Found incomplete AVP definition:\n").append(this.avpMap.get(key)).append("\n");
                    }
                }
                if (c > 0) {
                    sb.append("------- TOTAL INCOMPLETE AVPS COUNT: ").append(c).append(" -------");
                    logger.info(sb.toString());
                }
            }
        }
        catch (Exception e) {
            this.enabled = false;
            this.configured = false;
            logger.error("Failed to parse validator configuration. Validator disabled.", e);
        }
        finally {
            try {
                is.close();
            }
            catch (IOException e) {
                logger.debug("Failed to close InputStream for Dictionary XML.", e);
            }
        }
    }
}
|
public class class_name {
    /**
     * Dependency-annotated variant of {@code configure(InputStream)}: loads
     * the Diameter dictionary XML, resets the lookup maps, parses vendors,
     * typedefs, AVPs and commands, and disables the validator on any parse
     * failure; the stream is always closed. The standalone and trailing
     * "depends on control dependency" comments are pre-existing analysis
     * annotations and are preserved verbatim.
     */
    @Override
    public void configure(InputStream is) {
        if (is == null) {
            logger.error("No input stream to configure dictionary from?");
            // depends on control dependency: [if], data = [none]
            return;
            // depends on control dependency: [if], data = [none]
        }
        try {
            long startTime = System.currentTimeMillis();
            this.avpByNameMap = new TreeMap<String, AvpRepresentation>(new Comparator<String>() {
                @Override
                public int compare(String o1, String o2) {
                    return (o1 == null) ? 1 : (o2 == null) ? -1 : o1.compareTo(o2);
                }
            });
            // depends on control dependency: [try], data = [none]
            this.vendorMap = new HashMap<String, String>();
            // depends on control dependency: [try], data = [none]
            this.typedefMap = new HashMap<String, String>();
            // depends on control dependency: [try], data = [none]
            this.avpMap = new HashMap<AvpRepresentation, AvpRepresentation>();
            // depends on control dependency: [try], data = [none]
            this.commandMap = new HashMap<MessageRepresentation, MessageRepresentation>();
            // depends on control dependency: [try], data = [none]
            DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
            dbf.setValidating(false);
            // depends on control dependency: [try], data = [none]
            DocumentBuilder db = dbf.newDocumentBuilder();
            Document doc = db.parse(is);
            doc.getDocumentElement().normalize();
            // depends on control dependency: [try], data = [none]
            this.parseVendors(doc);
            // depends on control dependency: [try], data = [none]
            this.parseTypeDefs(doc);
            // depends on control dependency: [try], data = [none]
            this.parseAvps(doc);
            // depends on control dependency: [try], data = [none]
            this.parseCommands(doc);
            // depends on control dependency: [try], data = [none]
            this.configured = true;
            // depends on control dependency: [try], data = [none]
            long endTime = System.currentTimeMillis();
            if (logger.isInfoEnabled()) {
                logger.info("Mobicents Diameter Dictionary loaded in {}ms -- Vendors[{}] Commands[{}] Types[{}] AVPs[{}]",
                    new Object[] { (endTime - startTime), vendorMap.size(), commandMap.size(), typedefMap.size(), avpMap.size() });
                // depends on control dependency: [if], data = [none]
            }
            if (logger.isInfoEnabled()) {
                StringBuffer sb = new StringBuffer();
                int c = 0;
                for (AvpRepresentation key : this.avpMap.keySet()) {
                    if (this.avpMap.get(key).isWeak()) {
                        c++;
                        // depends on control dependency: [if], data = [none]
                        sb.append("---------------------------------\n").append("Found incomplete AVP definition:\n").append(this.avpMap.get(key)).append("\n");
                        // depends on control dependency: [if], data = [none]
                    }
                }
                if (c > 0) {
                    sb.append("------- TOTAL INCOMPLETE AVPS COUNT: ").append(c).append(" -------");
                    // depends on control dependency: [if], data = [(c]
                    logger.info(sb.toString());
                    // depends on control dependency: [if], data = [none]
                }
            }
        }
        catch (Exception e) {
            this.enabled = false;
            this.configured = false;
            logger.error("Failed to parse validator configuration. Validator disabled.", e);
        }
        // depends on control dependency: [catch], data = [none]
        finally {
            // close?
            try {
                is.close();
                // depends on control dependency: [try], data = [none]
            }
            catch (IOException e) {
                logger.debug("Failed to close InputStream for Dictionary XML.", e);
            }
            // depends on control dependency: [catch], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Returns the width of the widest row, or 0 when there are no rows.
     *
     * @return the maximum row width across {@code this.rows}
     */
    public int getWidth() {
        int maxWidth = 0;
        for (final Row row : this.rows) {
            final int candidate = row.getWidth();
            if (candidate > maxWidth) {
                maxWidth = candidate;
            }
        }
        return maxWidth;
    } }
|
public class class_name {
    /**
     * Returns the maximum width over all rows (0 for an empty row list).
     * The "depends on control dependency" comment is a pre-existing analysis
     * annotation, preserved verbatim.
     */
    public int getWidth() {
        int width = 0;
        for ( final Iterator<Row> rowIter = this.rows.iterator(); rowIter.hasNext(); ) {
            final Row row = rowIter.next();
            final int rowWidth = row.getWidth();
            if ( rowWidth > width ) {
                width = rowWidth; // depends on control dependency: [if], data = [none]
            }
        }
        return width;
    } }
|
public class class_name {
    /**
     * Copies the "cookie" map from the script result object onto the HTTP
     * response as individual cookies. Does nothing when the result carries
     * no cookie map.
     *
     * @param result   script object that may expose a "cookie" Map property
     * @param response response the cookies are added to
     */
    @SuppressWarnings("unchecked")
    private void setCookie(NativeObject result, HttpServletResponse response) {
        Map<String, String> cookieMap = ScriptableObject.getTypedProperty(
                result, "cookie", Map.class);
        if (cookieMap == null) {
            return;
        }
        // Iterate the entries directly instead of re-fetching each value with
        // cookieMap.get(name): halves the map lookups and keeps key and value
        // consistent from the same entry.
        for (Entry<String, String> entry : cookieMap.entrySet()) {
            response.addCookie(new Cookie(entry.getKey(), entry.getValue()));
        }
    } }
|
public class class_name {
    /**
     * Transfers the "cookie" map from the script result object onto the HTTP
     * response as individual cookies; returns early when no map is present.
     * Note each value is re-fetched via {@code cookieMap.get(name)} rather
     * than taken from the iterated entry. The "depends on control
     * dependency" comments are pre-existing analysis annotations, preserved
     * verbatim.
     */
    @SuppressWarnings("unchecked")
    private void setCookie(NativeObject result, HttpServletResponse response) {
        Map<String, String> cookieMap = ScriptableObject.getTypedProperty(
                result, "cookie", Map.class);
        if (cookieMap == null) {
            return; // depends on control dependency: [if], data = [none]
        }
        Iterator<Entry<String, String>> iterator = cookieMap.entrySet()
                .iterator();
        while (iterator.hasNext()) {
            String name = iterator.next().getKey();
            Cookie cookie = new Cookie(name, cookieMap.get(name));
            response.addCookie(cookie); // depends on control dependency: [while], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Persists all given document wrappers inside a single transaction,
     * executed with elevated (sudo) database privileges. A no-op when no
     * wrappers are supplied.
     *
     * @param dws document wrappers to save; may be null or empty
     */
    public static void sudoSave(final ODocumentWrapper... dws) {
        if (dws == null || dws.length == 0) {
            return;
        }
        new DBClosure<Boolean>() {
            @Override
            protected Boolean execute(ODatabaseDocument db) {
                // Wrap every save in one transaction so the batch is atomic.
                db.begin();
                for (int i = 0; i < dws.length; i++) {
                    dws[i].save();
                }
                db.commit();
                return true;
            }
        }.execute();
    } }
|
public class class_name {
    /**
     * Saves all given document wrappers in one transaction with elevated
     * database privileges; returns immediately when none are supplied. The
     * "depends on control dependency" comment is a pre-existing analysis
     * annotation, preserved verbatim.
     */
    public static void sudoSave(final ODocumentWrapper... dws) {
        if(dws==null || dws.length==0) return;
        new DBClosure<Boolean>() {
            @Override
            protected Boolean execute(ODatabaseDocument db) {
                db.begin();
                for (ODocumentWrapper dw : dws) {
                    dw.save(); // depends on control dependency: [for], data = [dw]
                }
                db.commit();
                return true;
            }
        }.execute();
    } }
|
public class class_name {
    /**
     * Collects every path mapped to more than one resource, returned as
     * path/ResourceList entries sorted lexicographically by path.
     *
     * @return the duplicate-path entries, possibly empty, in path order
     */
    public List<Entry<String, ResourceList>> findDuplicatePaths() {
        final List<Entry<String, ResourceList>> duplicates = new ArrayList<>();
        for (final Entry<String, ResourceList> entry : asMap().entrySet()) {
            final ResourceList resources = entry.getValue();
            // A path is a duplicate when two or more resources share it.
            if (resources.size() > 1) {
                duplicates.add(new SimpleEntry<>(entry.getKey(), resources));
            }
        }
        CollectionUtils.sortIfNotEmpty(duplicates, new Comparator<Entry<String, ResourceList>>() {
            @Override
            public int compare(final Entry<String, ResourceList> a, final Entry<String, ResourceList> b) {
                // Lexicographic order of the path key.
                return a.getKey().compareTo(b.getKey());
            }
        });
        return duplicates;
    } }
|
public class class_name {
    /**
     * Returns every path that maps to more than one resource, as
     * path/ResourceList entries sorted lexicographically by path. The
     * "depends on control dependency" comment is a pre-existing analysis
     * annotation, preserved verbatim.
     */
    public List<Entry<String, ResourceList>> findDuplicatePaths() {
        final List<Entry<String, ResourceList>> duplicatePaths = new ArrayList<>();
        for (final Entry<String, ResourceList> pathAndResourceList : asMap().entrySet()) {
            // Find ResourceLists with two or more entries
            if (pathAndResourceList.getValue().size() > 1) {
                duplicatePaths.add(new SimpleEntry<>(pathAndResourceList.getKey(), pathAndResourceList.getValue())); // depends on control dependency: [if], data = [none]
            }
        }
        CollectionUtils.sortIfNotEmpty(duplicatePaths, new Comparator<Entry<String, ResourceList>>() {
            @Override
            public int compare(final Entry<String, ResourceList> o1, final Entry<String, ResourceList> o2) {
                // Sort in lexicographic order of path
                return o1.getKey().compareTo(o2.getKey());
            }
        });
        return duplicatePaths;
    } }
|
public class class_name {
    /**
     * API handler that updates an existing user in the current deployment.
     * Parses the new user definition from the "user" request parameter
     * (JSON), locates the target user by the third segment of {@code target}
     * (split on "/"), overwrites its name/password/plaintext/roles, and
     * applies the rebuilt deployment via a synchronous
     * {@code @UpdateApplicationCatalog} procedure call. Every outcome is
     * reported back as a JSON(P) client response on {@code response}.
     *
     * @param jsonp    optional JSONP callback name wrapped around responses
     * @param target   request path; username expected as its third segment
     * @param request  incoming HTTP request carrying the "user" parameter
     * @param response HTTP response the client result is written to
     * @param ar       authentication result for the procedure invocation
     */
    public void handleUpdateUser(String jsonp, String target,
            HttpServletRequest request,
            HttpServletResponse response, AuthenticationResult ar)
            throws IOException, ServletException {
        // The new user definition arrives as JSON in the "user" parameter.
        String update = request.getParameter("user");
        if (update == null || update.trim().length() == 0) {
            response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
            response.getWriter().print(buildClientResponse(jsonp, ClientResponse.UNEXPECTED_FAILURE, "Failed to get user information."));
            return;
        }
        try {
            User newUser = m_mapper.readValue(update, User.class);
            if (newUser == null) {
                response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
                response.getWriter().print(buildClientResponse(jsonp, ClientResponse.UNEXPECTED_FAILURE, "Failed to parse user information."));
                return;
            }
            // Re-read the current deployment so the edit applies to fresh state.
            DeploymentType newDeployment = CatalogUtil.getDeployment(new ByteArrayInputStream(getDeploymentBytes()));
            User user = null;
            String[] splitTarget = target.split("/");
            // Username is expected as the third path segment of the target.
            if (splitTarget.length == 3) {
                user = findUser(splitTarget[2], newDeployment);
            }
            if (user == null) {
                response.setStatus(HttpServletResponse.SC_NOT_FOUND);
                response.getWriter().print(buildClientResponse(jsonp, ClientResponse.UNEXPECTED_FAILURE, "User not found"));
                return;
            }
            // Overwrite the existing user in place with the submitted fields.
            user.setName(newUser.getName());
            user.setPassword(newUser.getPassword());
            user.setPlaintext(newUser.isPlaintext());
            user.setRoles(newUser.getRoles());
            String dep = CatalogUtil.getDeployment(newDeployment);
            if (dep == null || dep.trim().length() <= 0) {
                response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
                response.getWriter().print(buildClientResponse(jsonp, ClientResponse.UNEXPECTED_FAILURE, "Failed to build deployment information."));
                return;
            }
            Object[] params = new Object[]{null, dep};
            //Call sync as nothing else can happen when this is going on.
            SyncCallback cb = new SyncCallback();
            httpClientInterface.callProcedure(request.getRemoteHost(), ar, BatchTimeoutOverrideType.NO_TIMEOUT, cb, "@UpdateApplicationCatalog", params);
            cb.waitForResponse();
            ClientResponseImpl r = ClientResponseImpl.class.cast(cb.getResponse());
            if (r.getStatus() == ClientResponse.SUCCESS) {
                response.getWriter().print(buildClientResponse(jsonp, ClientResponse.SUCCESS, "User Updated."));
            } else {
                // Forward the procedure's own response (with its status) to the caller.
                response.getWriter().print(HTTPClientInterface.asJsonp(jsonp, r.toJSONString()));
            }
        } catch (Exception ex) {
            m_log.error("Failed to update user from API", ex);
            response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
            response.getWriter().print(buildClientResponse(jsonp, ClientResponse.UNEXPECTED_FAILURE, Throwables.getStackTraceAsString(ex)));
        }
    } }
|
public class class_name {
    /**
     * Dependency-annotated variant of {@code handleUpdateUser}: parses the
     * "user" JSON parameter, finds the target user in the deployment by the
     * third segment of {@code target}, overwrites its fields, and applies
     * the change through a synchronous {@code @UpdateApplicationCatalog}
     * call, reporting each outcome as a JSON(P) client response. The
     * trailing "depends on control dependency" comments are pre-existing
     * analysis annotations and are preserved verbatim.
     */
    public void handleUpdateUser(String jsonp, String target,
            HttpServletRequest request,
            HttpServletResponse response, AuthenticationResult ar)
            throws IOException, ServletException {
        String update = request.getParameter("user");
        if (update == null || update.trim().length() == 0) {
            response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
            response.getWriter().print(buildClientResponse(jsonp, ClientResponse.UNEXPECTED_FAILURE, "Failed to get user information."));
            return;
        }
        try {
            User newUser = m_mapper.readValue(update, User.class);
            if (newUser == null) {
                response.setStatus(HttpServletResponse.SC_BAD_REQUEST); // depends on control dependency: [if], data = [none]
                response.getWriter().print(buildClientResponse(jsonp, ClientResponse.UNEXPECTED_FAILURE, "Failed to parse user information.")); // depends on control dependency: [if], data = [none]
                return; // depends on control dependency: [if], data = [none]
            }
            DeploymentType newDeployment = CatalogUtil.getDeployment(new ByteArrayInputStream(getDeploymentBytes()));
            User user = null;
            String[] splitTarget = target.split("/");
            if (splitTarget.length == 3) {
                user = findUser(splitTarget[2], newDeployment); // depends on control dependency: [if], data = [none]
            }
            if (user == null) {
                response.setStatus(HttpServletResponse.SC_NOT_FOUND); // depends on control dependency: [if], data = [none]
                response.getWriter().print(buildClientResponse(jsonp, ClientResponse.UNEXPECTED_FAILURE, "User not found")); // depends on control dependency: [if], data = [none]
                return; // depends on control dependency: [if], data = [none]
            }
            user.setName(newUser.getName());
            user.setPassword(newUser.getPassword());
            user.setPlaintext(newUser.isPlaintext());
            user.setRoles(newUser.getRoles());
            String dep = CatalogUtil.getDeployment(newDeployment);
            if (dep == null || dep.trim().length() <= 0) {
                response.setStatus(HttpServletResponse.SC_BAD_REQUEST); // depends on control dependency: [if], data = [none]
                response.getWriter().print(buildClientResponse(jsonp, ClientResponse.UNEXPECTED_FAILURE, "Failed to build deployment information.")); // depends on control dependency: [if], data = [none]
                return; // depends on control dependency: [if], data = [none]
            }
            Object[] params = new Object[]{null, dep};
            //Call sync as nothing else can happen when this is going on.
            SyncCallback cb = new SyncCallback();
            httpClientInterface.callProcedure(request.getRemoteHost(), ar, BatchTimeoutOverrideType.NO_TIMEOUT, cb, "@UpdateApplicationCatalog", params);
            cb.waitForResponse();
            ClientResponseImpl r = ClientResponseImpl.class.cast(cb.getResponse());
            if (r.getStatus() == ClientResponse.SUCCESS) {
                response.getWriter().print(buildClientResponse(jsonp, ClientResponse.SUCCESS, "User Updated.")); // depends on control dependency: [if], data = [none]
            } else {
                response.getWriter().print(HTTPClientInterface.asJsonp(jsonp, r.toJSONString())); // depends on control dependency: [if], data = [none]
            }
        } catch (Exception ex) {
            m_log.error("Failed to update user from API", ex);
            response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
            response.getWriter().print(buildClientResponse(jsonp, ClientResponse.UNEXPECTED_FAILURE, Throwables.getStackTraceAsString(ex)));
        }
    } }
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.