code stringlengths 130 281k | code_dependency stringlengths 182 306k |
|---|---|
public class class_name {
public void observeBeforeBeanDiscovery(@Observes BeforeBeanDiscovery bbd, BeanManager beanManager) {
log.debugf("MPJWTExtension(), adding producers");
String extensionName = MPJWTExtension.class.getName();
for (Class<?> clazz : new Class<?>[] {
JWTAuthContextInfoProvider.class,
CommonJwtProducer.class,
PrincipalProducer.class,
RawClaimTypeProducer.class,
ClaimValueProducer.class,
JsonValueProducer.class,
}) {
bbd.addAnnotatedType(beanManager.createAnnotatedType(clazz), extensionName + "_" + clazz.getName());
}
} } | public class class_name {
public void observeBeforeBeanDiscovery(@Observes BeforeBeanDiscovery bbd, BeanManager beanManager) {
log.debugf("MPJWTExtension(), adding producers");
String extensionName = MPJWTExtension.class.getName();
for (Class<?> clazz : new Class<?>[] {
JWTAuthContextInfoProvider.class,
CommonJwtProducer.class,
PrincipalProducer.class,
RawClaimTypeProducer.class,
ClaimValueProducer.class,
JsonValueProducer.class,
}) {
bbd.addAnnotatedType(beanManager.createAnnotatedType(clazz), extensionName + "_" + clazz.getName()); // depends on control dependency: [for], data = [clazz]
}
} } |
public class class_name {
public String getContextPath()
{
//PM47487
String cp = context.getContextPath();
if (WCCustomProperties.RETURN_DEFAULT_CONTEXT_PATH && cp.equals("/")){
cp = "";
}
//PM47487
return cp;
} } | public class class_name {
public String getContextPath()
{
//PM47487
String cp = context.getContextPath();
if (WCCustomProperties.RETURN_DEFAULT_CONTEXT_PATH && cp.equals("/")){
cp = ""; // depends on control dependency: [if], data = [none]
}
//PM47487
return cp;
} } |
public class class_name {
@Override
protected Map<String, Object> transformElement(T entity) {
try {
return UtilJdbc.getRowFromObject(entity);
} catch(IllegalAccessException | InvocationTargetException | InstantiationException e) {
throw new DeepTransformException(e);
}
} } | public class class_name {
@Override
protected Map<String, Object> transformElement(T entity) {
try {
return UtilJdbc.getRowFromObject(entity); // depends on control dependency: [try], data = [none]
} catch(IllegalAccessException | InvocationTargetException | InstantiationException e) {
throw new DeepTransformException(e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public int toInteger(Object val) {
if (val == null) {
return 0;
}
else if (val instanceof Double) {
if (!Double.isNaN(((Double) val).doubleValue())) {
return 0;
}
else {
return ((Double) val).intValue();
}
}
else if (val instanceof Number) {
return ((Number) val).intValue();
}
else if (val instanceof String) {
if ("".equals(val)) {
return 0;
}
return Integer.parseInt((String) val);
}
else if (val instanceof Boolean) {
return ((Boolean) val).booleanValue() ? 1 : 0;
}
else if (val instanceof Character) {
return ((Character) val).charValue();
}
throw new ArithmeticException("Integer coercion: " + val.getClass().getName() + ":(" + val + ")");
} } | public class class_name {
public int toInteger(Object val) {
if (val == null) {
return 0; // depends on control dependency: [if], data = [none]
}
else if (val instanceof Double) {
if (!Double.isNaN(((Double) val).doubleValue())) {
return 0; // depends on control dependency: [if], data = [none]
}
else {
return ((Double) val).intValue(); // depends on control dependency: [if], data = [none]
}
}
else if (val instanceof Number) {
return ((Number) val).intValue(); // depends on control dependency: [if], data = [none]
}
else if (val instanceof String) {
if ("".equals(val)) {
return 0; // depends on control dependency: [if], data = [none]
}
return Integer.parseInt((String) val); // depends on control dependency: [if], data = [none]
}
else if (val instanceof Boolean) {
return ((Boolean) val).booleanValue() ? 1 : 0; // depends on control dependency: [if], data = [none]
}
else if (val instanceof Character) {
return ((Character) val).charValue(); // depends on control dependency: [if], data = [none]
}
throw new ArithmeticException("Integer coercion: " + val.getClass().getName() + ":(" + val + ")");
} } |
public class class_name {
public static String rejectChar(String string, CharPredicate predicate)
{
int size = string.length();
StringBuilder buffer = new StringBuilder(string.length());
for (int i = 0; i < size; i++)
{
char character = string.charAt(i);
if (!predicate.accept(character))
{
buffer.append(character);
}
}
return buffer.toString();
} } | public class class_name {
public static String rejectChar(String string, CharPredicate predicate)
{
int size = string.length();
StringBuilder buffer = new StringBuilder(string.length());
for (int i = 0; i < size; i++)
{
char character = string.charAt(i);
if (!predicate.accept(character))
{
buffer.append(character); // depends on control dependency: [if], data = [none]
}
}
return buffer.toString();
} } |
public class class_name {
public void processCompleteFrame() throws Http2Exception {
Frame currentFrame = getCurrentFrame();
boolean frameSizeError = false;
try {
currentFrame.processPayload(this);
} catch (Http2Exception e) {
// If we get an error here, it should be safe to assume that this frame doesn't have the expected byte count,
// which must be treated as an error of type FRAME_SIZE_ERROR. If we're processing a DATA or PRIORITY frame, then
// we can treat the error as a stream error rather than a connection error.
if (!e.isConnectionError()) {
frameSizeError = true;
} else {
// this is a connection error; we need to send a GOAWAY on the connection
throw e;
}
} catch (Exception e) {
throw new ProtocolException("Error processing the payload for " + currentFrame.getFrameType()
+ " frame on stream " + currentFrame.getStreamId());
}
// call the stream processor to process this stream. For now, don't return from here until the
// frame has been fully processed.
int streamId = currentFrame.getStreamId();
H2StreamProcessor stream = muxLink.getStream(streamId);
if (stream == null) {
if ((streamId != 0) && (streamId % 2 == 0)) {
if (currentFrame.getFrameType().equals(FrameTypes.PRIORITY)) {
// ignore PRIORITY frames in any state
return;
} else if (currentFrame.getFrameType().equals(FrameTypes.RST_STREAM) && streamId < muxLink.getHighestClientStreamId()) {
// tolerate RST_STREAM frames that are sent on closed push streams
return;
} else {
throw new ProtocolException("Cannot start a stream from the client with an even numbered ID. stream-id: " + streamId);
}
} else {
stream = startNewInboundSession(streamId);
}
}
if (frameSizeError) {
currentFrame = new FrameRstStream(streamId, 4, (byte) 0, false, FrameDirection.READ);
((FrameRstStream) currentFrame).setErrorCode(Constants.FRAME_SIZE_ERROR);
}
stream.processNextFrame(currentFrame, Direction.READ_IN);
} } | public class class_name {
public void processCompleteFrame() throws Http2Exception {
Frame currentFrame = getCurrentFrame();
boolean frameSizeError = false;
try {
currentFrame.processPayload(this);
} catch (Http2Exception e) {
// If we get an error here, it should be safe to assume that this frame doesn't have the expected byte count,
// which must be treated as an error of type FRAME_SIZE_ERROR. If we're processing a DATA or PRIORITY frame, then
// we can treat the error as a stream error rather than a connection error.
if (!e.isConnectionError()) {
frameSizeError = true; // depends on control dependency: [if], data = [none]
} else {
// this is a connection error; we need to send a GOAWAY on the connection
throw e;
}
} catch (Exception e) {
throw new ProtocolException("Error processing the payload for " + currentFrame.getFrameType()
+ " frame on stream " + currentFrame.getStreamId());
}
// call the stream processor to process this stream. For now, don't return from here until the
// frame has been fully processed.
int streamId = currentFrame.getStreamId();
H2StreamProcessor stream = muxLink.getStream(streamId);
if (stream == null) {
if ((streamId != 0) && (streamId % 2 == 0)) {
if (currentFrame.getFrameType().equals(FrameTypes.PRIORITY)) {
// ignore PRIORITY frames in any state
return; // depends on control dependency: [if], data = [none]
} else if (currentFrame.getFrameType().equals(FrameTypes.RST_STREAM) && streamId < muxLink.getHighestClientStreamId()) {
// tolerate RST_STREAM frames that are sent on closed push streams
return; // depends on control dependency: [if], data = [none]
} else {
throw new ProtocolException("Cannot start a stream from the client with an even numbered ID. stream-id: " + streamId);
}
} else {
stream = startNewInboundSession(streamId);
}
}
if (frameSizeError) {
currentFrame = new FrameRstStream(streamId, 4, (byte) 0, false, FrameDirection.READ);
((FrameRstStream) currentFrame).setErrorCode(Constants.FRAME_SIZE_ERROR);
}
stream.processNextFrame(currentFrame, Direction.READ_IN);
} } |
public class class_name {
public boolean setNewVariable(Object key, Object value) {
boolean success = false;
success = setLocalVariable(key, value);
if (!success) {
setGlobalVariable(key, value);
success = true;
}
return success;
} } | public class class_name {
public boolean setNewVariable(Object key, Object value) {
boolean success = false;
success = setLocalVariable(key, value);
if (!success) {
setGlobalVariable(key, value); // depends on control dependency: [if], data = [none]
success = true; // depends on control dependency: [if], data = [none]
}
return success;
} } |
public class class_name {
public FindingFilter withAutoScalingGroups(String... autoScalingGroups) {
if (this.autoScalingGroups == null) {
setAutoScalingGroups(new java.util.ArrayList<String>(autoScalingGroups.length));
}
for (String ele : autoScalingGroups) {
this.autoScalingGroups.add(ele);
}
return this;
} } | public class class_name {
public FindingFilter withAutoScalingGroups(String... autoScalingGroups) {
if (this.autoScalingGroups == null) {
setAutoScalingGroups(new java.util.ArrayList<String>(autoScalingGroups.length)); // depends on control dependency: [if], data = [none]
}
for (String ele : autoScalingGroups) {
this.autoScalingGroups.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} } |
public class class_name {
synchronized public void recover() {
if (tc.isEntryEnabled())
Tr.entry(tc, "recover", this);
final int state = _status.getState();
if (_subordinate) {
// For a subordinate, first check whether the global outcome is known locally.
switch (state) {
// Due to the possibility of recovery being attempted asynchronously to
// an incoming superior request, we must cover the case where the
// transaction has now actually committed already.
case TransactionState.STATE_HEURISTIC_ON_COMMIT:
case TransactionState.STATE_COMMITTED:
case TransactionState.STATE_COMMITTING:
recoverCommit(true);
break;
// Due to the possibility of recovery being attempted asynchronously to
// an incoming superior request, we must cover the case where the
// transaction has now actually rolled back already.
case TransactionState.STATE_HEURISTIC_ON_ROLLBACK:
case TransactionState.STATE_ROLLED_BACK:
case TransactionState.STATE_ROLLING_BACK:
recoverRollback(true);
break;
// For a subordinate, the replay_completion method is invoked on the superior.
// If the number of times the replay_completion has been retried is greater
// than the value specified by COMMITRETRY, then HEURISTICDIRECTION is used
// to determine the transaction outcome.
default:
// If we were imported from a JCA provider, check whether it's still installed.
// If so, we need do nothing here since we expect the RA to complete the transaction.
// Otherwise, we will complete using the configured direction.
if (_JCARecoveryData != null) {
final String id = _JCARecoveryData.getWrapper().getProviderId();
if (TMHelper.isProviderInstalled(id)) {
if (tc.isDebugEnabled())
Tr.debug(tc, "recover", "Do nothing. Expect provider " + id + " will complete.");
// Do nothing. RA is responsible for completing.
} else {
switch (_configProvider.getHeuristicCompletionDirection()) {
case ConfigurationProvider.HEURISTIC_COMPLETION_DIRECTION_COMMIT:
Tr.error(tc, "WTRN0098_COMMIT_RA_UNINSTALLED", new Object[] { getTranName(), id });
recoverCommit(false);
break;
case ConfigurationProvider.HEURISTIC_COMPLETION_DIRECTION_MANUAL:
// do nothing, administrative completion is required
_needsManualCompletion = true;
Tr.info(tc, "WTRN0101_MANUAL_RA_UNINSTALLED", new Object[] { getTranName(), id });
break;
default:
Tr.error(tc, "WTRN0099_ROLLBACK_RA_UNINSTALLED", new Object[] { getTranName(), id });
recoverRollback(false);
}
}
} else {
retryCompletion();
}
break;
}
} else {
// For a top-level Transaction, we will only recover in the case
// where we have successfully prepared. If the state is not committing,
// then assume it is rollback.
if (state == TransactionState.STATE_LAST_PARTICIPANT) {
// LIDB1673-13 lps heuristic completion.
// The transaction was attempting to complete its
// 1PC resource when the server went down.
// Use the lpsHeuristicCompletion flag to determine
// how to complete the tx.
switch (ConfigurationProviderManager.getConfigurationProvider().getHeuristicCompletionDirection()) {
case ConfigurationProvider.HEURISTIC_COMPLETION_DIRECTION_COMMIT:
Tr.error(tc, "WTRN0096_HEURISTIC_MAY_HAVE_OCCURED", getTranName());
recoverCommit(false);
break;
case ConfigurationProvider.HEURISTIC_COMPLETION_DIRECTION_MANUAL:
// do nothing!?
_needsManualCompletion = true;
Tr.info(tc, "WTRN0097_HEURISTIC_MANUAL_COMPLETION", getTranName());
break;
default:
Tr.error(tc, "WTRN0102_HEURISTIC_MAY_HAVE_OCCURED", getTranName());
recoverRollback(false);
}
} else if (state == TransactionState.STATE_COMMITTING)
recoverCommit(false);
else
recoverRollback(false);
}
if (tc.isEntryEnabled())
Tr.exit(tc, "recover");
} } | public class class_name {
synchronized public void recover() {
if (tc.isEntryEnabled())
Tr.entry(tc, "recover", this);
final int state = _status.getState();
if (_subordinate) {
// For a subordinate, first check whether the global outcome is known locally.
switch (state) {
// Due to the possibility of recovery being attempted asynchronously to
// an incoming superior request, we must cover the case where the
// transaction has now actually committed already.
case TransactionState.STATE_HEURISTIC_ON_COMMIT:
case TransactionState.STATE_COMMITTED:
case TransactionState.STATE_COMMITTING:
recoverCommit(true);
break;
// Due to the possibility of recovery being attempted asynchronously to
// an incoming superior request, we must cover the case where the
// transaction has now actually rolled back already.
case TransactionState.STATE_HEURISTIC_ON_ROLLBACK:
case TransactionState.STATE_ROLLED_BACK:
case TransactionState.STATE_ROLLING_BACK:
recoverRollback(true);
break;
// For a subordinate, the replay_completion method is invoked on the superior.
// If the number of times the replay_completion has been retried is greater
// than the value specified by COMMITRETRY, then HEURISTICDIRECTION is used
// to determine the transaction outcome.
default:
// If we were imported from a JCA provider, check whether it's still installed.
// If so, we need do nothing here since we expect the RA to complete the transaction.
// Otherwise, we will complete using the configured direction.
if (_JCARecoveryData != null) {
final String id = _JCARecoveryData.getWrapper().getProviderId();
if (TMHelper.isProviderInstalled(id)) {
if (tc.isDebugEnabled())
Tr.debug(tc, "recover", "Do nothing. Expect provider " + id + " will complete.");
// Do nothing. RA is responsible for completing.
} else {
switch (_configProvider.getHeuristicCompletionDirection()) {
case ConfigurationProvider.HEURISTIC_COMPLETION_DIRECTION_COMMIT:
Tr.error(tc, "WTRN0098_COMMIT_RA_UNINSTALLED", new Object[] { getTranName(), id });
recoverCommit(false);
break;
case ConfigurationProvider.HEURISTIC_COMPLETION_DIRECTION_MANUAL:
// do nothing, administrative completion is required
_needsManualCompletion = true;
Tr.info(tc, "WTRN0101_MANUAL_RA_UNINSTALLED", new Object[] { getTranName(), id });
break;
default:
Tr.error(tc, "WTRN0099_ROLLBACK_RA_UNINSTALLED", new Object[] { getTranName(), id });
recoverRollback(false);
}
}
} else { // depends on control dependency: [if], data = [none]
retryCompletion();
}
break;
}
} else { // depends on control dependency: [if], data = [none]
// For a top-level Transaction, we will only recover in the case
// where we have successfully prepared. If the state is not committing,
// then assume it is rollback.
if (state == TransactionState.STATE_LAST_PARTICIPANT) {
// LIDB1673-13 lps heuristic completion.
// The transaction was attempting to complete its
// 1PC resource when the server went down.
// Use the lpsHeuristicCompletion flag to determine
// how to complete the tx.
switch (ConfigurationProviderManager.getConfigurationProvider().getHeuristicCompletionDirection()) {
case ConfigurationProvider.HEURISTIC_COMPLETION_DIRECTION_COMMIT:
Tr.error(tc, "WTRN0096_HEURISTIC_MAY_HAVE_OCCURED", getTranName());
recoverCommit(false);
break;
case ConfigurationProvider.HEURISTIC_COMPLETION_DIRECTION_MANUAL:
// do nothing!?
_needsManualCompletion = true;
Tr.info(tc, "WTRN0097_HEURISTIC_MANUAL_COMPLETION", getTranName());
break;
default:
Tr.error(tc, "WTRN0102_HEURISTIC_MAY_HAVE_OCCURED", getTranName());
recoverRollback(false);
}
} else if (state == TransactionState.STATE_COMMITTING)
recoverCommit(false);
else
recoverRollback(false);
}
if (tc.isEntryEnabled())
Tr.exit(tc, "recover");
} } |
public class class_name {
@Subscribe
public void onSegmentWritten(HlsSegmentWrittenEvent event) {
try {
File hlsSegment = event.getSegment();
queueOrSubmitUpload(keyForFilename(hlsSegment.getName()), hlsSegment);
if (isKitKat() && mConfig.isAdaptiveBitrate() && isRecording()) {
// Adjust bitrate to match expected filesize
long actualSegmentSizeBytes = hlsSegment.length();
long expectedSizeBytes = ((mConfig.getAudioBitrate() / 8) + (mVideoBitrate / 8)) * mConfig.getHlsSegmentDuration();
float filesizeRatio = actualSegmentSizeBytes / (float) expectedSizeBytes;
if (VERBOSE)
Log.i(TAG, "OnSegmentWritten. Segment size: " + (actualSegmentSizeBytes / 1000) + "kB. ratio: " + filesizeRatio);
if (filesizeRatio < .7) {
if (mLastRealizedBandwidthBytesPerSec != 0) {
// Scale bitrate while not exceeding available bandwidth
float scaledBitrate = mVideoBitrate * (1 / filesizeRatio);
float bandwidthBitrate = mLastRealizedBandwidthBytesPerSec * 8;
mVideoBitrate = (int) Math.min(scaledBitrate, bandwidthBitrate);
} else {
// Scale bitrate to match expected fileSize
mVideoBitrate *= (1 / filesizeRatio);
}
if (VERBOSE) Log.i(TAG, "Scaling video bitrate to " + mVideoBitrate + " bps");
adjustVideoBitrate(mVideoBitrate);
}
}
} catch (Exception ex) {
ex.printStackTrace();
}
} } | public class class_name {
@Subscribe
public void onSegmentWritten(HlsSegmentWrittenEvent event) {
try {
File hlsSegment = event.getSegment();
queueOrSubmitUpload(keyForFilename(hlsSegment.getName()), hlsSegment); // depends on control dependency: [try], data = [none]
if (isKitKat() && mConfig.isAdaptiveBitrate() && isRecording()) {
// Adjust bitrate to match expected filesize
long actualSegmentSizeBytes = hlsSegment.length();
long expectedSizeBytes = ((mConfig.getAudioBitrate() / 8) + (mVideoBitrate / 8)) * mConfig.getHlsSegmentDuration();
float filesizeRatio = actualSegmentSizeBytes / (float) expectedSizeBytes;
if (VERBOSE)
Log.i(TAG, "OnSegmentWritten. Segment size: " + (actualSegmentSizeBytes / 1000) + "kB. ratio: " + filesizeRatio);
if (filesizeRatio < .7) {
if (mLastRealizedBandwidthBytesPerSec != 0) {
// Scale bitrate while not exceeding available bandwidth
float scaledBitrate = mVideoBitrate * (1 / filesizeRatio);
float bandwidthBitrate = mLastRealizedBandwidthBytesPerSec * 8;
mVideoBitrate = (int) Math.min(scaledBitrate, bandwidthBitrate); // depends on control dependency: [if], data = [none]
} else {
// Scale bitrate to match expected fileSize
mVideoBitrate *= (1 / filesizeRatio); // depends on control dependency: [if], data = [none]
}
if (VERBOSE) Log.i(TAG, "Scaling video bitrate to " + mVideoBitrate + " bps");
adjustVideoBitrate(mVideoBitrate); // depends on control dependency: [if], data = [none]
}
}
} catch (Exception ex) {
ex.printStackTrace();
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
private void processWorkingHours(ProjectCalendar mpxjCalendar, Sequence uniqueID, Day day, List<OverriddenDayType> typeList)
{
if (isWorkingDay(mpxjCalendar, day))
{
ProjectCalendarHours mpxjHours = mpxjCalendar.getCalendarHours(day);
if (mpxjHours != null)
{
OverriddenDayType odt = m_factory.createOverriddenDayType();
typeList.add(odt);
odt.setId(getIntegerString(uniqueID.next()));
List<Interval> intervalList = odt.getInterval();
for (DateRange mpxjRange : mpxjHours)
{
Date rangeStart = mpxjRange.getStart();
Date rangeEnd = mpxjRange.getEnd();
if (rangeStart != null && rangeEnd != null)
{
Interval interval = m_factory.createInterval();
intervalList.add(interval);
interval.setStart(getTimeString(rangeStart));
interval.setEnd(getTimeString(rangeEnd));
}
}
}
}
} } | public class class_name {
private void processWorkingHours(ProjectCalendar mpxjCalendar, Sequence uniqueID, Day day, List<OverriddenDayType> typeList)
{
if (isWorkingDay(mpxjCalendar, day))
{
ProjectCalendarHours mpxjHours = mpxjCalendar.getCalendarHours(day);
if (mpxjHours != null)
{
OverriddenDayType odt = m_factory.createOverriddenDayType();
typeList.add(odt); // depends on control dependency: [if], data = [none]
odt.setId(getIntegerString(uniqueID.next())); // depends on control dependency: [if], data = [none]
List<Interval> intervalList = odt.getInterval();
for (DateRange mpxjRange : mpxjHours)
{
Date rangeStart = mpxjRange.getStart();
Date rangeEnd = mpxjRange.getEnd();
if (rangeStart != null && rangeEnd != null)
{
Interval interval = m_factory.createInterval();
intervalList.add(interval); // depends on control dependency: [if], data = [none]
interval.setStart(getTimeString(rangeStart)); // depends on control dependency: [if], data = [(rangeStart]
interval.setEnd(getTimeString(rangeEnd)); // depends on control dependency: [if], data = [none]
}
}
}
}
} } |
public class class_name {
private void doAPKWithAPKBuilder( File outputFile, File dexFile, File zipArchive, Collection<File> sourceFolders,
List<File> jarFiles, Collection<File> nativeFolders,
boolean signWithDebugKeyStore ) throws MojoExecutionException
{
getLog().debug( "Building APK with internal APKBuilder" );
//A when jack is running the classes directory will not get filled (usually)
// so let's skip it if it wasn't created by something else
if ( projectOutputDirectory.exists() || !getJack().isEnabled() )
{
sourceFolders.add( projectOutputDirectory );
}
for ( Artifact artifact : filterArtifacts( getRelevantCompileArtifacts(), skipDependencies,
artifactTypeSet.getIncludes(), artifactTypeSet.getExcludes(), artifactSet.getIncludes(),
artifactSet.getExcludes() ) )
{
getLog().debug( "Found artifact for APK :" + artifact );
if ( extractDuplicates )
{
try
{
computeDuplicateFiles( artifact.getFile() );
}
catch ( Exception e )
{
getLog().warn( "Cannot compute duplicates files from " + artifact.getFile().getAbsolutePath(), e );
}
}
jarFiles.add( artifact.getFile() );
}
for ( File src : sourceFolders )
{
computeDuplicateFilesInSource( src );
}
// Check duplicates.
if ( extractDuplicates )
{
try
{
extractDuplicateFiles( jarFiles, sourceFolders );
}
catch ( IOException e )
{
getLog().error( "Could not extract duplicates to duplicate-resources.jar", e );
}
}
try
{
final String debugKeyStore = signWithDebugKeyStore ? ApkBuilder.getDebugKeystore() : null;
final ApkBuilder apkBuilder = new ApkBuilder( outputFile, zipArchive, dexFile, debugKeyStore, null );
if ( apkDebug )
{
apkBuilder.setDebugMode( true );
}
for ( File sourceFolder : sourceFolders )
{
getLog().debug( "Adding source folder : " + sourceFolder );
// Use ApkBuilder#addFile() to explicitly add resource files so that we can add META-INF/services.
addResourcesFromFolder( apkBuilder, sourceFolder );
}
for ( File jarFile : jarFiles )
{
boolean excluded = false;
if ( excludeJarResourcesPatterns != null )
{
final String name = jarFile.getName();
getLog().debug( "Checking " + name + " against patterns" );
for ( Pattern pattern : excludeJarResourcesPatterns )
{
final Matcher matcher = pattern.matcher( name );
if ( matcher.matches() )
{
getLog().debug( "Jar " + name + " excluded by pattern " + pattern );
excluded = true;
break;
}
else
{
getLog().debug( "Jar " + name + " not excluded by pattern " + pattern );
}
}
}
if ( excluded )
{
continue;
}
if ( jarFile.isDirectory() )
{
getLog().debug( "Adding resources from jar folder : " + jarFile );
final String[] filenames = jarFile.list( new FilenameFilter()
{
public boolean accept( File dir, String name )
{
return PATTERN_JAR_EXT.matcher( name ).matches();
}
} );
for ( String filename : filenames )
{
final File innerJar = new File( jarFile, filename );
getLog().debug( "Adding resources from innerJar : " + innerJar );
apkBuilder.addResourcesFromJar( innerJar );
}
}
else
{
getLog().debug( "Adding resources from : " + jarFile );
apkBuilder.addResourcesFromJar( jarFile );
}
}
addSecondaryDexes( dexFile, apkBuilder );
for ( File nativeFolder : nativeFolders )
{
getLog().debug( "Adding native library : " + nativeFolder );
apkBuilder.addNativeLibraries( nativeFolder );
}
apkBuilder.sealApk();
}
catch ( ApkCreationException | SealedApkException | IOException e )
{
throw new MojoExecutionException( e.getMessage(), e );
}
catch ( DuplicateFileException e )
{
final String msg = String.format( "Duplicated file: %s, found in archive %s and %s",
e.getArchivePath(), e.getFile1(), e.getFile2() );
throw new MojoExecutionException( msg, e );
}
} } | public class class_name {
private void doAPKWithAPKBuilder( File outputFile, File dexFile, File zipArchive, Collection<File> sourceFolders,
List<File> jarFiles, Collection<File> nativeFolders,
boolean signWithDebugKeyStore ) throws MojoExecutionException
{
getLog().debug( "Building APK with internal APKBuilder" );
//A when jack is running the classes directory will not get filled (usually)
// so let's skip it if it wasn't created by something else
if ( projectOutputDirectory.exists() || !getJack().isEnabled() )
{
sourceFolders.add( projectOutputDirectory );
}
for ( Artifact artifact : filterArtifacts( getRelevantCompileArtifacts(), skipDependencies,
artifactTypeSet.getIncludes(), artifactTypeSet.getExcludes(), artifactSet.getIncludes(),
artifactSet.getExcludes() ) )
{
getLog().debug( "Found artifact for APK :" + artifact );
if ( extractDuplicates )
{
try
{
computeDuplicateFiles( artifact.getFile() ); // depends on control dependency: [try], data = [none]
}
catch ( Exception e )
{
getLog().warn( "Cannot compute duplicates files from " + artifact.getFile().getAbsolutePath(), e );
} // depends on control dependency: [catch], data = [none]
}
jarFiles.add( artifact.getFile() );
}
for ( File src : sourceFolders )
{
computeDuplicateFilesInSource( src );
}
// Check duplicates.
if ( extractDuplicates )
{
try
{
extractDuplicateFiles( jarFiles, sourceFolders ); // depends on control dependency: [try], data = [none]
}
catch ( IOException e )
{
getLog().error( "Could not extract duplicates to duplicate-resources.jar", e );
} // depends on control dependency: [catch], data = [none]
}
try
{
final String debugKeyStore = signWithDebugKeyStore ? ApkBuilder.getDebugKeystore() : null;
final ApkBuilder apkBuilder = new ApkBuilder( outputFile, zipArchive, dexFile, debugKeyStore, null );
if ( apkDebug )
{
apkBuilder.setDebugMode( true ); // depends on control dependency: [if], data = [none]
}
for ( File sourceFolder : sourceFolders )
{
getLog().debug( "Adding source folder : " + sourceFolder ); // depends on control dependency: [for], data = [sourceFolder]
// Use ApkBuilder#addFile() to explicitly add resource files so that we can add META-INF/services.
addResourcesFromFolder( apkBuilder, sourceFolder ); // depends on control dependency: [for], data = [sourceFolder]
}
for ( File jarFile : jarFiles )
{
boolean excluded = false;
if ( excludeJarResourcesPatterns != null )
{
final String name = jarFile.getName();
getLog().debug( "Checking " + name + " against patterns" ); // depends on control dependency: [if], data = [none]
for ( Pattern pattern : excludeJarResourcesPatterns )
{
final Matcher matcher = pattern.matcher( name );
if ( matcher.matches() )
{
getLog().debug( "Jar " + name + " excluded by pattern " + pattern ); // depends on control dependency: [if], data = [none]
excluded = true; // depends on control dependency: [if], data = [none]
break;
}
else
{
getLog().debug( "Jar " + name + " not excluded by pattern " + pattern ); // depends on control dependency: [if], data = [none]
}
}
}
if ( excluded )
{
continue;
}
if ( jarFile.isDirectory() )
{
getLog().debug( "Adding resources from jar folder : " + jarFile ); // depends on control dependency: [if], data = [none]
final String[] filenames = jarFile.list( new FilenameFilter()
{
public boolean accept( File dir, String name )
{
return PATTERN_JAR_EXT.matcher( name ).matches();
}
} );
for ( String filename : filenames )
{
final File innerJar = new File( jarFile, filename );
getLog().debug( "Adding resources from innerJar : " + innerJar ); // depends on control dependency: [for], data = [none]
apkBuilder.addResourcesFromJar( innerJar ); // depends on control dependency: [for], data = [none]
}
}
else
{
getLog().debug( "Adding resources from : " + jarFile ); // depends on control dependency: [if], data = [none]
apkBuilder.addResourcesFromJar( jarFile ); // depends on control dependency: [if], data = [none]
}
}
addSecondaryDexes( dexFile, apkBuilder );
for ( File nativeFolder : nativeFolders )
{
getLog().debug( "Adding native library : " + nativeFolder ); // depends on control dependency: [for], data = [nativeFolder]
apkBuilder.addNativeLibraries( nativeFolder ); // depends on control dependency: [for], data = [nativeFolder]
}
apkBuilder.sealApk();
}
catch ( ApkCreationException | SealedApkException | IOException e )
{
throw new MojoExecutionException( e.getMessage(), e );
}
catch ( DuplicateFileException e )
{
final String msg = String.format( "Duplicated file: %s, found in archive %s and %s",
e.getArchivePath(), e.getFile1(), e.getFile2() );
throw new MojoExecutionException( msg, e );
}
} } |
public class class_name {
static String cssContent(String token) {
int n = token.length();
int pos = 0;
StringBuilder sb = null;
if (n >= 2) {
char ch0 = token.charAt(0);
if (ch0 == '"' || ch0 == '\'') {
if (ch0 == token.charAt(n - 1)) {
pos = 1;
--n;
sb = new StringBuilder(n);
}
}
}
for (int esc; (esc = token.indexOf('\\', pos)) >= 0;) {
int end = esc + 2;
if (esc > n) { break; }
if (sb == null) { sb = new StringBuilder(n); }
sb.append(token, pos, esc);
int codepoint = token.charAt(end - 1);
if (isHex(codepoint)) {
// Parse \hhhhh<opt-break> where hhhhh is one or more hex digits
// and <opt-break> is an optional space or tab character that can be
// used to separate an escape sequence from a following literal hex
// digit.
while (end < n && isHex(token.charAt(end))) { ++end; }
try {
codepoint = Integer.parseInt(token.substring(esc + 1, end), 16);
} catch (RuntimeException ex) {
ignore(ex);
codepoint = 0xfffd; // Unknown codepoint.
}
if (end < n) {
char ch = token.charAt(end);
if (ch == ' ' || ch == '\t') { // Ignorable hex follower.
++end;
}
}
}
sb.appendCodePoint(codepoint);
pos = end;
}
if (sb == null) { return token; }
return sb.append(token, pos, n).toString();
} } | public class class_name {
static String cssContent(String token) {
int n = token.length();
int pos = 0;
StringBuilder sb = null;
if (n >= 2) {
char ch0 = token.charAt(0);
if (ch0 == '"' || ch0 == '\'') {
if (ch0 == token.charAt(n - 1)) {
pos = 1; // depends on control dependency: [if], data = [none]
--n; // depends on control dependency: [if], data = [none]
sb = new StringBuilder(n); // depends on control dependency: [if], data = [none]
}
}
}
for (int esc; (esc = token.indexOf('\\', pos)) >= 0;) {
int end = esc + 2;
if (esc > n) { break; }
if (sb == null) { sb = new StringBuilder(n); } // depends on control dependency: [if], data = [none]
sb.append(token, pos, esc); // depends on control dependency: [for], data = [none]
int codepoint = token.charAt(end - 1);
if (isHex(codepoint)) {
// Parse \hhhhh<opt-break> where hhhhh is one or more hex digits
// and <opt-break> is an optional space or tab character that can be
// used to separate an escape sequence from a following literal hex
// digit.
while (end < n && isHex(token.charAt(end))) { ++end; } // depends on control dependency: [while], data = [none]
try {
codepoint = Integer.parseInt(token.substring(esc + 1, end), 16); // depends on control dependency: [try], data = [none]
} catch (RuntimeException ex) {
ignore(ex);
codepoint = 0xfffd; // Unknown codepoint.
} // depends on control dependency: [catch], data = [none]
if (end < n) {
char ch = token.charAt(end);
if (ch == ' ' || ch == '\t') { // Ignorable hex follower.
++end; // depends on control dependency: [if], data = [none]
}
}
}
sb.appendCodePoint(codepoint); // depends on control dependency: [for], data = [none]
pos = end; // depends on control dependency: [for], data = [none]
}
if (sb == null) { return token; } // depends on control dependency: [if], data = [none]
return sb.append(token, pos, n).toString();
} } |
public class class_name {
/**
 * Returns one {@link LifecycleCallbackType} wrapper for every
 * {@code post-construct} child element currently present under this bean.
 *
 * @return a freshly allocated, mutable list; empty when no such element exists
 */
public List<LifecycleCallbackType<SessionBeanType<T>>> getAllPostConstruct()
{
final List<LifecycleCallbackType<SessionBeanType<T>>> callbacks =
      new ArrayList<LifecycleCallbackType<SessionBeanType<T>>>();
for (Node child : childNode.get("post-construct"))
{
callbacks.add(new LifecycleCallbackTypeImpl<SessionBeanType<T>>(
      this, "post-construct", childNode, child));
}
return callbacks;
} } | public class class_name {
public List<LifecycleCallbackType<SessionBeanType<T>>> getAllPostConstruct()
{
List<LifecycleCallbackType<SessionBeanType<T>>> list = new ArrayList<LifecycleCallbackType<SessionBeanType<T>>>();
List<Node> nodeList = childNode.get("post-construct");
for(Node node: nodeList)
{
LifecycleCallbackType<SessionBeanType<T>> type = new LifecycleCallbackTypeImpl<SessionBeanType<T>>(this, "post-construct", childNode, node);
list.add(type); // depends on control dependency: [for], data = [none]
}
return list;
} } |
public class class_name {
// Tries to relieve congestion between the atoms of 'pair' by inverting an
// acyclic substituent attached to a macrocycle ring atom on the path
// between them: the substituent subtree is reflected across the line
// through the ring atom perpendicular to the attachment bond. The first
// flip that improves the congestion score by at least
// IMPROVEMENT_PERC_THRESHOLD is kept and true is returned; non-improving
// flips are rolled back. Returns false when no flip helped.
private boolean macroCycleInversion(AtomPair pair) {
for (int v : pair.seqAt) {
IAtom atom = mol.getAtom(v);
if (!atom.isInRing() || adjList[v].length == 2)
continue; // need a ring atom that actually carries a substituent
if (atom.getProperty(MacroCycleLayout.MACROCYCLE_ATOM_HINT) == null)
continue; // only atoms flagged by the macrocycle layout are candidates
// Partition the bonds at this atom into ring bonds and substituents.
final List<IBond> acyclic = new ArrayList<>(2);
final List<IBond> cyclic = new ArrayList<>(2);
for (int w : adjList[v]) {
IBond bond = bondMap.get(v, w);
if (bond.isInRing())
cyclic.add(bond);
else
acyclic.add(bond);
}
if (cyclic.size() > 2)
continue; // more than two ring bonds at this atom — skip it
for (IBond bond : acyclic) {
if (bfix.contains(bond))
continue; // bond belongs to the fixed set, must not be moved
// Collect the substituent subtree reachable through 'bond'.
Arrays.fill(visited, false);
stackBackup.len = visit(visited, stackBackup.xs, v, idxs.get(bond.getOther(atom)), 0);
Point2d a = atom.getPoint2d();
Point2d b = bond.getOther(atom).getPoint2d();
Vector2d perp = new Vector2d(b.x - a.x, b.y - a.y);
perp.normalize();
double score = congestion.score(); // score before the trial flip
backupCoords(backup, stackBackup);
// Reflection line passes through 'a', perpendicular to the bond a-b.
reflect(stackBackup, new Point2d(a.x - perp.y, a.y + perp.x), new Point2d(a.x + perp.y, a.y - perp.x));
congestion.update(visited, stackBackup.xs, stackBackup.len);
if (percDiff(score, congestion.score()) >= IMPROVEMENT_PERC_THRESHOLD) {
return true; // keep the improving inversion
}
restoreCoords(stackBackup, backup); // no improvement — undo the flip
}
}
return false;
} } | public class class_name {
private boolean macroCycleInversion(AtomPair pair) {
for (int v : pair.seqAt) {
IAtom atom = mol.getAtom(v);
if (!atom.isInRing() || adjList[v].length == 2)
continue;
if (atom.getProperty(MacroCycleLayout.MACROCYCLE_ATOM_HINT) == null)
continue;
final List<IBond> acyclic = new ArrayList<>(2);
final List<IBond> cyclic = new ArrayList<>(2);
for (int w : adjList[v]) {
IBond bond = bondMap.get(v, w);
if (bond.isInRing())
cyclic.add(bond);
else
acyclic.add(bond);
}
if (cyclic.size() > 2)
continue;
for (IBond bond : acyclic) {
if (bfix.contains(bond))
continue;
Arrays.fill(visited, false); // depends on control dependency: [for], data = [none]
stackBackup.len = visit(visited, stackBackup.xs, v, idxs.get(bond.getOther(atom)), 0); // depends on control dependency: [for], data = [bond]
Point2d a = atom.getPoint2d();
Point2d b = bond.getOther(atom).getPoint2d();
Vector2d perp = new Vector2d(b.x - a.x, b.y - a.y);
perp.normalize(); // depends on control dependency: [for], data = [none]
double score = congestion.score();
backupCoords(backup, stackBackup); // depends on control dependency: [for], data = [none]
reflect(stackBackup, new Point2d(a.x - perp.y, a.y + perp.x), new Point2d(a.x + perp.y, a.y - perp.x)); // depends on control dependency: [for], data = [none]
congestion.update(visited, stackBackup.xs, stackBackup.len); // depends on control dependency: [for], data = [none]
if (percDiff(score, congestion.score()) >= IMPROVEMENT_PERC_THRESHOLD) {
return true; // depends on control dependency: [if], data = [none]
}
restoreCoords(stackBackup, backup); // depends on control dependency: [for], data = [none]
}
}
return false;
} } |
public class class_name {
// Handles a markup declaration ("<!...>") reported by the scanner. Only the
// first DOCTYPE declaration is honoured: its root element name and optional
// PUBLIC/SYSTEM identifiers are extracted, normalised, and forwarded to the
// LexicalHandler as a startDTD/endDTD pair. Subsequent DOCTYPEs and all
// other declarations are ignored.
@Override
public void decl(char[] buff, int offset, int length) throws SAXException {
String s = new String(buff, offset, length);
String name = null;
String systemid = null;
String publicid = null;
String[] v = split(s);
if (v.length > 0 && "DOCTYPE".equalsIgnoreCase(v[0])) {
if (theDoctypeIsPresent)
return; // one doctype only!
theDoctypeIsPresent = true;
if (v.length > 1) {
name = v[1];
// Either: DOCTYPE name SYSTEM "sysid"
// or:     DOCTYPE name PUBLIC "pubid" ["sysid"]
if (v.length > 3 && "SYSTEM".equals(v[2])) {
systemid = v[3];
} else if (v.length > 3 && "PUBLIC".equals(v[2])) {
publicid = v[3];
if (v.length > 4) {
systemid = v[4];
} else {
systemid = ""; // PUBLIC with no system id: report empty, not null
}
}
}
}
publicid = trimquotes(publicid);
systemid = trimquotes(systemid);
if (name != null) {
publicid = cleanPublicid(publicid);
theLexicalHandler.startDTD(name, publicid, systemid);
theLexicalHandler.endDTD();
theDoctypeName = name;
theDoctypePublicId = publicid;
if (theScanner instanceof Locator) { // Must resolve systemid
theDoctypeSystemId = ((Locator) theScanner).getSystemId();
try {
// Resolve the (possibly relative) system id against the document URL.
theDoctypeSystemId = new URL(new URL(theDoctypeSystemId), systemid).toString();
} catch (Exception ignore) {
// Best effort: on a malformed URL keep the scanner-reported system id.
}
}
}
} } | public class class_name {
@Override
public void decl(char[] buff, int offset, int length) throws SAXException {
String s = new String(buff, offset, length);
String name = null;
String systemid = null;
String publicid = null;
String[] v = split(s);
if (v.length > 0 && "DOCTYPE".equalsIgnoreCase(v[0])) {
if (theDoctypeIsPresent)
return; // one doctype only!
theDoctypeIsPresent = true;
if (v.length > 1) {
name = v[1]; // depends on control dependency: [if], data = [none]
if (v.length > 3 && "SYSTEM".equals(v[2])) {
systemid = v[3]; // depends on control dependency: [if], data = [none]
} else if (v.length > 3 && "PUBLIC".equals(v[2])) {
publicid = v[3]; // depends on control dependency: [if], data = [none]
if (v.length > 4) {
systemid = v[4]; // depends on control dependency: [if], data = [none]
} else {
systemid = ""; // depends on control dependency: [if], data = [none]
}
}
}
}
publicid = trimquotes(publicid);
systemid = trimquotes(systemid);
if (name != null) {
publicid = cleanPublicid(publicid);
theLexicalHandler.startDTD(name, publicid, systemid);
theLexicalHandler.endDTD();
theDoctypeName = name;
theDoctypePublicId = publicid;
if (theScanner instanceof Locator) { // Must resolve systemid
theDoctypeSystemId = ((Locator) theScanner).getSystemId();
try {
theDoctypeSystemId = new URL(new URL(theDoctypeSystemId), systemid).toString(); // depends on control dependency: [try], data = [none]
} catch (Exception ignore) {
} // depends on control dependency: [catch], data = [none]
}
}
} } |
public class class_name {
/**
 * Looks up the hyphenator service from the given linguistic service manager.
 *
 * @param mxLinguSvcMgr the service manager to query; may be {@code null}
 * @return the hyphenator, or {@code null} when the manager is absent or the
 *         lookup fails (the failure is reported to the user with a stack trace)
 */
private XHyphenator GetHyphenator(XLinguServiceManager mxLinguSvcMgr) {
if (mxLinguSvcMgr == null) {
    return null;
}
try {
    return mxLinguSvcMgr.getHyphenator();
} catch (Throwable t) {
    // Surface the problem to the user rather than failing silently.
    printMessage(t);
    return null;
}
} } | public class class_name {
private XHyphenator GetHyphenator(XLinguServiceManager mxLinguSvcMgr) {
try {
if (mxLinguSvcMgr != null) {
return mxLinguSvcMgr.getHyphenator(); // depends on control dependency: [if], data = [none]
}
} catch (Throwable t) {
// If anything goes wrong, give the user a stack trace
printMessage(t);
} // depends on control dependency: [catch], data = [none]
return null;
} } |
public class class_name {
/**
 * Collects the roles of every given profile into Spring Security authorities.
 *
 * @param profiles the pac4j profiles to read roles from (must not be null)
 * @return a mutable list with one {@link SimpleGrantedAuthority} per role,
 *         in profile order; empty when no profile carries any role
 */
public static List<GrantedAuthority> buildAuthorities(final List<CommonProfile> profiles) {
final List<GrantedAuthority> result = new ArrayList<>();
for (final CommonProfile profile : profiles) {
for (final String role : profile.getRoles()) {
result.add(new SimpleGrantedAuthority(role));
}
}
return result;
} } | public class class_name {
public static List<GrantedAuthority> buildAuthorities(final List<CommonProfile> profiles) {
final List<GrantedAuthority> authorities = new ArrayList<>();
for (final CommonProfile profile : profiles) {
final Set<String> roles = profile.getRoles();
for (final String role : roles) {
authorities.add(new SimpleGrantedAuthority(role)); // depends on control dependency: [for], data = [role]
}
}
return authorities;
} } |
public class class_name {
// Evaluates an XPath expression against a Treetank resource and streams the
// XML serialization of every matching node to 'output'. When the resource
// does not exist, nothing is written.
//
// resource - resource name inside the database
// revision - revision to read, or null for the most recent version
// output   - destination stream for the serialized fragments
// nodeid   - whether node ids are included in the serialization
// xpath    - the XPath expression to evaluate
// Any failure during evaluation is rethrown as an HTTP 500
// WebApplicationException; the transaction/session are always closed.
private void doXPathRes(final String resource, final Long revision, final OutputStream output,
final boolean nodeid, final String xpath) throws TTException {
// Storage connection to treetank
ISession session = null;
INodeReadTrx rtx = null;
try {
if (mDatabase.existsResource(resource)) {
session = mDatabase.getSession(new SessionConfiguration(resource, StandardSettings.KEY));
// Creating a transaction (latest revision when none was requested)
if (revision == null) {
rtx = new NodeReadTrx(session.beginBucketRtx(session.getMostRecentVersion()));
} else {
rtx = new NodeReadTrx(session.beginBucketRtx(revision));
}
final AbsAxis axis = new XPathAxis(rtx, xpath);
for (final long key : axis) {
WorkerHelper.serializeXML(session, output, false, nodeid, key, revision).call();
}
}
} catch (final Exception globExcep) {
throw new WebApplicationException(globExcep, Response.Status.INTERNAL_SERVER_ERROR);
} finally {
// Release transaction and session even when serialization failed.
WorkerHelper.closeRTX(rtx, session);
}
} } | public class class_name {
private void doXPathRes(final String resource, final Long revision, final OutputStream output,
final boolean nodeid, final String xpath) throws TTException {
// Storage connection to treetank
ISession session = null;
INodeReadTrx rtx = null;
try {
if (mDatabase.existsResource(resource)) {
session = mDatabase.getSession(new SessionConfiguration(resource, StandardSettings.KEY)); // depends on control dependency: [if], data = [none]
// Creating a transaction
if (revision == null) {
rtx = new NodeReadTrx(session.beginBucketRtx(session.getMostRecentVersion())); // depends on control dependency: [if], data = [none]
} else {
rtx = new NodeReadTrx(session.beginBucketRtx(revision)); // depends on control dependency: [if], data = [(revision]
}
final AbsAxis axis = new XPathAxis(rtx, xpath);
for (final long key : axis) {
WorkerHelper.serializeXML(session, output, false, nodeid, key, revision).call(); // depends on control dependency: [for], data = [key]
}
}
} catch (final Exception globExcep) {
throw new WebApplicationException(globExcep, Response.Status.INTERNAL_SERVER_ERROR);
} finally {
WorkerHelper.closeRTX(rtx, session);
}
} } |
public class class_name {
// Iterative rank-4 factorization of the scaled measurement matrix:
// alternately computes the best rank-4 approximation of A via SVD and
// refactors it as A ~= P*X, where P (3*numViews x 4) stacks the projective
// camera matrices and X (4 x numFeatures) holds homogeneous points.
// Iterates until the mean per-element change drops below minimumChangeTol
// or maxIterations is reached.
//
// @return false only when the SVD fails to decompose; true otherwise
public boolean process() {
int numViews = depths.numRows;
int numFeatures = depths.numCols;
P.reshape(3*numViews,4);
X.reshape(4,numFeatures);
A.reshape(numViews*3,numFeatures);
B.reshape(numViews*3,numFeatures);
// Scale depths so that they are close to unity
normalizeDepths(depths);
// Compute the initial A matrix from the (normalized) depths
assignValuesToA(A);
for (int iter = 0; iter < maxIterations; iter++) {
if( !svd.decompose(A) )
return false;
svd.getU(U,false);
svd.getV(Vt,true);
double sv[] = svd.getSingularValues();
SingularOps_DDRM.descendingOrder(U,false,sv,A.numCols,Vt,true);
// This is equivalent to forcing the rank to be 4:
// P takes the first 4 columns of U (scaled by the singular values),
// X the first 4 rows of V'.
CommonOps_DDRM.extract(U,0,0,P);
CommonOps_DDRM.multCols(P,sv);
CommonOps_DDRM.extract(Vt,0,0,X);
// Compute the new value of A
CommonOps_DDRM.mult(P,X,B);
// See how much change there is (mean absolute-ish change per element)
double delta = SpecializedOps_DDRM.diffNormF(A,B)/(A.numCols*A.numRows);
// swap arrays for the next iteration (avoids reallocating B)
DMatrixRMaj tmp = A;
A = B;
B = tmp;
// exit if converged
if( delta <= minimumChangeTol )
break;
}
return true;
} } | public class class_name {
public boolean process() {
int numViews = depths.numRows;
int numFeatures = depths.numCols;
P.reshape(3*numViews,4);
X.reshape(4,numFeatures);
A.reshape(numViews*3,numFeatures);
B.reshape(numViews*3,numFeatures);
// Scale depths so that they are close to unity
normalizeDepths(depths);
// Compute the initial A matirx
assignValuesToA(A);
for (int iter = 0; iter < maxIterations; iter++) {
if( !svd.decompose(A) )
return false;
svd.getU(U,false); // depends on control dependency: [for], data = [none]
svd.getV(Vt,true); // depends on control dependency: [for], data = [none]
double sv[] = svd.getSingularValues();
SingularOps_DDRM.descendingOrder(U,false,sv,A.numCols,Vt,true); // depends on control dependency: [for], data = [none]
// This is equivalent to forcing the rank to be 4
CommonOps_DDRM.extract(U,0,0,P); // depends on control dependency: [for], data = [none]
CommonOps_DDRM.multCols(P,sv); // depends on control dependency: [for], data = [none]
CommonOps_DDRM.extract(Vt,0,0,X); // depends on control dependency: [for], data = [none]
// Compute the new value of A
CommonOps_DDRM.mult(P,X,B); // depends on control dependency: [for], data = [none]
// See how much change there is
double delta = SpecializedOps_DDRM.diffNormF(A,B)/(A.numCols*A.numRows);
// swap arrays for the next iteration
DMatrixRMaj tmp = A;
A = B; // depends on control dependency: [for], data = [none]
B = tmp; // depends on control dependency: [for], data = [none]
// exit if converged
if( delta <= minimumChangeTol )
break;
}
return true;
} } |
public class class_name {
/**
 * Loads the persisted prefix-to-URI namespace mappings from {@code storage}
 * (a {@link Properties} file) into both in-memory lookup maps. Does nothing
 * when the storage file does not exist.
 *
 * @throws IOException if reading or parsing the storage file fails
 */
private void load() throws IOException
{
if (!PrivilegedFileHelper.exists(storage))
{
return; // nothing persisted yet
}
InputStream in = PrivilegedFileHelper.fileInputStream(storage);
try
{
Properties props = new Properties();
log.debug("loading namespace mappings...");
props.load(in);
// Mirror every persisted prefix/uri pair into both directions.
for (String prefix : props.stringPropertyNames())
{
String uri = props.getProperty(prefix);
log.debug(prefix + " -> " + uri);
prefixToURI.put(prefix, uri);
uriToPrefix.put(uri, prefix);
}
prefixCount = props.size();
log.debug("namespace mappings loaded.");
}
finally
{
in.close();
}
} } | public class class_name {
private void load() throws IOException
{
if (PrivilegedFileHelper.exists(storage))
{
InputStream in = PrivilegedFileHelper.fileInputStream(storage);
try
{
Properties props = new Properties();
log.debug("loading namespace mappings..."); // depends on control dependency: [try], data = [none]
props.load(in); // depends on control dependency: [try], data = [none]
// read mappings from properties
Iterator<Object> iter = props.keySet().iterator();
while (iter.hasNext())
{
String prefix = (String)iter.next();
String uri = props.getProperty(prefix);
log.debug(prefix + " -> " + uri); // depends on control dependency: [while], data = [none]
prefixToURI.put(prefix, uri); // depends on control dependency: [while], data = [none]
uriToPrefix.put(uri, prefix); // depends on control dependency: [while], data = [none]
}
prefixCount = props.size(); // depends on control dependency: [try], data = [none]
log.debug("namespace mappings loaded."); // depends on control dependency: [try], data = [none]
}
finally
{
in.close();
}
}
} } |
public class class_name {
/**
 * Creates a date token for the given pattern.
 *
 * @param configuration date format pattern, or {@code null} for the default
 * @return a token using the pattern; falls back to the default format (and
 *         logs an error) when the pattern is invalid
 */
private static Token createDateToken(final String configuration) {
if (configuration != null) {
try {
return new DateToken(configuration);
} catch (IllegalArgumentException ex) {
InternalLogger.log(Level.ERROR, "'" + configuration + "' is an invalid date format pattern");
}
}
return new DateToken();
} } | public class class_name {
private static Token createDateToken(final String configuration) {
if (configuration == null) {
return new DateToken(); // depends on control dependency: [if], data = [none]
} else {
try {
return new DateToken(configuration); // depends on control dependency: [try], data = [none]
} catch (IllegalArgumentException ex) {
InternalLogger.log(Level.ERROR, "'" + configuration + "' is an invalid date format pattern");
return new DateToken();
} // depends on control dependency: [catch], data = [none]
}
} } |
public class class_name {
/**
 * Reports whether the named procedure is registered as read-only.
 *
 * @param pname procedure name to look up
 * @return {@code true} only when the procedure is known and flagged read-only
 */
private boolean isReadOnlyProcedure(String pname) {
final Boolean readOnly = m_procedureInfo.get().get(pname);
return readOnly != null && readOnly.booleanValue();
} } | public class class_name {
private boolean isReadOnlyProcedure(String pname) {
final Boolean b = m_procedureInfo.get().get(pname);
if (b == null) {
return false; // depends on control dependency: [if], data = [none]
}
return b;
} } |
public class class_name {
// Called once when the document has been fully processed: logs the event,
// dumps PHPContainer state, closes this instance's container (when one was
// opened), then delegates to the superclass hook.
@Override
public void onEndDocument(Document document) {
LOGGER.info("End document");
PHPContainer.dump();
if (container != null) {
container.close();
}
super.onEndDocument(document);
} } | public class class_name {
@Override
public void onEndDocument(Document document) {
LOGGER.info("End document");
PHPContainer.dump();
if (container != null) {
container.close(); // depends on control dependency: [if], data = [none]
}
super.onEndDocument(document);
} } |
public class class_name {
/**
 * Counts the location-path steps of the given iterator.
 *
 * @param lpi the location path iterator to inspect
 * @return the number of walkers when {@code lpi} is a
 *         {@link WalkingIterator}; {@code 1} for any other iterator type
 */
protected int countSteps(LocPathIterator lpi)
{
if (!(lpi instanceof WalkingIterator))
{
return 1;
}
int count = 0;
// Walk the singly-linked walker chain and count its elements.
for (AxesWalker walker = ((WalkingIterator) lpi).getFirstWalker();
     walker != null;
     walker = walker.getNextWalker())
{
count++;
}
return count;
} } | public class class_name {
protected int countSteps(LocPathIterator lpi)
{
if(lpi instanceof WalkingIterator)
{
WalkingIterator wi = (WalkingIterator)lpi;
AxesWalker aw = wi.getFirstWalker();
int count = 0;
while(null != aw)
{
count++; // depends on control dependency: [while], data = [none]
aw = aw.getNextWalker(); // depends on control dependency: [while], data = [none]
}
return count; // depends on control dependency: [if], data = [none]
}
else
return 1;
} } |
public class class_name {
/**
 * Runs the event loop until this thread is interrupted. Each pass waits at
 * most until the earliest pending timer is due (clamped to zero when it is
 * already overdue), or indefinitely ({@code -1}) when no timer is pending.
 */
public void doEvents() {
while (!isInterrupted()) {
final Iterator<Map.Entry<Timer, Runnable>> timers = timerMap.entrySet().iterator();
if (!timers.hasNext()) {
doEvents(-1); // no timers: block until the next event arrives
} else {
final long remaining = timers.next().getKey().uptime - System.currentTimeMillis();
doEvents(Math.max(remaining, 0));
}
}
} } | public class class_name {
public void doEvents() {
while (!isInterrupted()) {
Iterator<Map.Entry<Timer, Runnable>> it = timerMap.entrySet().iterator();
if (it.hasNext()) {
long timeout = it.next().getKey().uptime - System.currentTimeMillis();
doEvents(timeout > 0 ? timeout : 0);
// depends on control dependency: [if], data = [none]
} else {
doEvents(-1);
// depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
/**
 * Probes whether this facility can be instantiated in the current
 * environment.
 *
 * @return {@code true} when {@code create()} yields a non-null instance;
 *         {@code false} when it returns null or throws (logged at FINEST)
 */
public static boolean isAvailable()
{
try {
final Object probe = create();
return probe != null;
} catch (Exception e) {
log.log(Level.FINEST, e.toString(), e);
return false;
}
} } | public class class_name {
public static boolean isAvailable()
{
try {
return create() != null; // depends on control dependency: [try], data = [none]
} catch (Exception e) {
log.log(Level.FINEST, e.toString(), e);
return false;
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
/**
 * Flattens the FSA graph into an array of node offsets in depth-first
 * (preorder) order starting from the root; each node appears exactly once.
 *
 * @param fsa the automaton to linearize
 * @return array of node offsets, trimmed to the number of nodes visited
 */
private int[] linearize(final FSA fsa) {
int[] linearized = new int[0];
int last = 0; // number of filled slots in 'linearized'
BitSet visited = new BitSet();
IntStack nodes = new IntStack();
nodes.push(fsa.getRootNode());
while (!nodes.isEmpty()) {
final int node = nodes.pop();
if (visited.get(node)) {
continue; // already emitted via another arc
}
if (last >= linearized.length) {
// Grow geometrically (but by at least the original 100000 step) so
// repeated copying stays amortized O(n) instead of quadratic.
final int grown = linearized.length + Math.max(100000, linearized.length >> 1);
linearized = Arrays.copyOf(linearized, grown);
}
visited.set(node);
linearized[last++] = node;
// Push unvisited successors; terminal arcs have no target node.
for (int arc = fsa.getFirstArc(node); arc != 0; arc = fsa.getNextArc(arc)) {
if (!fsa.isArcTerminal(arc)) {
int target = fsa.getEndNode(arc);
if (!visited.get(target))
nodes.push(target);
}
}
}
return Arrays.copyOf(linearized, last);
} } | public class class_name {
private int[] linearize(final FSA fsa) {
int[] linearized = new int[0];
int last = 0;
BitSet visited = new BitSet();
IntStack nodes = new IntStack();
nodes.push(fsa.getRootNode());
while (!nodes.isEmpty()) {
final int node = nodes.pop();
if (visited.get(node)) {
continue;
}
if (last >= linearized.length) {
linearized = Arrays.copyOf(linearized, linearized.length + 100000);
// depends on control dependency: [if], data = [none]
}
visited.set(node);
// depends on control dependency: [while], data = [none]
linearized[last++] = node;
// depends on control dependency: [while], data = [none]
for (int arc = fsa.getFirstArc(node); arc != 0; arc = fsa.getNextArc(arc)) {
if (!fsa.isArcTerminal(arc)) {
int target = fsa.getEndNode(arc);
if (!visited.get(target))
nodes.push(target);
}
}
}
return Arrays.copyOf(linearized, last);
} } |
public class class_name {
// Retrieves the properties of one content item from the storage provider,
// retrying transient failures via Retrier. A NotFoundException means the
// content is absent and yields null; any other persistent failure is
// wrapped in a DuplicationTaskExecutionFailedException.
private Map<String, String> getContentProperties(final StorageProvider store,
final String spaceId,
final String contentId)
throws TaskExecutionFailedException {
try {
return new Retrier().execute(new Retriable() {
@Override
public Map<String, String> retry() throws Exception {
// The actual method being executed
return store.getContentProperties(spaceId, contentId);
}
}, new ExceptionHandler() {
@Override
public void handle(Exception ex) {
// Log per-attempt failures; a not-found is expected and only
// logged as "probably not an issue".
if (!(ex instanceof NotFoundException)) {
log.debug(ex.getMessage(), ex);
} else {
log.debug("retry attempt failed but probably not an issue: {}", ex.getMessage());
}
}
});
} catch (NotFoundException nfe) {
return null; // content does not exist — caller treats null as "absent"
} catch (Exception e) {
String msg = "Error attempting to retrieve content properties: " + e.getMessage();
throw new DuplicationTaskExecutionFailedException(buildFailureMessage(msg), e);
}
} } | public class class_name {
private Map<String, String> getContentProperties(final StorageProvider store,
final String spaceId,
final String contentId)
throws TaskExecutionFailedException {
try {
return new Retrier().execute(new Retriable() {
@Override
public Map<String, String> retry() throws Exception {
// The actual method being executed
return store.getContentProperties(spaceId, contentId);
}
}, new ExceptionHandler() {
@Override
public void handle(Exception ex) {
if (!(ex instanceof NotFoundException)) {
log.debug(ex.getMessage(), ex); // depends on control dependency: [if], data = [none]
} else {
log.debug("retry attempt failed but probably not an issue: {}", ex.getMessage()); // depends on control dependency: [if], data = [none]
}
}
});
} catch (NotFoundException nfe) {
return null;
} catch (Exception e) {
String msg = "Error attempting to retrieve content properties: " + e.getMessage();
throw new DuplicationTaskExecutionFailedException(buildFailureMessage(msg), e);
}
} } |
public class class_name {
// Scans method.instructions starting at 'index' for the first occurrence of
// the 'matches' sequence (compared element-by-element with insnEqual) and
// returns the instruction at which the match begins, or null when the
// sequence does not occur. A candidate match truncated by the end of the
// instruction list does not count as a match.
// NOTE(review): assumes 'matches' is non-empty — with an empty list 'match'
// is null and insnEqual would be handed a null argument; confirm callers.
public static AbstractInsnNode findInstruction(MethodNode method, InsnList matches, int index)
{
AbstractInsnNode node = method.instructions.get(index);
AbstractInsnNode match = matches.getFirst();
while (node != null)
{
if (insnEqual(node, match))
{
// First instruction matched: walk both sequences in lock step.
AbstractInsnNode m = match.getNext();
AbstractInsnNode n = node.getNext();
while (m != null && n != null && insnEqual(m, n))
{
m = m.getNext();
n = n.getNext();
}
if (m == null)
return node; // whole pattern consumed — match starts at 'node'
}
node = node.getNext();
}
return null;
} } | public class class_name {
public static AbstractInsnNode findInstruction(MethodNode method, InsnList matches, int index)
{
AbstractInsnNode node = method.instructions.get(index);
AbstractInsnNode match = matches.getFirst();
while (node != null)
{
if (insnEqual(node, match))
{
AbstractInsnNode m = match.getNext();
AbstractInsnNode n = node.getNext();
while (m != null && n != null && insnEqual(m, n))
{
m = m.getNext(); // depends on control dependency: [while], data = [none]
n = n.getNext(); // depends on control dependency: [while], data = [none]
}
if (m == null)
return node;
}
node = node.getNext(); // depends on control dependency: [while], data = [none]
}
return null;
} } |
public class class_name {
// Fetches the CommerceNotificationAttachment with the given primary key, or
// null when none exists. The entity cache is consulted first (the nullModel
// sentinel marks keys already known to be absent); on a cache miss the
// entity is loaded through a session, then positively or negatively cached.
// On any persistence error the cache entry is evicted and the exception is
// rethrown via processException.
@Override
public CommerceNotificationAttachment fetchByPrimaryKey(
Serializable primaryKey) {
Serializable serializable = entityCache.getResult(CommerceNotificationAttachmentModelImpl.ENTITY_CACHE_ENABLED,
CommerceNotificationAttachmentImpl.class, primaryKey);
if (serializable == nullModel) {
return null; // negative-cached: known to be absent
}
CommerceNotificationAttachment commerceNotificationAttachment = (CommerceNotificationAttachment)serializable;
if (commerceNotificationAttachment == null) {
Session session = null;
try {
session = openSession();
commerceNotificationAttachment = (CommerceNotificationAttachment)session.get(CommerceNotificationAttachmentImpl.class,
primaryKey);
if (commerceNotificationAttachment != null) {
cacheResult(commerceNotificationAttachment);
}
else {
// Remember the miss so later lookups skip the session round trip.
entityCache.putResult(CommerceNotificationAttachmentModelImpl.ENTITY_CACHE_ENABLED,
CommerceNotificationAttachmentImpl.class, primaryKey,
nullModel);
}
}
catch (Exception e) {
// Drop any stale cache entry before propagating the failure.
entityCache.removeResult(CommerceNotificationAttachmentModelImpl.ENTITY_CACHE_ENABLED,
CommerceNotificationAttachmentImpl.class, primaryKey);
throw processException(e);
}
finally {
closeSession(session);
}
}
return commerceNotificationAttachment;
} } | public class class_name {
@Override
public CommerceNotificationAttachment fetchByPrimaryKey(
Serializable primaryKey) {
Serializable serializable = entityCache.getResult(CommerceNotificationAttachmentModelImpl.ENTITY_CACHE_ENABLED,
CommerceNotificationAttachmentImpl.class, primaryKey);
if (serializable == nullModel) {
return null; // depends on control dependency: [if], data = [none]
}
CommerceNotificationAttachment commerceNotificationAttachment = (CommerceNotificationAttachment)serializable;
if (commerceNotificationAttachment == null) {
Session session = null;
try {
session = openSession(); // depends on control dependency: [try], data = [none]
commerceNotificationAttachment = (CommerceNotificationAttachment)session.get(CommerceNotificationAttachmentImpl.class,
primaryKey); // depends on control dependency: [try], data = [none]
if (commerceNotificationAttachment != null) {
cacheResult(commerceNotificationAttachment); // depends on control dependency: [if], data = [(commerceNotificationAttachment]
}
else {
entityCache.putResult(CommerceNotificationAttachmentModelImpl.ENTITY_CACHE_ENABLED,
CommerceNotificationAttachmentImpl.class, primaryKey,
nullModel); // depends on control dependency: [if], data = [none]
}
}
catch (Exception e) {
entityCache.removeResult(CommerceNotificationAttachmentModelImpl.ENTITY_CACHE_ENABLED,
CommerceNotificationAttachmentImpl.class, primaryKey);
throw processException(e);
} // depends on control dependency: [catch], data = [none]
finally {
closeSession(session);
}
}
return commerceNotificationAttachment;
} } |
public class class_name {
/**
 * Resolves the most specific wrapper class for a value. Numbers are
 * narrowed to the smallest fitting type: decimals to Float/Double, whole
 * numbers to Byte/Short/Integer/Long. Non-numbers are delegated to the
 * superclass.
 *
 * @param obj the value whose type is being resolved
 * @return the resolved class for {@code obj}
 */
@Override
public Class resolveType(Object obj) {
if (!(obj instanceof Number)) {
return super.resolveType(obj);
}
Number value = (Number) obj;
if (isDecimal(value)) {
return isFloat(value) ? Float.class : Double.class;
}
if (isByte(value)) {
return Byte.class;
}
if (isShort(value)) {
return Short.class;
}
return isInteger(value) ? Integer.class : Long.class;
} } | public class class_name {
@Override
public Class resolveType(Object obj) {
if (obj instanceof Number) {
Number value = (Number) obj;
if (isDecimal(value)) {
return (isFloat(value) ? Float.class : Double.class); // depends on control dependency: [if], data = [none]
}
else {
return (isByte(value) ? Byte.class
: (isShort(value) ? Short.class
: (isInteger(value) ? Integer.class : Long.class))); // depends on control dependency: [if], data = [none]
}
}
return super.resolveType(obj);
} } |
public class class_name {
/**
 * Decides whether a source line counts as a line of code. Excluded are
 * lines shorter than the indicator area, whitespace-only lines, comment
 * lines, and lines consisting of a single compiler directive.
 *
 * @param line the raw source line
 * @return {@code true} when the line should be counted as code
 */
public boolean isLineOfCode(final String line) {
final String trimmed = line.trim();
if (line.length() < getIndicatorAreaPos() + 1
|| trimmed.length() == 0
|| isComment(line)) {
return false;
}
/*
 * If there is a single token on this line, make sure it is not a
 * compiler directive.
 */
final String[] tokens = trimmed.split("[\\s\\.]+");
return !(tokens.length == 1
&& COMPILER_DIRECTIVES.contains(tokens[0].toUpperCase(Locale
.getDefault())));
} } | public class class_name {
public boolean isLineOfCode(final String line) {
if (line.length() < getIndicatorAreaPos() + 1) {
return false; // depends on control dependency: [if], data = [none]
}
/* Remove white space lines */
if (line.trim().length() == 0) {
return false; // depends on control dependency: [if], data = [none]
}
/* Remove comments and special lines */
if (isComment(line)) {
return false; // depends on control dependency: [if], data = [none]
}
/*
* If there is a single token on this line, make sure it is not a
* compiler directive.
*/
String[] tokens = line.trim().split("[\\s\\.]+");
if (tokens.length == 1
&& COMPILER_DIRECTIVES.contains(tokens[0].toUpperCase(Locale
.getDefault()))) {
return false; // depends on control dependency: [if], data = [none]
}
return true;
} } |
public class class_name {
// Replaces the first (north-west) child of this tree node. The current
// child, if any, is detached first (parent reference cleared, counter
// decremented, child-removed event fired); the new child is detached from
// any previous parent, installed, counted, and a child-added event fired.
//
// newChild - the new first child; null clears the slot
// returns true when the child actually changed, false when newChild was
// already the first child.
public boolean setFirstChild(N newChild) {
final N oldChild = this.nNorthWest;
if (oldChild == newChild) {
return false; // no-op: same child already installed
}
if (oldChild != null) {
oldChild.setParentNodeReference(null, true);
--this.notNullChildCount;
firePropertyChildRemoved(0, oldChild);
}
if (newChild != null) {
final N oldParent = newChild.getParentNode();
if (oldParent != this) {
// Detach from its previous parent before re-homing it here.
newChild.removeFromParent();
}
}
this.nNorthWest = newChild;
if (newChild != null) {
newChild.setParentNodeReference(toN(), true);
++this.notNullChildCount;
firePropertyChildAdded(0, newChild);
}
return true;
} } | public class class_name {
public boolean setFirstChild(N newChild) {
final N oldChild = this.nNorthWest;
if (oldChild == newChild) {
return false; // depends on control dependency: [if], data = [none]
}
if (oldChild != null) {
oldChild.setParentNodeReference(null, true); // depends on control dependency: [if], data = [none]
--this.notNullChildCount; // depends on control dependency: [if], data = [none]
firePropertyChildRemoved(0, oldChild); // depends on control dependency: [if], data = [none]
}
if (newChild != null) {
final N oldParent = newChild.getParentNode();
if (oldParent != this) {
newChild.removeFromParent(); // depends on control dependency: [if], data = [none]
}
}
this.nNorthWest = newChild;
if (newChild != null) {
newChild.setParentNodeReference(toN(), true); // depends on control dependency: [if], data = [none]
++this.notNullChildCount; // depends on control dependency: [if], data = [none]
firePropertyChildAdded(0, newChild); // depends on control dependency: [if], data = [none]
}
return true;
} } |
public class class_name {
// Removes the entry addressed by 'deletionPath' from the index tree and
// re-balances it: the tree is condensed bottom-up, and every underflowed
// node collected on the stack has its contents re-inserted — leaf entries
// directly, inner nodes by expanding their subtrees onto the stack. Each
// drained node is then deleted from the page file. Finishes with the
// post-delete hook and the (debug-only) integrity checks.
protected void deletePath(IndexTreePath<E> deletionPath) {
N leaf = getNode(deletionPath.getParentPath().getEntry());
int index = deletionPath.getIndex();
// delete o
E entry = leaf.getEntry(index);
leaf.deleteEntry(index);
writeNode(leaf);
// condense the tree
Stack<N> stack = new Stack<>();
condenseTree(deletionPath.getParentPath(), stack);
// reinsert underflow nodes
while(!stack.empty()) {
N node = stack.pop();
if(node.isLeaf()) {
for(int i = 0; i < node.getNumEntries(); i++) {
settings.getOverflowTreatment().reinitialize(); // Intended?
this.insertLeafEntry(node.getEntry(i));
}
}
else {
// Inner node: push its children so their leaves get re-inserted too.
for(int i = 0; i < node.getNumEntries(); i++) {
stack.push(getNode(node.getEntry(i)));
}
}
deleteNode(node); // the drained node is no longer referenced
}
postDelete(entry);
doExtraIntegrityChecks();
} } | public class class_name {
protected void deletePath(IndexTreePath<E> deletionPath) {
N leaf = getNode(deletionPath.getParentPath().getEntry());
int index = deletionPath.getIndex();
// delete o
E entry = leaf.getEntry(index);
leaf.deleteEntry(index);
writeNode(leaf);
// condense the tree
Stack<N> stack = new Stack<>();
condenseTree(deletionPath.getParentPath(), stack);
// reinsert underflow nodes
while(!stack.empty()) {
N node = stack.pop();
if(node.isLeaf()) {
for(int i = 0; i < node.getNumEntries(); i++) {
settings.getOverflowTreatment().reinitialize(); // Intended? // depends on control dependency: [for], data = [none]
this.insertLeafEntry(node.getEntry(i)); // depends on control dependency: [for], data = [i]
}
}
else {
for(int i = 0; i < node.getNumEntries(); i++) {
stack.push(getNode(node.getEntry(i))); // depends on control dependency: [for], data = [i]
}
}
deleteNode(node); // depends on control dependency: [while], data = [none]
}
postDelete(entry);
doExtraIntegrityChecks();
} } |
public class class_name {
public static XElement storeList(String container, String item, Iterable<? extends XSerializable> source) {
XElement result = new XElement(container);
for (XSerializable e : source) {
e.save(result.add(item));
}
return result;
} } | public class class_name {
public static XElement storeList(String container, String item, Iterable<? extends XSerializable> source) {
XElement result = new XElement(container);
for (XSerializable e : source) {
e.save(result.add(item)); // depends on control dependency: [for], data = [e]
}
return result;
} } |
public class class_name {
public static void addMarkerAsPolygon(Marker marker, List<Marker> markers) {
LatLng position = marker.getPosition();
int insertLocation = markers.size();
if (markers.size() > 2) {
double[] distances = new double[markers.size()];
insertLocation = 0;
distances[0] = SphericalUtil.computeDistanceBetween(position,
markers.get(0).getPosition());
for (int i = 1; i < markers.size(); i++) {
distances[i] = SphericalUtil.computeDistanceBetween(position,
markers.get(i).getPosition());
if (distances[i] < distances[insertLocation]) {
insertLocation = i;
}
}
int beforeLocation = insertLocation > 0 ? insertLocation - 1
: distances.length - 1;
int afterLocation = insertLocation < distances.length - 1 ? insertLocation + 1
: 0;
if (distances[beforeLocation] > distances[afterLocation]) {
insertLocation = afterLocation;
}
}
markers.add(insertLocation, marker);
} } | public class class_name {
public static void addMarkerAsPolygon(Marker marker, List<Marker> markers) {
LatLng position = marker.getPosition();
int insertLocation = markers.size();
if (markers.size() > 2) {
double[] distances = new double[markers.size()];
insertLocation = 0; // depends on control dependency: [if], data = [none]
distances[0] = SphericalUtil.computeDistanceBetween(position,
markers.get(0).getPosition()); // depends on control dependency: [if], data = [none]
for (int i = 1; i < markers.size(); i++) {
distances[i] = SphericalUtil.computeDistanceBetween(position,
markers.get(i).getPosition()); // depends on control dependency: [for], data = [i]
if (distances[i] < distances[insertLocation]) {
insertLocation = i; // depends on control dependency: [if], data = [none]
}
}
int beforeLocation = insertLocation > 0 ? insertLocation - 1
: distances.length - 1;
int afterLocation = insertLocation < distances.length - 1 ? insertLocation + 1
: 0;
if (distances[beforeLocation] > distances[afterLocation]) {
insertLocation = afterLocation; // depends on control dependency: [if], data = [none]
}
}
markers.add(insertLocation, marker);
} } |
public class class_name {
public SSLSocketFactory getSSLSocketFactory(Map<String, Object> connectionInfo, Properties props) throws SSLException {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
Tr.entry(tc, "getSSLSocketFactory");
try {
SSLConfig newConfig = null;
String contextProvider = null;
if (props != null && !props.isEmpty()) {
newConfig = new SSLConfig(props);
contextProvider = newConfig.getProperty(Constants.SSLPROP_CONTEXT_PROVIDER);
} else {
if (connectionInfo == null || connectionInfo.isEmpty()) {
connectionInfo = new HashMap<String, Object>();
connectionInfo.put(Constants.CONNECTION_INFO_DIRECTION, Constants.DIRECTION_OUTBOUND);
if (newConfig == null) {
Properties sslProps = getProperties(null, connectionInfo, null, true);
newConfig = new SSLConfig(sslProps);
contextProvider = sslProps.getProperty(Constants.SSLPROP_CONTEXT_PROVIDER);
}
}
}
SSLSocketFactory rc = JSSEProviderFactory.getInstance(contextProvider).getSSLSocketFactory(connectionInfo, newConfig);
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
Tr.exit(tc, "getSSLSocketFactory: " + rc);
return rc;
} catch (Exception e) {
FFDCFilter.processException(e, getClass().getName(), "getSSLSocketFactory", this);
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
Tr.exit(tc, "The following exception occurred in getSSLSocketFactory().", new Object[] { e });
throw asSSLException(e);
}
} } | public class class_name {
public SSLSocketFactory getSSLSocketFactory(Map<String, Object> connectionInfo, Properties props) throws SSLException {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
Tr.entry(tc, "getSSLSocketFactory");
try {
SSLConfig newConfig = null;
String contextProvider = null;
if (props != null && !props.isEmpty()) {
newConfig = new SSLConfig(props); // depends on control dependency: [if], data = [(props]
contextProvider = newConfig.getProperty(Constants.SSLPROP_CONTEXT_PROVIDER); // depends on control dependency: [if], data = [none]
} else {
if (connectionInfo == null || connectionInfo.isEmpty()) {
connectionInfo = new HashMap<String, Object>(); // depends on control dependency: [if], data = [none]
connectionInfo.put(Constants.CONNECTION_INFO_DIRECTION, Constants.DIRECTION_OUTBOUND); // depends on control dependency: [if], data = [none]
if (newConfig == null) {
Properties sslProps = getProperties(null, connectionInfo, null, true);
newConfig = new SSLConfig(sslProps); // depends on control dependency: [if], data = [none]
contextProvider = sslProps.getProperty(Constants.SSLPROP_CONTEXT_PROVIDER); // depends on control dependency: [if], data = [none]
}
}
}
SSLSocketFactory rc = JSSEProviderFactory.getInstance(contextProvider).getSSLSocketFactory(connectionInfo, newConfig);
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
Tr.exit(tc, "getSSLSocketFactory: " + rc);
return rc;
} catch (Exception e) {
FFDCFilter.processException(e, getClass().getName(), "getSSLSocketFactory", this);
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
Tr.exit(tc, "The following exception occurred in getSSLSocketFactory().", new Object[] { e });
throw asSSLException(e);
}
} } |
public class class_name {
private <T> T processFunction(SplitFunction<T> function) {
synchronized (splits) {
if (splits.isEmpty()) {
return null;
}
for (Split split : splits) {
function.evaluate(split);
}
return function.result();
}
} } | public class class_name {
private <T> T processFunction(SplitFunction<T> function) {
synchronized (splits) {
if (splits.isEmpty()) {
return null;
// depends on control dependency: [if], data = [none]
}
for (Split split : splits) {
function.evaluate(split);
// depends on control dependency: [for], data = [split]
}
return function.result();
}
} } |
public class class_name {
public static Polygon lines2Polygon( boolean checkValid, LineString... lines ) {
List<Coordinate> coordinatesList = new ArrayList<Coordinate>();
List<LineString> linesList = new ArrayList<LineString>();
for( LineString tmpLine : lines ) {
linesList.add(tmpLine);
}
LineString currentLine = linesList.get(0);
linesList.remove(0);
while( linesList.size() > 0 ) {
Coordinate[] coordinates = currentLine.getCoordinates();
List<Coordinate> tmpList = Arrays.asList(coordinates);
coordinatesList.addAll(tmpList);
Point thePoint = currentLine.getEndPoint();
double minDistance = Double.MAX_VALUE;
LineString minDistanceLine = null;
boolean needFlip = false;
for( LineString tmpLine : linesList ) {
Point tmpStartPoint = tmpLine.getStartPoint();
double distance = thePoint.distance(tmpStartPoint);
if (distance < minDistance) {
minDistance = distance;
minDistanceLine = tmpLine;
needFlip = false;
}
Point tmpEndPoint = tmpLine.getEndPoint();
distance = thePoint.distance(tmpEndPoint);
if (distance < minDistance) {
minDistance = distance;
minDistanceLine = tmpLine;
needFlip = true;
}
}
linesList.remove(minDistanceLine);
if (needFlip) {
minDistanceLine = (LineString) minDistanceLine.reverse();
}
currentLine = minDistanceLine;
}
// add last
Coordinate[] coordinates = currentLine.getCoordinates();
List<Coordinate> tmpList = Arrays.asList(coordinates);
coordinatesList.addAll(tmpList);
coordinatesList.add(coordinatesList.get(0));
LinearRing linearRing = gf().createLinearRing(coordinatesList.toArray(new Coordinate[0]));
Polygon polygon = gf().createPolygon(linearRing, null);
if (checkValid) {
if (!polygon.isValid()) {
return null;
}
}
return polygon;
} } | public class class_name {
public static Polygon lines2Polygon( boolean checkValid, LineString... lines ) {
List<Coordinate> coordinatesList = new ArrayList<Coordinate>();
List<LineString> linesList = new ArrayList<LineString>();
for( LineString tmpLine : lines ) {
linesList.add(tmpLine); // depends on control dependency: [for], data = [tmpLine]
}
LineString currentLine = linesList.get(0);
linesList.remove(0);
while( linesList.size() > 0 ) {
Coordinate[] coordinates = currentLine.getCoordinates();
List<Coordinate> tmpList = Arrays.asList(coordinates);
coordinatesList.addAll(tmpList); // depends on control dependency: [while], data = [none]
Point thePoint = currentLine.getEndPoint();
double minDistance = Double.MAX_VALUE;
LineString minDistanceLine = null;
boolean needFlip = false;
for( LineString tmpLine : linesList ) {
Point tmpStartPoint = tmpLine.getStartPoint();
double distance = thePoint.distance(tmpStartPoint);
if (distance < minDistance) {
minDistance = distance; // depends on control dependency: [if], data = [none]
minDistanceLine = tmpLine; // depends on control dependency: [if], data = [none]
needFlip = false; // depends on control dependency: [if], data = [none]
}
Point tmpEndPoint = tmpLine.getEndPoint();
distance = thePoint.distance(tmpEndPoint); // depends on control dependency: [for], data = [none]
if (distance < minDistance) {
minDistance = distance; // depends on control dependency: [if], data = [none]
minDistanceLine = tmpLine; // depends on control dependency: [if], data = [none]
needFlip = true; // depends on control dependency: [if], data = [none]
}
}
linesList.remove(minDistanceLine); // depends on control dependency: [while], data = [none]
if (needFlip) {
minDistanceLine = (LineString) minDistanceLine.reverse(); // depends on control dependency: [if], data = [none]
}
currentLine = minDistanceLine; // depends on control dependency: [while], data = [none]
}
// add last
Coordinate[] coordinates = currentLine.getCoordinates();
List<Coordinate> tmpList = Arrays.asList(coordinates);
coordinatesList.addAll(tmpList);
coordinatesList.add(coordinatesList.get(0));
LinearRing linearRing = gf().createLinearRing(coordinatesList.toArray(new Coordinate[0]));
Polygon polygon = gf().createPolygon(linearRing, null);
if (checkValid) {
if (!polygon.isValid()) {
return null; // depends on control dependency: [if], data = [none]
}
}
return polygon;
} } |
public class class_name {
public static double evaluate(Phylogeny tree, DistanceMatrix matrix) {
int numSequences = matrix.getSize();
List<PhylogenyNode> externalNodes = tree.getExternalNodes();
HashMap<String, PhylogenyNode> externalNodesHashMap = new HashMap<String, PhylogenyNode>();
Set<PhylogenyNode> path = new HashSet<PhylogenyNode>();
for (PhylogenyNode node : externalNodes) {
externalNodesHashMap.put(node.getName(), node);
}
int count = 0;
double averageMatrixDistance = 0.0;
double averageTreeDistance = 0.0;
double averageTreeErrorDistance = 0.0;
for (int row = 0; row < numSequences - 1; row++) {
String nodeName1 = matrix.getIdentifier(row);
PhylogenyNode node1 = externalNodesHashMap.get(nodeName1);
markPathToRoot(node1, path);
for (int col = row + 1; col < numSequences; col++) {
count++;
String nodeName2 = matrix.getIdentifier(col);
PhylogenyNode node2 = externalNodesHashMap.get(nodeName2);
double distance = matrix.getValue(col, row);
averageMatrixDistance = averageMatrixDistance + distance;
PhylogenyNode commonParent = findCommonParent(node2, path);
if (commonParent != null) {
double treeDistance = getNodeDistance(commonParent, node1)
+ getNodeDistance(commonParent, node2);
averageTreeDistance += treeDistance;
averageTreeErrorDistance += (distance - treeDistance)
* (distance - treeDistance);
logger.info("{} {} Distance: {}Tree: {} difference: {}",
nodeName1, nodeName2, distance, treeDistance,
Math.abs(distance - treeDistance));
} else {
logger.warn("Unable to find common parent with {} {}",
node1, node2);
}
}
path.clear();
}
averageMatrixDistance /= count;
averageTreeDistance /= count;
averageTreeErrorDistance /= count;
logger.info("Average matrix distance: {}", averageMatrixDistance);
logger.info("Average tree distance: {}", averageTreeDistance);
logger.info("Average LS error: {}", averageTreeErrorDistance);
return Math.sqrt(averageTreeErrorDistance) / averageMatrixDistance;
} } | public class class_name {
public static double evaluate(Phylogeny tree, DistanceMatrix matrix) {
int numSequences = matrix.getSize();
List<PhylogenyNode> externalNodes = tree.getExternalNodes();
HashMap<String, PhylogenyNode> externalNodesHashMap = new HashMap<String, PhylogenyNode>();
Set<PhylogenyNode> path = new HashSet<PhylogenyNode>();
for (PhylogenyNode node : externalNodes) {
externalNodesHashMap.put(node.getName(), node); // depends on control dependency: [for], data = [node]
}
int count = 0;
double averageMatrixDistance = 0.0;
double averageTreeDistance = 0.0;
double averageTreeErrorDistance = 0.0;
for (int row = 0; row < numSequences - 1; row++) {
String nodeName1 = matrix.getIdentifier(row);
PhylogenyNode node1 = externalNodesHashMap.get(nodeName1);
markPathToRoot(node1, path);
for (int col = row + 1; col < numSequences; col++) {
count++;
String nodeName2 = matrix.getIdentifier(col);
PhylogenyNode node2 = externalNodesHashMap.get(nodeName2);
double distance = matrix.getValue(col, row);
averageMatrixDistance = averageMatrixDistance + distance;
PhylogenyNode commonParent = findCommonParent(node2, path);
if (commonParent != null) {
double treeDistance = getNodeDistance(commonParent, node1)
+ getNodeDistance(commonParent, node2);
averageTreeDistance += treeDistance;
averageTreeErrorDistance += (distance - treeDistance)
* (distance - treeDistance);
logger.info("{} {} Distance: {}Tree: {} difference: {}",
nodeName1, nodeName2, distance, treeDistance,
Math.abs(distance - treeDistance));
} else {
logger.warn("Unable to find common parent with {} {}",
node1, node2);
}
}
path.clear();
}
averageMatrixDistance /= count;
averageTreeDistance /= count;
averageTreeErrorDistance /= count;
logger.info("Average matrix distance: {}", averageMatrixDistance);
logger.info("Average tree distance: {}", averageTreeDistance);
logger.info("Average LS error: {}", averageTreeErrorDistance);
return Math.sqrt(averageTreeErrorDistance) / averageMatrixDistance;
} } |
public class class_name {
public void marshall(KinesisStreamsOutput kinesisStreamsOutput, ProtocolMarshaller protocolMarshaller) {
if (kinesisStreamsOutput == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(kinesisStreamsOutput.getResourceARN(), RESOURCEARN_BINDING);
protocolMarshaller.marshall(kinesisStreamsOutput.getRoleARN(), ROLEARN_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(KinesisStreamsOutput kinesisStreamsOutput, ProtocolMarshaller protocolMarshaller) {
if (kinesisStreamsOutput == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(kinesisStreamsOutput.getResourceARN(), RESOURCEARN_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(kinesisStreamsOutput.getRoleARN(), ROLEARN_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public Fraction multipliedBy(final Fraction fraction) {
if (fraction == null) {
throw new IllegalArgumentException("The fraction must not be null");
}
if (numerator == 0 || fraction.numerator == 0) {
return ZERO;
}
// knuth 4.5.1
// make sure we don't overflow unless the result *must* overflow.
final int d1 = greatestCommonDivisor(numerator, fraction.denominator);
final int d2 = greatestCommonDivisor(fraction.numerator, denominator);
return of(mulAndCheck(numerator / d1, fraction.numerator / d2), mulPosAndCheck(denominator / d2, fraction.denominator / d1), true);
} } | public class class_name {
public Fraction multipliedBy(final Fraction fraction) {
if (fraction == null) {
throw new IllegalArgumentException("The fraction must not be null");
}
if (numerator == 0 || fraction.numerator == 0) {
return ZERO; // depends on control dependency: [if], data = [none]
}
// knuth 4.5.1
// make sure we don't overflow unless the result *must* overflow.
final int d1 = greatestCommonDivisor(numerator, fraction.denominator);
final int d2 = greatestCommonDivisor(fraction.numerator, denominator);
return of(mulAndCheck(numerator / d1, fraction.numerator / d2), mulPosAndCheck(denominator / d2, fraction.denominator / d1), true);
} } |
public class class_name {
public boolean remove() {
if (exists() && !isRemoved()) {
isRemoved = true;
node.getParent().removeChild(nodeFqn.getLastElement());
if (doTraceLogs) {
logger.trace("removed cache node "+nodeFqn);
}
return true;
}
else {
return false;
}
} } | public class class_name {
public boolean remove() {
if (exists() && !isRemoved()) {
isRemoved = true; // depends on control dependency: [if], data = [none]
node.getParent().removeChild(nodeFqn.getLastElement()); // depends on control dependency: [if], data = [none]
if (doTraceLogs) {
logger.trace("removed cache node "+nodeFqn); // depends on control dependency: [if], data = [none]
}
return true; // depends on control dependency: [if], data = [none]
}
else {
return false; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
INodeFile getFileINode(byte[][] components) {
readLock();
try {
INode inode = rootDir.getNode(components);
if (inode == null || inode.isDirectory())
return null;
return (INodeFile)inode;
} finally {
readUnlock();
}
} } | public class class_name {
INodeFile getFileINode(byte[][] components) {
readLock();
try {
INode inode = rootDir.getNode(components);
if (inode == null || inode.isDirectory())
return null;
return (INodeFile)inode; // depends on control dependency: [try], data = [none]
} finally {
readUnlock();
}
} } |
public class class_name {
public ChannelTermination setup(ChannelData chanData, TCPChannelConfiguration oTCPChannelConfig, TCPChannelFactory _f) throws ChannelException {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
Tr.entry(tc, "setup");
}
super.setup(chanData, oTCPChannelConfig, _f);
// try to load the AsyncLibrary. It will throw an exception if it can't load
try {
IAsyncProvider provider = AsyncLibrary.createInstance();
if (getConfig().getAllocateBuffersDirect()) {
jitSupportedByNative = provider.hasCapability(IAsyncProvider.CAP_JIT_BUFFERS);
} else {
jitSupportedByNative = false;
}
} catch (AsyncException ae) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "AioTCPChannel couldn't load native AIO library: " + ae.getMessage());
}
throw new ChannelException(ae);
}
if (!getConfig().isInbound()) {
boolean startSelectors = false;
if (wqm == null) {
wqm = new AioWorkQueueManager();
startSelectors = true;
}
super.connectionManager = new ConnectionManager(this, wqm);
if (startSelectors) {
wqm.startSelectors(false);
}
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
Tr.exit(tc, "setup");
}
return this;
} } | public class class_name {
public ChannelTermination setup(ChannelData chanData, TCPChannelConfiguration oTCPChannelConfig, TCPChannelFactory _f) throws ChannelException {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
Tr.entry(tc, "setup");
}
super.setup(chanData, oTCPChannelConfig, _f);
// try to load the AsyncLibrary. It will throw an exception if it can't load
try {
IAsyncProvider provider = AsyncLibrary.createInstance();
if (getConfig().getAllocateBuffersDirect()) {
jitSupportedByNative = provider.hasCapability(IAsyncProvider.CAP_JIT_BUFFERS); // depends on control dependency: [if], data = [none]
} else {
jitSupportedByNative = false; // depends on control dependency: [if], data = [none]
}
} catch (AsyncException ae) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "AioTCPChannel couldn't load native AIO library: " + ae.getMessage()); // depends on control dependency: [if], data = [none]
}
throw new ChannelException(ae);
}
if (!getConfig().isInbound()) {
boolean startSelectors = false;
if (wqm == null) {
wqm = new AioWorkQueueManager();
startSelectors = true;
}
super.connectionManager = new ConnectionManager(this, wqm);
if (startSelectors) {
wqm.startSelectors(false);
}
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
Tr.exit(tc, "setup");
}
return this;
} } |
public class class_name {
private void addPostParams(final Request request) {
if (friendlyName != null) {
request.addPostParam("FriendlyName", friendlyName);
}
if (awsCredentialsSid != null) {
request.addPostParam("AwsCredentialsSid", awsCredentialsSid);
}
if (encryptionKeySid != null) {
request.addPostParam("EncryptionKeySid", encryptionKeySid);
}
if (awsS3Url != null) {
request.addPostParam("AwsS3Url", awsS3Url.toString());
}
if (awsStorageEnabled != null) {
request.addPostParam("AwsStorageEnabled", awsStorageEnabled.toString());
}
if (encryptionEnabled != null) {
request.addPostParam("EncryptionEnabled", encryptionEnabled.toString());
}
} } | public class class_name {
private void addPostParams(final Request request) {
if (friendlyName != null) {
request.addPostParam("FriendlyName", friendlyName); // depends on control dependency: [if], data = [none]
}
if (awsCredentialsSid != null) {
request.addPostParam("AwsCredentialsSid", awsCredentialsSid); // depends on control dependency: [if], data = [none]
}
if (encryptionKeySid != null) {
request.addPostParam("EncryptionKeySid", encryptionKeySid); // depends on control dependency: [if], data = [none]
}
if (awsS3Url != null) {
request.addPostParam("AwsS3Url", awsS3Url.toString()); // depends on control dependency: [if], data = [none]
}
if (awsStorageEnabled != null) {
request.addPostParam("AwsStorageEnabled", awsStorageEnabled.toString()); // depends on control dependency: [if], data = [none]
}
if (encryptionEnabled != null) {
request.addPostParam("EncryptionEnabled", encryptionEnabled.toString()); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static Object deserialize(String json, String containerType, Class cls, NestedContent nestedContent) throws ApiException {
try{
if(("List".equals(containerType) || "Array".equals(containerType)) && nestedContent != null){
if(NestedContent.CONTEXT.equals(nestedContent)){
JavaType typeInfo = JsonUtil.getJsonMapper().getTypeFactory().constructFromCanonical("java.util.List<java.util.List<io.cortical.rest.model.Context>>");
Object response = (java.lang.Object) JsonUtil.getJsonMapper().readValue(json, typeInfo);
return response;
}else if(NestedContent.TERM.equals(nestedContent)){
JavaType typeInfo = JsonUtil.getJsonMapper().getTypeFactory().constructFromCanonical("java.util.List<java.util.List<io.cortical.rest.model.Term>>");
Object response = (java.lang.Object) JsonUtil.getJsonMapper().readValue(json, typeInfo);
return response;
}else{
return null;
}
}
else if("List".equals(containerType) || "Array".equals(containerType)) {
JavaType typeInfo = JsonUtil.getJsonMapper().getTypeFactory().constructCollectionType(List.class, cls);
List response = (List<?>) JsonUtil.getJsonMapper().readValue(json, typeInfo);
return response;
}
else if(String.class.equals(cls)) {
if(json != null && json.startsWith("\"") && json.endsWith("\"") && json.length() > 1)
return json.substring(1, json.length() - 2);
else
return json;
}
else {
return JsonUtil.getJsonMapper().readValue(json, cls);
}
}
catch (IOException e) {
throw new ApiException(500, e.getMessage());
}
} } | public class class_name {
public static Object deserialize(String json, String containerType, Class cls, NestedContent nestedContent) throws ApiException {
try{
if(("List".equals(containerType) || "Array".equals(containerType)) && nestedContent != null){
if(NestedContent.CONTEXT.equals(nestedContent)){
JavaType typeInfo = JsonUtil.getJsonMapper().getTypeFactory().constructFromCanonical("java.util.List<java.util.List<io.cortical.rest.model.Context>>");
Object response = (java.lang.Object) JsonUtil.getJsonMapper().readValue(json, typeInfo);
return response; // depends on control dependency: [if], data = [none]
}else if(NestedContent.TERM.equals(nestedContent)){
JavaType typeInfo = JsonUtil.getJsonMapper().getTypeFactory().constructFromCanonical("java.util.List<java.util.List<io.cortical.rest.model.Term>>");
Object response = (java.lang.Object) JsonUtil.getJsonMapper().readValue(json, typeInfo);
return response; // depends on control dependency: [if], data = [none]
}else{
return null; // depends on control dependency: [if], data = [none]
}
}
else if("List".equals(containerType) || "Array".equals(containerType)) {
JavaType typeInfo = JsonUtil.getJsonMapper().getTypeFactory().constructCollectionType(List.class, cls);
List response = (List<?>) JsonUtil.getJsonMapper().readValue(json, typeInfo);
return response;
}
else if(String.class.equals(cls)) {
if(json != null && json.startsWith("\"") && json.endsWith("\"") && json.length() > 1)
return json.substring(1, json.length() - 2);
else
return json;
}
else {
return JsonUtil.getJsonMapper().readValue(json, cls);
}
}
catch (IOException e) {
throw new ApiException(500, e.getMessage());
}
} } |
public class class_name {
static List<String> flaggedKeyValues(final String flagName, @Nullable Map<?, ?> keyValueMapping) {
List<String> result = Lists.newArrayList();
if (keyValueMapping != null && keyValueMapping.size() > 0) {
for (Map.Entry<?, ?> entry : keyValueMapping.entrySet()) {
result.addAll(string(flagName, entry.getKey() + "=" + entry.getValue()));
}
return result;
}
return Collections.emptyList();
} } | public class class_name {
static List<String> flaggedKeyValues(final String flagName, @Nullable Map<?, ?> keyValueMapping) {
List<String> result = Lists.newArrayList();
if (keyValueMapping != null && keyValueMapping.size() > 0) {
for (Map.Entry<?, ?> entry : keyValueMapping.entrySet()) {
result.addAll(string(flagName, entry.getKey() + "=" + entry.getValue())); // depends on control dependency: [for], data = [entry]
}
return result; // depends on control dependency: [if], data = [none]
}
return Collections.emptyList();
} } |
public class class_name {
private void notifyChange() {
if (OllieProvider.isImplemented()) {
Ollie.getContext().getContentResolver().notifyChange(OllieProvider.createUri(getClass(), id), null);
}
} } | public class class_name {
private void notifyChange() {
if (OllieProvider.isImplemented()) {
Ollie.getContext().getContentResolver().notifyChange(OllieProvider.createUri(getClass(), id), null); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@Nonnull
public Launcher createLauncher(TaskListener listener) {
SlaveComputer c = getComputer();
if (c == null) {
listener.error("Issue with creating launcher for agent " + name + ". Computer has been disconnected");
return new Launcher.DummyLauncher(listener);
} else {
// TODO: ideally all the logic below should be inside the SlaveComputer class with proper locking to prevent race conditions,
// but so far there is no locks for setNode() hence it requires serious refactoring
// Ensure that the Computer instance still points to this node
// Otherwise we may end up running the command on a wrong (reconnected) Node instance.
Slave node = c.getNode();
if (node != this) {
String message = "Issue with creating launcher for agent " + name + ". Computer has been reconnected";
if (LOGGER.isLoggable(Level.WARNING)) {
LOGGER.log(Level.WARNING, message, new IllegalStateException("Computer has been reconnected, this Node instance cannot be used anymore"));
}
return new Launcher.DummyLauncher(listener);
}
// RemoteLauncher requires an active Channel instance to operate correctly
final Channel channel = c.getChannel();
if (channel == null) {
reportLauncherCreateError("The agent has not been fully initialized yet",
"No remoting channel to the agent OR it has not been fully initialized yet", listener);
return new Launcher.DummyLauncher(listener);
}
if (channel.isClosingOrClosed()) {
reportLauncherCreateError("The agent is being disconnected",
"Remoting channel is either in the process of closing down or has closed down", listener);
return new Launcher.DummyLauncher(listener);
}
final Boolean isUnix = c.isUnix();
if (isUnix == null) {
// isUnix is always set when the channel is not null, so it should never happen
reportLauncherCreateError("The agent has not been fully initialized yet",
"Cannot determing if the agent is a Unix one, the System status request has not completed yet. " +
"It is an invalid channel state, please report a bug to Jenkins if you see it.",
listener);
return new Launcher.DummyLauncher(listener);
}
return new RemoteLauncher(listener, channel, isUnix).decorateFor(this);
}
} } | public class class_name {
@Nonnull
public Launcher createLauncher(TaskListener listener) {
SlaveComputer c = getComputer();
if (c == null) {
listener.error("Issue with creating launcher for agent " + name + ". Computer has been disconnected"); // depends on control dependency: [if], data = [none]
return new Launcher.DummyLauncher(listener); // depends on control dependency: [if], data = [none]
} else {
// TODO: ideally all the logic below should be inside the SlaveComputer class with proper locking to prevent race conditions,
// but so far there is no locks for setNode() hence it requires serious refactoring
// Ensure that the Computer instance still points to this node
// Otherwise we may end up running the command on a wrong (reconnected) Node instance.
Slave node = c.getNode();
if (node != this) {
String message = "Issue with creating launcher for agent " + name + ". Computer has been reconnected"; // depends on control dependency: [if], data = [none]
if (LOGGER.isLoggable(Level.WARNING)) {
LOGGER.log(Level.WARNING, message, new IllegalStateException("Computer has been reconnected, this Node instance cannot be used anymore")); // depends on control dependency: [if], data = [none]
}
return new Launcher.DummyLauncher(listener); // depends on control dependency: [if], data = [none]
}
// RemoteLauncher requires an active Channel instance to operate correctly
final Channel channel = c.getChannel();
if (channel == null) {
reportLauncherCreateError("The agent has not been fully initialized yet",
"No remoting channel to the agent OR it has not been fully initialized yet", listener); // depends on control dependency: [if], data = [none]
return new Launcher.DummyLauncher(listener); // depends on control dependency: [if], data = [none]
}
if (channel.isClosingOrClosed()) {
reportLauncherCreateError("The agent is being disconnected",
"Remoting channel is either in the process of closing down or has closed down", listener); // depends on control dependency: [if], data = [none]
return new Launcher.DummyLauncher(listener); // depends on control dependency: [if], data = [none]
}
final Boolean isUnix = c.isUnix();
if (isUnix == null) {
// isUnix is always set when the channel is not null, so it should never happen
reportLauncherCreateError("The agent has not been fully initialized yet",
"Cannot determing if the agent is a Unix one, the System status request has not completed yet. " +
"It is an invalid channel state, please report a bug to Jenkins if you see it.",
listener); // depends on control dependency: [if], data = [none]
return new Launcher.DummyLauncher(listener); // depends on control dependency: [if], data = [none]
}
return new RemoteLauncher(listener, channel, isUnix).decorateFor(this); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
// Registers an event to be fired when this Event completes; the backing
// HashSet is created lazily on first registration. Returns this for chaining.
// NOTE(review): the lazy init is not thread-safe — confirm callers are
// single-threaded before relying on concurrent registration.
public Event<T> addCompletionEvent(Event<?> completionEvent) {
if (completionEvents == null) {
completionEvents = new HashSet<>();
}
completionEvents.add(completionEvent);
return this;
} }
// Dependency-annotated duplicate of addCompletionEvent; the trailing
// "// depends on control dependency" comments are dataset labels, kept verbatim.
public Event<T> addCompletionEvent(Event<?> completionEvent) {
if (completionEvents == null) {
completionEvents = new HashSet<>(); // depends on control dependency: [if], data = [none]
}
completionEvents.add(completionEvent);
return this;
} }
public class class_name {
// Returns the smallest possible encoded size of the request: the fixed
// frame-header size plus the custom-payload map size when one is present.
public static int minimumRequestSize(Request request) {
// Header and payload are common inside a Frame at the protocol level
// Frame header has a fixed size of 9 for protocol version >= V3, which includes Frame flags
// size
int size = FrameCodec.headerEncodedSize();
if (!request.getCustomPayload().isEmpty()) {
// Custom payload is not supported in v3, but assume user won't have a custom payload set if
// they use this version
size += PrimitiveSizes.sizeOfBytesMap(request.getCustomPayload());
}
return size;
} }
// Dependency-annotated duplicate of minimumRequestSize; trailing
// "// depends on control dependency" comments are dataset labels.
public static int minimumRequestSize(Request request) {
// Header and payload are common inside a Frame at the protocol level
// Frame header has a fixed size of 9 for protocol version >= V3, which includes Frame flags
// size
int size = FrameCodec.headerEncodedSize();
if (!request.getCustomPayload().isEmpty()) {
// Custom payload is not supported in v3, but assume user won't have a custom payload set if
// they use this version
size += PrimitiveSizes.sizeOfBytesMap(request.getCustomPayload()); // depends on control dependency: [if], data = [none]
}
return size;
} }
public class class_name {
public Q setShort(final int index, final Number value) {
if (value == null) {
setNull(index, Types.SMALLINT);
}
else {
setShort(index, value.shortValue());
}
return _this();
} } | public class class_name {
// Dependency-annotated duplicate of setShort; trailing comments are dataset labels.
public Q setShort(final int index, final Number value) {
if (value == null) {
setNull(index, Types.SMALLINT); // depends on control dependency: [if], data = [none]
}
else {
setShort(index, value.shortValue()); // depends on control dependency: [if], data = [none]
}
return _this();
} }
public class class_name {
// Maps a point from render (screen) space back into the original data space:
// first undoes the render projection, then unscales each dimension in place.
@Override
public <NV extends NumberVector> NV projectRenderToDataSpace(double[] v, NumberVector.Factory<NV> prototype) {
final int dim = v.length;
double[] vec = projectRenderToScaled(v);
// Not calling {@link #projectScaledToDataSpace} to avoid extra copy of
// vector.
for(int d = 0; d < dim; d++) {
vec[d] = scales[d].getUnscaled(vec[d]);
}
return prototype.newNumberVector(vec);
} }
// Dependency-annotated duplicate of projectRenderToDataSpace; trailing comments are dataset labels.
@Override
public <NV extends NumberVector> NV projectRenderToDataSpace(double[] v, NumberVector.Factory<NV> prototype) {
final int dim = v.length;
double[] vec = projectRenderToScaled(v);
// Not calling {@link #projectScaledToDataSpace} to avoid extra copy of
// vector.
for(int d = 0; d < dim; d++) {
vec[d] = scales[d].getUnscaled(vec[d]); // depends on control dependency: [for], data = [d]
}
return prototype.newNumberVector(vec);
} }
public class class_name {
// Builds a regex alternation over the always-in-scope domains, joining the
// entries with '|'.
// NOTE(review): \Q...\E quoting is applied when isRegex() is TRUE, i.e. the
// regex entries get literal-quoted and the literal entries do not — this
// looks inverted; confirm against the matcher semantics.
@Deprecated
public String getScope() {
StringBuilder scopeTextStringBuilder = new StringBuilder();
for (DomainAlwaysInScopeMatcher domainInScope : domainsAlwaysInScope) {
if (domainInScope.isRegex()) {
scopeTextStringBuilder.append("\\Q").append(domainInScope.getValue()).append("\\E");
} else {
scopeTextStringBuilder.append(domainInScope.getValue());
}
scopeTextStringBuilder.append('|');
}
if (scopeTextStringBuilder.length() != 0) {
// NOTE(review): replace(len-1, len-1, ...) with start == end is an
// insertion BEFORE the "(" appended on the previous line, yielding e.g.
// "a|b|)$(" rather than a grouped "(a|b)$" — looks wrong, but the method
// is deprecated; verify the intended output before changing behavior.
scopeTextStringBuilder.append("(");
scopeTextStringBuilder.replace(scopeTextStringBuilder.length() - 1, scopeTextStringBuilder.length() - 1, ")$");
}
return scopeTextStringBuilder.toString();
} }
// Dependency-annotated duplicate of getScope; trailing comments are dataset labels.
@Deprecated
public String getScope() {
StringBuilder scopeTextStringBuilder = new StringBuilder();
for (DomainAlwaysInScopeMatcher domainInScope : domainsAlwaysInScope) {
if (domainInScope.isRegex()) {
scopeTextStringBuilder.append("\\Q").append(domainInScope.getValue()).append("\\E"); // depends on control dependency: [if], data = [none]
} else {
scopeTextStringBuilder.append(domainInScope.getValue()); // depends on control dependency: [if], data = [none]
}
scopeTextStringBuilder.append('|'); // depends on control dependency: [for], data = [none]
}
if (scopeTextStringBuilder.length() != 0) {
scopeTextStringBuilder.append("("); // depends on control dependency: [if], data = [none]
scopeTextStringBuilder.replace(scopeTextStringBuilder.length() - 1, scopeTextStringBuilder.length() - 1, ")$"); // depends on control dependency: [if], data = [(scopeTextStringBuilder.length()]
}
return scopeTextStringBuilder.toString();
} }
public class class_name {
public static void rotatePage(ImageInputStream imageInput, ImageOutputStream imageOutput, int degree, int pageIndex)
throws IOException {
ImageInputStream input = null;
try {
List<TIFFPage> pages = getPages(imageInput);
if (pageIndex != -1) {
pages.get(pageIndex).rotate(degree);
}
else {
for (TIFFPage tiffPage : pages) {
tiffPage.rotate(degree);
}
}
writePages(imageOutput, pages);
}
finally {
if (input != null) {
input.close();
}
}
} } | public class class_name {
// Dependency-annotated duplicate of rotatePage; trailing comments are dataset labels.
// NOTE(review): the `input` local is never assigned, so the finally block's
// close() is dead code — the local is vestigial.
public static void rotatePage(ImageInputStream imageInput, ImageOutputStream imageOutput, int degree, int pageIndex)
throws IOException {
ImageInputStream input = null;
try {
List<TIFFPage> pages = getPages(imageInput);
if (pageIndex != -1) {
pages.get(pageIndex).rotate(degree); // depends on control dependency: [if], data = [(pageIndex]
}
else {
for (TIFFPage tiffPage : pages) {
tiffPage.rotate(degree); // depends on control dependency: [for], data = [tiffPage]
}
}
writePages(imageOutput, pages);
}
finally {
if (input != null) {
input.close(); // depends on control dependency: [if], data = [none]
}
}
} }
public class class_name {
// Generic OAuth2 login: fetches the user's profile with the given access
// token, then loads the matching local user — creating it on first login or
// refreshing its email/picture on later logins. Always finishes with the
// active-account check, which handles the null-userAuth cases.
public UserAuthentication getOrCreateUser(App app, String accessToken) throws IOException {
UserAuthentication userAuth = null;
User user = new User();
if (accessToken != null) {
String acceptHeader = SecurityUtils.getSettingForApp(app, "security.oauth.accept_header", "");
HttpGet profileGet = new HttpGet(SecurityUtils.getSettingForApp(app, "security.oauth.profile_url", ""));
profileGet.setHeader(HttpHeaders.AUTHORIZATION, "Bearer " + accessToken);
Map<String, Object> profile = null;
if (!StringUtils.isBlank(acceptHeader)) {
profileGet.setHeader(HttpHeaders.ACCEPT, acceptHeader);
}
try (CloseableHttpResponse resp2 = httpclient.execute(profileGet)) {
HttpEntity respEntity = resp2.getEntity();
if (respEntity != null) {
profile = jreader.readValue(respEntity.getContent());
EntityUtils.consumeQuietly(respEntity);
}
}
// Profile JSON field names are configurable per app, with OIDC-ish defaults.
String accountIdParam = SecurityUtils.getSettingForApp(app, "security.oauth.parameters.id", "sub");
String pictureParam = SecurityUtils.getSettingForApp(app, "security.oauth.parameters.picture", "picture");
String emailDomain = SecurityUtils.getSettingForApp(app, "security.oauth.domain", "paraio.com");
String emailParam = SecurityUtils.getSettingForApp(app, "security.oauth.parameters.email", "email");
String nameParam = SecurityUtils.getSettingForApp(app, "security.oauth.parameters.name", "name");
if (profile != null && profile.containsKey(accountIdParam)) {
String oauthAccountId = (String) profile.get(accountIdParam);
String pic = (String) profile.get(pictureParam);
String email = (String) profile.get(emailParam);
String name = (String) profile.get(nameParam);
user.setAppid(getAppid(app));
user.setIdentifier(Config.OAUTH2_PREFIX.concat(oauthAccountId));
user.setEmail(email);
user = User.readUserForIdentifier(user);
if (user == null) {
//user is new
user = new User();
user.setActive(true);
user.setAppid(getAppid(app));
// Fall back to a synthetic address when the provider returns no email.
user.setEmail(StringUtils.isBlank(email) ? oauthAccountId + "@" + emailDomain : email);
user.setName(StringUtils.isBlank(name) ? "No Name" : name);
user.setPassword(Utils.generateSecurityToken());
user.setPicture(getPicture(pic));
user.setIdentifier(Config.OAUTH2_PREFIX.concat(oauthAccountId));
String id = user.create();
if (id == null) {
throw new AuthenticationServiceException("Authentication failed: cannot create new user.");
}
} else {
// Existing user: persist only when picture or email actually changed.
String picture = getPicture(pic);
boolean update = false;
if (!StringUtils.equals(user.getPicture(), picture)) {
user.setPicture(picture);
update = true;
}
if (!StringUtils.isBlank(email) && !StringUtils.equals(user.getEmail(), email)) {
user.setEmail(email);
update = true;
}
if (update) {
user.update();
}
}
userAuth = new UserAuthentication(new AuthenticatedUserDetails(user));
}
}
return SecurityUtils.checkIfActive(userAuth, user, false);
} }
// Dependency-annotated duplicate of getOrCreateUser; trailing
// "// depends on control dependency" comments are dataset labels.
public UserAuthentication getOrCreateUser(App app, String accessToken) throws IOException {
UserAuthentication userAuth = null;
User user = new User();
if (accessToken != null) {
String acceptHeader = SecurityUtils.getSettingForApp(app, "security.oauth.accept_header", "");
HttpGet profileGet = new HttpGet(SecurityUtils.getSettingForApp(app, "security.oauth.profile_url", ""));
profileGet.setHeader(HttpHeaders.AUTHORIZATION, "Bearer " + accessToken);
Map<String, Object> profile = null;
if (!StringUtils.isBlank(acceptHeader)) {
profileGet.setHeader(HttpHeaders.ACCEPT, acceptHeader);
}
try (CloseableHttpResponse resp2 = httpclient.execute(profileGet)) {
HttpEntity respEntity = resp2.getEntity();
if (respEntity != null) {
profile = jreader.readValue(respEntity.getContent()); // depends on control dependency: [if], data = [(respEntity]
EntityUtils.consumeQuietly(respEntity); // depends on control dependency: [if], data = [(respEntity]
}
}
String accountIdParam = SecurityUtils.getSettingForApp(app, "security.oauth.parameters.id", "sub");
String pictureParam = SecurityUtils.getSettingForApp(app, "security.oauth.parameters.picture", "picture");
String emailDomain = SecurityUtils.getSettingForApp(app, "security.oauth.domain", "paraio.com");
String emailParam = SecurityUtils.getSettingForApp(app, "security.oauth.parameters.email", "email");
String nameParam = SecurityUtils.getSettingForApp(app, "security.oauth.parameters.name", "name");
if (profile != null && profile.containsKey(accountIdParam)) {
String oauthAccountId = (String) profile.get(accountIdParam);
String pic = (String) profile.get(pictureParam);
String email = (String) profile.get(emailParam);
String name = (String) profile.get(nameParam);
user.setAppid(getAppid(app));
user.setIdentifier(Config.OAUTH2_PREFIX.concat(oauthAccountId));
user.setEmail(email);
user = User.readUserForIdentifier(user);
if (user == null) {
//user is new
user = new User(); // depends on control dependency: [if], data = [none]
user.setActive(true); // depends on control dependency: [if], data = [none]
user.setAppid(getAppid(app)); // depends on control dependency: [if], data = [none]
user.setEmail(StringUtils.isBlank(email) ? oauthAccountId + "@" + emailDomain : email); // depends on control dependency: [if], data = [none]
user.setName(StringUtils.isBlank(name) ? "No Name" : name); // depends on control dependency: [if], data = [none]
user.setPassword(Utils.generateSecurityToken()); // depends on control dependency: [if], data = [none]
user.setPicture(getPicture(pic)); // depends on control dependency: [if], data = [none]
user.setIdentifier(Config.OAUTH2_PREFIX.concat(oauthAccountId)); // depends on control dependency: [if], data = [none]
String id = user.create();
if (id == null) {
throw new AuthenticationServiceException("Authentication failed: cannot create new user.");
}
} else {
String picture = getPicture(pic);
boolean update = false;
if (!StringUtils.equals(user.getPicture(), picture)) {
user.setPicture(picture); // depends on control dependency: [if], data = [none]
update = true; // depends on control dependency: [if], data = [none]
}
if (!StringUtils.isBlank(email) && !StringUtils.equals(user.getEmail(), email)) {
user.setEmail(email); // depends on control dependency: [if], data = [none]
update = true; // depends on control dependency: [if], data = [none]
}
if (update) {
user.update(); // depends on control dependency: [if], data = [none]
}
}
userAuth = new UserAuthentication(new AuthenticatedUserDetails(user));
}
}
return SecurityUtils.checkIfActive(userAuth, user, false);
} }
public class class_name {
// Removes a server-socket handler: if it was still awaiting acceptance,
// dropping it from the pending set suffices; otherwise close its channel
// and wake the selector so the removal takes effect.
public void unregisterServerSocketHandler( NIOServerSocketHandler serverHandler )
{
if (pendingAcceptHandlers.remove(serverHandler))
return; // Not handled yet
if (serverHandlers.remove(serverHandler))
{
closeSocketChannel(serverHandler.getServerSocketChannel(), selector);
wakeUpAndWait();
}
} }
// Dependency-annotated duplicate of unregisterServerSocketHandler; trailing comments are dataset labels.
public void unregisterServerSocketHandler( NIOServerSocketHandler serverHandler )
{
if (pendingAcceptHandlers.remove(serverHandler))
return; // Not handled yet
if (serverHandlers.remove(serverHandler))
{
closeSocketChannel(serverHandler.getServerSocketChannel(), selector); // depends on control dependency: [if], data = [none]
wakeUpAndWait(); // depends on control dependency: [if], data = [none]
}
} }
public class class_name {
// Resolves a field argument by name: when the stored value is a map that is
// recognized as a variable reference, the variable's value is looked up in
// the operation's variables; any other map resolves to null; non-map values
// are returned inline as-is.
@SuppressWarnings("unchecked") @Nullable public Object resolveArgument(@NotNull String name,
@NotNull Operation.Variables variables) {
checkNotNull(name, "name == null");
checkNotNull(variables, "variables == null");
Map<String, Object> variableValues = variables.valueMap();
Object argumentValue = arguments.get(name);
if (argumentValue instanceof Map) {
Map<String, Object> argumentValueMap = (Map<String, Object>) argumentValue;
if (isArgumentValueVariableType(argumentValueMap)) {
String variableName = argumentValueMap.get(VARIABLE_NAME_KEY).toString();
return variableValues.get(variableName);
} else {
// A map that is not a variable reference has no resolvable value.
return null;
}
}
return argumentValue;
} }
// Dependency-annotated duplicate of resolveArgument; trailing comments are dataset labels.
@SuppressWarnings("unchecked") @Nullable public Object resolveArgument(@NotNull String name,
@NotNull Operation.Variables variables) {
checkNotNull(name, "name == null");
checkNotNull(variables, "variables == null");
Map<String, Object> variableValues = variables.valueMap();
Object argumentValue = arguments.get(name);
if (argumentValue instanceof Map) {
Map<String, Object> argumentValueMap = (Map<String, Object>) argumentValue;
if (isArgumentValueVariableType(argumentValueMap)) {
String variableName = argumentValueMap.get(VARIABLE_NAME_KEY).toString();
return variableValues.get(variableName); // depends on control dependency: [if], data = [none]
} else {
return null; // depends on control dependency: [if], data = [none]
}
}
return argumentValue;
} }
public class class_name {
public <T> Class<T> isAssignableFrom(final Class<?> superType, final Class<T> type) {
if (!superType.isAssignableFrom(type)) {
fail(String.format(DEFAULT_IS_ASSIGNABLE_EX_MESSAGE, type == null ? "null" : type.getName(), superType.getName()));
}
return type;
} } | public class class_name {
// Dependency-annotated duplicate of isAssignableFrom; trailing comments are dataset labels.
public <T> Class<T> isAssignableFrom(final Class<?> superType, final Class<T> type) {
if (!superType.isAssignableFrom(type)) {
fail(String.format(DEFAULT_IS_ASSIGNABLE_EX_MESSAGE, type == null ? "null" : type.getName(), superType.getName())); // depends on control dependency: [if], data = [none]
}
return type;
} }
public class class_name {
// Parses a policy file: logs diagnostics when debug is enabled, rejects
// null/missing files, then delegates to parse(Reader) with a UTF-8 reader
// that is always closed.
public ParsedPolicy parse(File file) throws Exception {
if (file == null || !file.exists()) {
if (debug) {
if (file == null) {
ProGradePolicyDebugger.log("Given File is null");
} else {
// file != null here, so within the outer guard this check always
// holds; kept for clarity.
if (!file.exists()) {
ProGradePolicyDebugger.log("Policy file " + file.getCanonicalPath() + " doesn't exists.");
}
}
}
throw new Exception("ER007: File with policy doesn't exists!");
}
if (debug) {
ProGradePolicyDebugger.log("Parsing policy " + file.getCanonicalPath());
}
final InputStreamReader reader = new InputStreamReader(new FileInputStream(file), "UTF-8");
try {
return parse(reader);
} finally {
reader.close();
}
} }
// Dependency-annotated duplicate of parse(File); trailing comments are dataset labels.
public ParsedPolicy parse(File file) throws Exception {
if (file == null || !file.exists()) {
if (debug) {
if (file == null) {
ProGradePolicyDebugger.log("Given File is null"); // depends on control dependency: [if], data = [none]
} else {
if (!file.exists()) {
ProGradePolicyDebugger.log("Policy file " + file.getCanonicalPath() + " doesn't exists.");
}
}
}
throw new Exception("ER007: File with policy doesn't exists!"); // depends on control dependency: [if], data = [none]
}
if (debug) {
ProGradePolicyDebugger.log("Parsing policy " + file.getCanonicalPath()); // depends on control dependency: [if], data = [none]
}
final InputStreamReader reader = new InputStreamReader(new FileInputStream(file), "UTF-8");
try {
return parse(reader); // depends on control dependency: [try], data = [none]
} finally {
reader.close();
}
} }
public class class_name {
@NonNull
public static Runnable onSchedule(@NonNull Runnable run) {
ObjectHelper.requireNonNull(run, "run is null");
Function<? super Runnable, ? extends Runnable> f = onScheduleHandler;
if (f == null) {
return run;
}
return apply(f, run);
} } | public class class_name {
// Dependency-annotated duplicate of onSchedule; trailing comments are dataset labels.
@NonNull
public static Runnable onSchedule(@NonNull Runnable run) {
ObjectHelper.requireNonNull(run, "run is null");
Function<? super Runnable, ? extends Runnable> f = onScheduleHandler;
if (f == null) {
return run; // depends on control dependency: [if], data = [none]
}
return apply(f, run);
} }
public class class_name {
static boolean isPreferred(StorageLocationType type, StorageDirectory sd) {
if ((sd instanceof NNStorageDirectory)) {
return ((NNStorageDirectory) sd).type == type;
}
// by default all are preferred
return true;
} } | public class class_name {
// Dependency-annotated duplicate of isPreferred; trailing comments are dataset labels.
static boolean isPreferred(StorageLocationType type, StorageDirectory sd) {
if ((sd instanceof NNStorageDirectory)) {
return ((NNStorageDirectory) sd).type == type; // depends on control dependency: [if], data = [none]
}
// by default all are preferred
return true;
} }
public class class_name {
// Builds a hash-trie node holding two entries whose full hashes differ.
// If their 5-bit hash slices at this level differ, both land in one node
// (stored in slice order); otherwise recurse 5 bits deeper under a
// single-slot node until the slices diverge.
protected static <K, V> HashTrieMap<K, V> makeHashTrieMap(int hash0, CompactHashMap<K, V> elem0, int hash1, CompactHashMap<K, V> elem1, int level, int size) {
int index0 = (hash0 >>> level) & 0x1f;
int index1 = (hash1 >>> level) & 0x1f;
if (index0 != index1) {
int bitmap = (1 << index0) | (1 << index1);
@SuppressWarnings("unchecked")
Object[] elems = new Object[2];
if (index0 < index1) {
elems[0] = unwrap(elem0);
elems[1] = unwrap(elem1);
} else {
elems[0] = unwrap(elem1);
elems[1] = unwrap(elem0);
}
return new HashTrieMap<K, V>(bitmap, elems, size);
} else {
@SuppressWarnings("unchecked")
Object[] elems = new Object[1];
int bitmap = (1 << index0);
elems[0] = makeHashTrieMap(hash0, elem0, hash1, elem1, level + 5, size);
return new HashTrieMap<K, V>(bitmap, elems, size);
}
} }
// Dependency-annotated duplicate of makeHashTrieMap; trailing comments are dataset labels.
protected static <K, V> HashTrieMap<K, V> makeHashTrieMap(int hash0, CompactHashMap<K, V> elem0, int hash1, CompactHashMap<K, V> elem1, int level, int size) {
int index0 = (hash0 >>> level) & 0x1f;
int index1 = (hash1 >>> level) & 0x1f;
if (index0 != index1) {
int bitmap = (1 << index0) | (1 << index1);
@SuppressWarnings("unchecked")
Object[] elems = new Object[2];
if (index0 < index1) {
elems[0] = unwrap(elem0); // depends on control dependency: [if], data = [none]
elems[1] = unwrap(elem1); // depends on control dependency: [if], data = [none]
} else {
elems[0] = unwrap(elem1); // depends on control dependency: [if], data = [none]
elems[1] = unwrap(elem0); // depends on control dependency: [if], data = [none]
}
return new HashTrieMap<K, V>(bitmap, elems, size); // depends on control dependency: [if], data = [none]
} else {
@SuppressWarnings("unchecked")
Object[] elems = new Object[1];
int bitmap = (1 << index0);
elems[0] = makeHashTrieMap(hash0, elem0, hash1, elem1, level + 5, size); // depends on control dependency: [if], data = [none]
return new HashTrieMap<K, V>(bitmap, elems, size); // depends on control dependency: [if], data = [none]
}
} }
public class class_name {
public void setModel(Progression model) {
this.model.removeProgressionListener(new WeakListener(this, this.model));
if (model == null) {
this.model = new DefaultProgression();
} else {
this.model = model;
}
this.previousValue = this.model.getValue();
this.model.addProgressionListener(new WeakListener(this, this.model));
} } | public class class_name {
// Dependency-annotated duplicate of setModel; trailing comments are dataset labels.
public void setModel(Progression model) {
this.model.removeProgressionListener(new WeakListener(this, this.model));
if (model == null) {
this.model = new DefaultProgression(); // depends on control dependency: [if], data = [none]
} else {
this.model = model; // depends on control dependency: [if], data = [none]
}
this.previousValue = this.model.getValue();
this.model.addProgressionListener(new WeakListener(this, this.model));
} }
public class class_name {
public void tick() {
final long count = uncounted.sumThenReset();
final double instantRate = count / interval;
if (initialized) {
rate += (alpha * (instantRate - rate));
} else {
rate = instantRate;
initialized = true;
}
} } | public class class_name {
// Dependency-annotated duplicate of tick; trailing comments are dataset labels.
public void tick() {
final long count = uncounted.sumThenReset();
final double instantRate = count / interval;
if (initialized) {
rate += (alpha * (instantRate - rate)); // depends on control dependency: [if], data = [none]
} else {
rate = instantRate; // depends on control dependency: [if], data = [none]
initialized = true; // depends on control dependency: [if], data = [none]
}
} }
public class class_name {
// Returns the pi-electron count for the atom's CDK atom type: first from the
// static TYPES cache, otherwise from the CDK atom-type definition file's
// PI_BOND_COUNT property (0 when the property is absent).
private static int electronsForAtomType(IAtom atom) {
Integer electrons = TYPES.get(atom.getAtomTypeName());
if (electrons != null) return electrons;
try {
IAtomType atomType = AtomTypeFactory.getInstance("org/openscience/cdk/dict/data/cdk-atom-types.owl",
atom.getBuilder()).getAtomType(atom.getAtomTypeName());
electrons = atomType.getProperty(CDKConstants.PI_BOND_COUNT);
return electrons != null ? electrons : 0;
} catch (NoSuchAtomTypeException e) {
// An unknown atom type is treated as a caller error for this lookup.
throw new IllegalArgumentException(e);
}
} }
// Dependency-annotated duplicate of electronsForAtomType; trailing comments are dataset labels.
private static int electronsForAtomType(IAtom atom) {
Integer electrons = TYPES.get(atom.getAtomTypeName());
if (electrons != null) return electrons;
try {
IAtomType atomType = AtomTypeFactory.getInstance("org/openscience/cdk/dict/data/cdk-atom-types.owl",
atom.getBuilder()).getAtomType(atom.getAtomTypeName());
electrons = atomType.getProperty(CDKConstants.PI_BOND_COUNT); // depends on control dependency: [try], data = [none]
return electrons != null ? electrons : 0; // depends on control dependency: [try], data = [none]
} catch (NoSuchAtomTypeException e) {
throw new IllegalArgumentException(e);
} // depends on control dependency: [catch], data = [none]
} }
public class class_name {
private void setExchangeRateChain(List<ExchangeRate> chain) {
this.chain.clear();
if (Objects.isNull(chain) || chain.isEmpty()) {
this.chain.add(this);
} else {
for (ExchangeRate rate : chain) {
if (Objects.isNull(rate)) {
throw new IllegalArgumentException("Rate Chain element can not be null.");
}
}
this.chain.addAll(chain);
}
} } | public class class_name {
// Dependency-annotated duplicate of setExchangeRateChain; trailing comments are dataset labels.
private void setExchangeRateChain(List<ExchangeRate> chain) {
this.chain.clear();
if (Objects.isNull(chain) || chain.isEmpty()) {
this.chain.add(this); // depends on control dependency: [if], data = [none]
} else {
for (ExchangeRate rate : chain) {
if (Objects.isNull(rate)) {
throw new IllegalArgumentException("Rate Chain element can not be null.");
}
}
this.chain.addAll(chain); // depends on control dependency: [if], data = [none]
}
} }
public class class_name {
public Object setProperty(final String iName, final Object iValue) {
if (iValue != null) {
return properties.put(iName.toLowerCase(Locale.ENGLISH), iValue);
} else {
return properties.remove(iName.toLowerCase(Locale.ENGLISH));
}
} } | public class class_name {
// Dependency-annotated duplicate of setProperty; trailing comments are dataset labels.
public Object setProperty(final String iName, final Object iValue) {
if (iValue != null) {
return properties.put(iName.toLowerCase(Locale.ENGLISH), iValue); // depends on control dependency: [if], data = [none]
} else {
return properties.remove(iName.toLowerCase(Locale.ENGLISH)); // depends on control dependency: [if], data = [none]
}
} }
public class class_name {
// Wraps the source in a CachedFlowable and pairs it with a close action
// (disposes any scheduled-reset worker exactly once via a CAS loop, using a
// null workerRef as the terminal state) and a reset action that re-arms a
// scheduled cache reset after the given duration.
public static <T> CloseableFlowableWithReset<T> cache(final Flowable<T> source,
final long duration, final TimeUnit unit, final Scheduler scheduler) {
final AtomicReference<CachedFlowable<T>> cacheRef = new AtomicReference<CachedFlowable<T>>();
final AtomicReference<Optional<Scheduler.Worker>> workerRef = new AtomicReference<Optional<Scheduler.Worker>>(
Optional.<Scheduler.Worker>absent());
CachedFlowable<T> cache = new CachedFlowable<T>(source);
cacheRef.set(cache);
Runnable closeAction = new Runnable() {
@Override
public void run() {
while (true) {
Optional<Scheduler.Worker> w = workerRef.get();
if (w == null) {
// we are finished
break;
} else {
if (workerRef.compareAndSet(w, null)) {
if (w.isPresent()) {
w.get().dispose();
}
// we are finished
// NOTE(review): the successful CAS above already stored null, so
// this set(null) is redundant (though harmless).
workerRef.set(null);
break;
}
}
// if not finished then try again
}
}
};
Runnable resetAction = new Runnable() {
@Override
public void run() {
startScheduledResetAgain(duration, unit, scheduler, cacheRef, workerRef);
}
};
return new CloseableFlowableWithReset<T>(cache, closeAction, resetAction);
} }
// Dependency-annotated duplicate of cache; trailing comments are dataset labels.
public static <T> CloseableFlowableWithReset<T> cache(final Flowable<T> source,
final long duration, final TimeUnit unit, final Scheduler scheduler) {
final AtomicReference<CachedFlowable<T>> cacheRef = new AtomicReference<CachedFlowable<T>>();
final AtomicReference<Optional<Scheduler.Worker>> workerRef = new AtomicReference<Optional<Scheduler.Worker>>(
Optional.<Scheduler.Worker>absent());
CachedFlowable<T> cache = new CachedFlowable<T>(source);
cacheRef.set(cache);
Runnable closeAction = new Runnable() {
@Override
public void run() {
while (true) {
Optional<Scheduler.Worker> w = workerRef.get();
if (w == null) {
// we are finished
break;
} else {
if (workerRef.compareAndSet(w, null)) {
if (w.isPresent()) {
w.get().dispose(); // depends on control dependency: [if], data = [none]
}
// we are finished
workerRef.set(null); // depends on control dependency: [if], data = [none]
break;
}
}
// if not finished then try again
}
}
};
Runnable resetAction = new Runnable() {
@Override
public void run() {
startScheduledResetAgain(duration, unit, scheduler, cacheRef, workerRef);
}
};
return new CloseableFlowableWithReset<T>(cache, closeAction, resetAction);
} }
public class class_name {
// Writes a batch: partitions records by the evaluated sObject-name template,
// writes each partition, and routes per-record write errors through the
// configured error-record handler.
@Override
public void write(Batch batch) throws StageException {
Multimap<String, Record> partitions = ELUtils.partitionBatchByExpression(sObjectNameEval,
sObjectNameVars,
conf.sObjectNameTemplate,
batch
);
Set<String> sObjectNames = partitions.keySet();
for (String sObjectName : sObjectNames) {
List<OnRecordErrorException> errors = writer.writeBatch(
sObjectName,
partitions.get(sObjectName),
this
);
for (OnRecordErrorException error : errors) {
errorRecordHandler.onError(error);
}
}
} }
// Dependency-annotated duplicate of write; trailing comments are dataset labels.
@Override
public void write(Batch batch) throws StageException {
Multimap<String, Record> partitions = ELUtils.partitionBatchByExpression(sObjectNameEval,
sObjectNameVars,
conf.sObjectNameTemplate,
batch
);
Set<String> sObjectNames = partitions.keySet();
for (String sObjectName : sObjectNames) {
List<OnRecordErrorException> errors = writer.writeBatch(
sObjectName,
partitions.get(sObjectName),
this
);
for (OnRecordErrorException error : errors) {
errorRecordHandler.onError(error); // depends on control dependency: [for], data = [error]
}
}
} }
public class class_name {
// Blocks until the asynchronous computation signals readiness, then either
// rethrows the recorded error or returns the computed state.
// NOTE(review): `ready` is read outside the synchronized block — visibility
// presumably relies on it being declared volatile elsewhere; confirm. Also
// note that InterruptedException is swallowed (interrupt status is not
// restored) and the yield() makes the wait spin-prone under interruption.
public WorldState get() throws Exception {
while (!this.ready) {
synchronized (this) {
try {
this.wait();
} catch (InterruptedException e) {
// Ignored
Thread.yield();
}
}
}
if (this.error != null) {
throw this.error;
}
return this.state;
} }
// Dependency-annotated duplicate of get; trailing comments are dataset labels.
public WorldState get() throws Exception {
while (!this.ready) {
synchronized (this) {
try {
this.wait(); // depends on control dependency: [try], data = [none]
} catch (InterruptedException e) {
// Ignored
Thread.yield();
} // depends on control dependency: [catch], data = [none]
}
}
if (this.error != null) {
throw this.error;
}
return this.state;
} }
public class class_name {
// Doxia parser entry point: reads the AsciiDoc source (treating null as
// empty), assembles conversion options from the Maven project's site
// configuration, and emits the converted result as raw text into the sink.
@Override
public void parse(Reader reader, Sink sink) throws ParseException {
String source = null;
try {
if ((source = IOUtil.toString(reader)) == null) {
source = "";
}
}
catch (IOException ex) {
// Best-effort: log the read failure and abort without throwing.
getLog().error("Could not read AsciiDoc source: " + ex.getLocalizedMessage());
return;
}
MavenProject project = mavenProjectProvider.get();
Xpp3Dom siteConfig = getSiteConfig(project);
File siteDirectory = resolveSiteDirectory(project, siteConfig);
OptionsBuilder options = processAsciiDocConfig(
project,
siteConfig,
initOptions(project, siteDirectory),
initAttributes(project, siteDirectory));
// QUESTION should we keep OptionsBuilder & AttributesBuilder separate for call to convertAsciiDoc?
sink.rawText(convertAsciiDoc(source, options));
} }
// Dependency-annotated duplicate of parse(Reader); trailing comments are dataset labels.
@Override
public void parse(Reader reader, Sink sink) throws ParseException {
String source = null;
try {
if ((source = IOUtil.toString(reader)) == null) {
source = ""; // depends on control dependency: [if], data = [none]
}
}
catch (IOException ex) {
getLog().error("Could not read AsciiDoc source: " + ex.getLocalizedMessage());
return;
}
MavenProject project = mavenProjectProvider.get();
Xpp3Dom siteConfig = getSiteConfig(project);
File siteDirectory = resolveSiteDirectory(project, siteConfig);
OptionsBuilder options = processAsciiDocConfig(
project,
siteConfig,
initOptions(project, siteDirectory),
initAttributes(project, siteDirectory));
// QUESTION should we keep OptionsBuilder & AttributesBuilder separate for call to convertAsciiDoc?
sink.rawText(convertAsciiDoc(source, options));
} }
public class class_name {
// Conservatively decides whether `superset` safely extends `subset`: the
// subset must not declare more symbols (max-id check), the import lists must
// match exactly (length, names, versions, order), and the subset's declared
// local symbols must equal the superset's in lockstep order.
// NOTE(review): the lockstep loop calls supersetIter.next() without a
// hasNext() guard; this presumably cannot underrun given the earlier max-id
// and import checks — verify before modifying.
private static boolean localSymtabExtends(SymbolTable superset, SymbolTable subset)
{
if (subset.getMaxId() > superset.getMaxId())
{
// the subset has more symbols
return false;
}
// NB this API almost certainly requires cloning--symbol table's API doesn't give us a way to polymorphically
// get this without materializing an array
final SymbolTable[] supersetImports = superset.getImportedTables();
final SymbolTable[] subsetImports = subset.getImportedTables();
// TODO this is over-strict, but not as strict as LocalSymbolTable.symtabExtends()
if (supersetImports.length != subsetImports.length)
{
return false;
}
// NB we cannot trust Arrays.equals--we don't know how an implementation will implement it...
for (int i = 0; i < supersetImports.length; i++)
{
final SymbolTable supersetImport = supersetImports[i];
final SymbolTable subsetImport = subsetImports[i];
if (!supersetImport.getName().equals(subsetImport.getName())
|| supersetImport.getVersion() != subsetImport.getVersion())
{
// bad match on import
return false;
}
}
// all the imports lined up, lets make sure the locals line up too
final Iterator<String> supersetIter = superset.iterateDeclaredSymbolNames();
final Iterator<String> subsetIter = subset.iterateDeclaredSymbolNames();
while (subsetIter.hasNext())
{
final String nextSubsetSymbol = subsetIter.next();
final String nextSupersetSymbol = supersetIter.next();
if (!nextSubsetSymbol.equals(nextSupersetSymbol))
{
// local symbol mismatch
return false;
}
}
// we made it this far--superset is really a superset of subset
return true;
} }
// Dependency-annotated duplicate of localSymtabExtends; trailing comments are dataset labels.
private static boolean localSymtabExtends(SymbolTable superset, SymbolTable subset)
{
if (subset.getMaxId() > superset.getMaxId())
{
// the subset has more symbols
return false; // depends on control dependency: [if], data = [none]
}
// NB this API almost certainly requires cloning--symbol table's API doesn't give us a way to polymorphically
// get this without materializing an array
final SymbolTable[] supersetImports = superset.getImportedTables();
final SymbolTable[] subsetImports = subset.getImportedTables();
// TODO this is over-strict, but not as strict as LocalSymbolTable.symtabExtends()
if (supersetImports.length != subsetImports.length)
{
return false; // depends on control dependency: [if], data = [none]
}
// NB we cannot trust Arrays.equals--we don't know how an implementation will implement it...
for (int i = 0; i < supersetImports.length; i++)
{
final SymbolTable supersetImport = supersetImports[i];
final SymbolTable subsetImport = subsetImports[i];
if (!supersetImport.getName().equals(subsetImport.getName())
|| supersetImport.getVersion() != subsetImport.getVersion())
{
// bad match on import
return false; // depends on control dependency: [if], data = [none]
}
}
// all the imports lined up, lets make sure the locals line up too
final Iterator<String> supersetIter = superset.iterateDeclaredSymbolNames();
final Iterator<String> subsetIter = subset.iterateDeclaredSymbolNames();
while (subsetIter.hasNext())
{
final String nextSubsetSymbol = subsetIter.next();
final String nextSupersetSymbol = supersetIter.next();
if (!nextSubsetSymbol.equals(nextSupersetSymbol))
{
// local symbol mismatch
return false; // depends on control dependency: [if], data = [none]
}
}
// we made it this far--superset is really a superset of subset
return true;
} }
public class class_name {
    /**
     * ANTLR-generated lexer rule NEWLINE: consumes exactly one line
     * terminator, preferring the two-character "\r\n" sequence (decided by a
     * one-character lookahead after '\r') over a lone '\r' or '\n'.
     * Generated from druidG.g -- do not hand-edit.
     *
     * @throws RecognitionException if the input starts with neither '\r' nor '\n'
     */
    public final void mNEWLINE() throws RecognitionException {
        try {
            // druidG.g:716:20: ( ( '\\r\\n' | '\\r' | '\\n' ) )
            // druidG.g:716:23: ( '\\r\\n' | '\\r' | '\\n' )
            {
                // druidG.g:716:23: ( '\\r\\n' | '\\r' | '\\n' )
                int alt40=3;
                int LA40_0 = input.LA(1);
                if ( (LA40_0=='\r') ) {
                    // LA(2) disambiguates "\r\n" from a bare "\r".
                    int LA40_1 = input.LA(2);
                    if ( (LA40_1=='\n') ) {
                        alt40=1;
                    }
                    else {
                        alt40=2;
                    }
                }
                else if ( (LA40_0=='\n') ) {
                    alt40=3;
                }
                else {
                    NoViableAltException nvae =
                        new NoViableAltException("", 40, 0, input);
                    throw nvae;
                }
                switch (alt40) {
                    case 1 :
                        // druidG.g:716:25: '\\r\\n'
                    {
                        match("\r\n");
                    }
                    break;
                    case 2 :
                        // druidG.g:717:18: '\\r'
                    {
                        match('\r');
                    }
                    break;
                    case 3 :
                        // druidG.g:718:18: '\\n'
                    {
                        match('\n');
                    }
                    break;
                }
            }
        }
        finally {
            // do for sure before leaving
        }
    }
}
public final void mNEWLINE() throws RecognitionException {
try {
// druidG.g:716:20: ( ( '\\r\\n' | '\\r' | '\\n' ) )
// druidG.g:716:23: ( '\\r\\n' | '\\r' | '\\n' )
{
// druidG.g:716:23: ( '\\r\\n' | '\\r' | '\\n' )
int alt40=3;
int LA40_0 = input.LA(1);
if ( (LA40_0=='\r') ) {
int LA40_1 = input.LA(2);
if ( (LA40_1=='\n') ) {
alt40=1; // depends on control dependency: [if], data = [none]
}
else {
alt40=2; // depends on control dependency: [if], data = [none]
}
}
else if ( (LA40_0=='\n') ) {
alt40=3;
}
else {
NoViableAltException nvae =
new NoViableAltException("", 40, 0, input);
throw nvae;
}
switch (alt40) {
case 1 :
// druidG.g:716:25: '\\r\\n'
{
match("\r\n");
}
break;
case 2 :
// druidG.g:717:18: '\\r'
{
match('\r');
}
break;
case 3 :
// druidG.g:718:18: '\\n'
{
match('\n');
}
break;
}
}
}
finally {
// do for sure before leaving
}
} } |
public class class_name {
public static void handleBOMUTF8(String[] vales, int i) {
byte[] buf = vales[i].getBytes();
if (LOG.isDebugEnabled()) {
StringBuilder sb = new StringBuilder();
for (byte b : buf) {
sb.append(Byte.toString(b));
sb.append(" ");
}
LOG.debug(vales[i]);
LOG.debug(sb.toString());
}
if (buf[0] == (byte) 0xEF && buf[1] == (byte) 0xBB
&& buf[2] == (byte) 0xBF) {
vales[i] = new String(buf, 3, buf.length - 3);
}
} } | public class class_name {
public static void handleBOMUTF8(String[] vales, int i) {
byte[] buf = vales[i].getBytes();
if (LOG.isDebugEnabled()) {
StringBuilder sb = new StringBuilder();
for (byte b : buf) {
sb.append(Byte.toString(b)); // depends on control dependency: [for], data = [b]
sb.append(" "); // depends on control dependency: [for], data = [b]
}
LOG.debug(vales[i]); // depends on control dependency: [if], data = [none]
LOG.debug(sb.toString()); // depends on control dependency: [if], data = [none]
}
if (buf[0] == (byte) 0xEF && buf[1] == (byte) 0xBB
&& buf[2] == (byte) 0xBF) {
vales[i] = new String(buf, 3, buf.length - 3); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public ListClustersResult withClusters(ClusterSummary... clusters) {
if (this.clusters == null) {
setClusters(new com.amazonaws.internal.SdkInternalList<ClusterSummary>(clusters.length));
}
for (ClusterSummary ele : clusters) {
this.clusters.add(ele);
}
return this;
} } | public class class_name {
public ListClustersResult withClusters(ClusterSummary... clusters) {
if (this.clusters == null) {
setClusters(new com.amazonaws.internal.SdkInternalList<ClusterSummary>(clusters.length)); // depends on control dependency: [if], data = [none]
}
for (ClusterSummary ele : clusters) {
this.clusters.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} } |
public class class_name {
protected void marshalRuleHeader(final RuleModel model,
final StringBuilder buf) {
buf.append("rule \"" + marshalRuleName(model) + "\"");
if (null != model.parentName && model.parentName.length() > 0) {
buf.append(" extends \"" + model.parentName + "\"\n");
} else {
buf.append('\n');
}
} } | public class class_name {
protected void marshalRuleHeader(final RuleModel model,
final StringBuilder buf) {
buf.append("rule \"" + marshalRuleName(model) + "\"");
if (null != model.parentName && model.parentName.length() > 0) {
buf.append(" extends \"" + model.parentName + "\"\n"); // depends on control dependency: [if], data = [none]
} else {
buf.append('\n'); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public String getURL()
{
validateNotEmpty( m_groupId, true, "Group" );
validateNotEmpty( m_artifactId, true, "Artifact" );
final StringBuilder url = new StringBuilder();
url.append( "mvn:" ).append( m_groupId ).append( "/" ).append( m_artifactId );
if( m_version != null || m_type != null || m_classifier != null )
{
url.append( "/" );
}
if( m_version != null )
{
url.append( m_version );
}
if( m_type != null || m_classifier != null )
{
url.append( "/" );
}
if( m_type != null )
{
url.append( m_type );
}
if( m_classifier != null )
{
url.append( "/" ).append( m_classifier );
}
return url.toString();
} } | public class class_name {
public String getURL()
{
validateNotEmpty( m_groupId, true, "Group" );
validateNotEmpty( m_artifactId, true, "Artifact" );
final StringBuilder url = new StringBuilder();
url.append( "mvn:" ).append( m_groupId ).append( "/" ).append( m_artifactId );
if( m_version != null || m_type != null || m_classifier != null )
{
url.append( "/" ); // depends on control dependency: [if], data = [none]
}
if( m_version != null )
{
url.append( m_version ); // depends on control dependency: [if], data = [( m_version]
}
if( m_type != null || m_classifier != null )
{
url.append( "/" ); // depends on control dependency: [if], data = [none]
}
if( m_type != null )
{
url.append( m_type ); // depends on control dependency: [if], data = [( m_type]
}
if( m_classifier != null )
{
url.append( "/" ).append( m_classifier ); // depends on control dependency: [if], data = [( m_classifier]
}
return url.toString();
} } |
public class class_name {
public void setLabelInfo(CommandButtonLabelInfo labelInfo) {
if (labelInfo == null) {
labelInfo = CommandButtonLabelInfo.BLANK_BUTTON_LABEL;
}
CommandButtonLabelInfo old = this.labelInfo;
this.labelInfo = labelInfo;
firePropertyChange(LABEL_INFO_PROPERTY, old, this.labelInfo);
} } | public class class_name {
public void setLabelInfo(CommandButtonLabelInfo labelInfo) {
if (labelInfo == null) {
labelInfo = CommandButtonLabelInfo.BLANK_BUTTON_LABEL; // depends on control dependency: [if], data = [none]
}
CommandButtonLabelInfo old = this.labelInfo;
this.labelInfo = labelInfo;
firePropertyChange(LABEL_INFO_PROPERTY, old, this.labelInfo);
} } |
public class class_name {
    /**
     * Collects, per resource type, the editor configurations that a user may
     * choose for that type, ranked by each editor's configured ranking value.
     * Resource types that are only references to other types, and editors
     * whose mapping redirects the type elsewhere, are skipped.
     *
     * @return map from resource type name to a sorted map of ranking value
     *         (ascending) to editor configuration
     */
    public Map<String, SortedMap<Float, CmsWorkplaceEditorConfiguration>> getConfigurableEditors() {
        Map<String, SortedMap<Float, CmsWorkplaceEditorConfiguration>> configurableEditors = new HashMap<String, SortedMap<Float, CmsWorkplaceEditorConfiguration>>();
        Iterator<CmsWorkplaceEditorConfiguration> i = m_editorConfigurations.iterator();
        while (i.hasNext()) {
            CmsWorkplaceEditorConfiguration currentConfig = i.next();
            // get all resource types specified for the current editor configuration
            Iterator<String> k = currentConfig.getResourceTypes().keySet().iterator();
            while (k.hasNext()) {
                // key is the current resource type of the configuration
                String key = k.next();
                // check if the current resource type is only a reference to another resource type
                CmsExplorerTypeSettings settings = OpenCms.getWorkplaceManager().getExplorerTypeSetting(key);
                if ((settings == null) || CmsStringUtil.isNotEmpty(settings.getReference())) {
                    // skip this resource type
                    continue;
                }
                // A null mapping, or a mapping back to the same type, means the
                // editor really handles this type itself.
                if ((currentConfig.getMappingForResourceType(key) == null)
                    || currentConfig.getMappingForResourceType(key).equals(key)) {
                    // editor is configurable for specified resource type
                    SortedMap<Float, CmsWorkplaceEditorConfiguration> editorConfigs = configurableEditors.get(key);
                    if (editorConfigs == null) {
                        // no configuration map present for resource type, create one
                        editorConfigs = new TreeMap<Float, CmsWorkplaceEditorConfiguration>();
                    }
                    // put the current editor configuration to the resource map with ranking value as key
                    editorConfigs.put(new Float(currentConfig.getRankingForResourceType(key)), currentConfig);
                    // put the resource map to the result map with resource type as key
                    configurableEditors.put(key, editorConfigs);
                }
            }
        }
        return configurableEditors;
    }
}
public Map<String, SortedMap<Float, CmsWorkplaceEditorConfiguration>> getConfigurableEditors() {
Map<String, SortedMap<Float, CmsWorkplaceEditorConfiguration>> configurableEditors = new HashMap<String, SortedMap<Float, CmsWorkplaceEditorConfiguration>>();
Iterator<CmsWorkplaceEditorConfiguration> i = m_editorConfigurations.iterator();
while (i.hasNext()) {
CmsWorkplaceEditorConfiguration currentConfig = i.next();
// get all resource types specified for the current editor configuration
Iterator<String> k = currentConfig.getResourceTypes().keySet().iterator();
while (k.hasNext()) {
// key is the current resource type of the configuration
String key = k.next();
// check if the current resource type is only a reference to another resource type
CmsExplorerTypeSettings settings = OpenCms.getWorkplaceManager().getExplorerTypeSetting(key);
if ((settings == null) || CmsStringUtil.isNotEmpty(settings.getReference())) {
// skip this resource type
continue;
}
if ((currentConfig.getMappingForResourceType(key) == null)
|| currentConfig.getMappingForResourceType(key).equals(key)) {
// editor is configurable for specified resource type
SortedMap<Float, CmsWorkplaceEditorConfiguration> editorConfigs = configurableEditors.get(key);
if (editorConfigs == null) {
// no configuration map present for resource type, create one
editorConfigs = new TreeMap<Float, CmsWorkplaceEditorConfiguration>(); // depends on control dependency: [if], data = [none]
}
// put the current editor configuration to the resource map with ranking value as key
editorConfigs.put(new Float(currentConfig.getRankingForResourceType(key)), currentConfig); // depends on control dependency: [if], data = [none]
// put the resource map to the result map with resource type as key
configurableEditors.put(key, editorConfigs); // depends on control dependency: [if], data = [none]
}
}
}
return configurableEditors;
} } |
public class class_name {
    /**
     * Returns the authentication scheme for this request, normalized to the
     * standard HttpServletRequest constants (BASIC_AUTH, CLIENT_CERT_AUTH,
     * DIGEST_AUTH, FORM_AUTH) when the raw value matches one of them; any
     * other value, including null, is returned unchanged. When security is
     * enabled for the application the raw value is read from the "AUTH_TYPE"
     * private attribute, otherwise from the underlying request object.
     */
    public String getAuthType()
    {
        if (WCCustomProperties.CHECK_REQUEST_OBJECT_IN_USE){
            checkRequestObjectInUse();
        }
        String authType = null;
        //if (com.ibm.ws.security.core.SecurityContext.isSecurityEnabled())
        if(((WebAppDispatcherContext)this.getDispatchContext()).isSecurityEnabledForApplication())
        {
            authType = (String) getPrivateAttribute("AUTH_TYPE");
        }
        else
        {
            authType = _request.getAuthType();
        }
        //321485
        if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) {  //306998.15
            logger.logp(Level.FINE, CLASS_NAME,"getAuthType", " authType --> " + authType);
        }
        // return one of the static vars defined in HttpServletRequest
        if (authType != null)
        {
            if (authType.equals("BASIC"))
                return HttpServletRequest.BASIC_AUTH;
            else if (authType.equals("CLIENT_CERT"))
                return HttpServletRequest.CLIENT_CERT_AUTH;
            else if (authType.equals("DIGEST"))
                return HttpServletRequest.DIGEST_AUTH;
            else if (authType.equals("FORM"))
                return HttpServletRequest.FORM_AUTH;
        }
        return authType;
    }
}
public String getAuthType()
{
if (WCCustomProperties.CHECK_REQUEST_OBJECT_IN_USE){
checkRequestObjectInUse(); // depends on control dependency: [if], data = [none]
}
String authType = null;
//if (com.ibm.ws.security.core.SecurityContext.isSecurityEnabled())
if(((WebAppDispatcherContext)this.getDispatchContext()).isSecurityEnabledForApplication())
{
authType = (String) getPrivateAttribute("AUTH_TYPE"); // depends on control dependency: [if], data = [none]
}
else
{
authType = _request.getAuthType(); // depends on control dependency: [if], data = [none]
}
//321485
if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) { //306998.15
logger.logp(Level.FINE, CLASS_NAME,"getAuthType", " authType --> " + authType); // depends on control dependency: [if], data = [none]
}
// return one of the static vars defined in HttpServletRequest
if (authType != null)
{
if (authType.equals("BASIC"))
return HttpServletRequest.BASIC_AUTH;
else if (authType.equals("CLIENT_CERT"))
return HttpServletRequest.CLIENT_CERT_AUTH;
else if (authType.equals("DIGEST"))
return HttpServletRequest.DIGEST_AUTH;
else if (authType.equals("FORM"))
return HttpServletRequest.FORM_AUTH;
}
return authType;
} } |
public class class_name {
    /**
     * Attempts to attach {@code childInstance} either as a root instance of
     * {@code application} (when {@code parentInstance} is null) or as a child
     * of {@code parentInstance}. The insertion is refused when a sibling with
     * the same name already exists or when the component graph does not allow
     * the parent/child relation. For non-root insertions the whole
     * application is re-validated afterwards and the insertion is rolled back
     * if critical errors are found.
     *
     * @return true when the child was inserted and kept, false otherwise
     */
    public static boolean tryToInsertChildInstance( AbstractApplication application, Instance parentInstance, Instance childInstance ) {
        // First, make sure there is no child instance with this name before inserting.
        // Otherwise, removing the child instance may result randomly.
        boolean hasAlreadyAChildWithThisName = hasChildWithThisName( application, parentInstance, childInstance.getName());
        // We insert a "root instance"
        boolean success = false;
        if( parentInstance == null ) {
            // Root insertion: the child's component must itself be a root component.
            if( ! hasAlreadyAChildWithThisName
                && ComponentHelpers.findAllAncestors( childInstance.getComponent()).isEmpty()) {
                application.getRootInstances().add( childInstance );
                success = true;
                // No validation here, but maybe we should...
            }
        }
        // We insert a child instance
        else {
            if( ! hasAlreadyAChildWithThisName
                && ComponentHelpers.findAllChildren( parentInstance.getComponent()).contains( childInstance.getComponent())) {
                InstanceHelpers.insertChild( parentInstance, childInstance );
                Collection<Instance> allInstances = InstanceHelpers.getAllInstances( application );
                Collection<ModelError> errors = RuntimeModelValidator.validate( allInstances );
                if( RoboconfErrorHelpers.containsCriticalErrors( errors )) {
                    // Validation failed: undo the insertion on both sides of the link.
                    parentInstance.getChildren().remove( childInstance );
                    childInstance.setParent( null );
                } else {
                    success = true;
                }
            }
        }
        return success;
    }
}
public static boolean tryToInsertChildInstance( AbstractApplication application, Instance parentInstance, Instance childInstance ) {
// First, make sure there is no child instance with this name before inserting.
// Otherwise, removing the child instance may result randomly.
boolean hasAlreadyAChildWithThisName = hasChildWithThisName( application, parentInstance, childInstance.getName());
// We insert a "root instance"
boolean success = false;
if( parentInstance == null ) {
if( ! hasAlreadyAChildWithThisName
&& ComponentHelpers.findAllAncestors( childInstance.getComponent()).isEmpty()) {
application.getRootInstances().add( childInstance ); // depends on control dependency: [if], data = [none]
success = true; // depends on control dependency: [if], data = [none]
// No validation here, but maybe we should...
}
}
// We insert a child instance
else {
if( ! hasAlreadyAChildWithThisName
&& ComponentHelpers.findAllChildren( parentInstance.getComponent()).contains( childInstance.getComponent())) {
InstanceHelpers.insertChild( parentInstance, childInstance ); // depends on control dependency: [if], data = [none]
Collection<Instance> allInstances = InstanceHelpers.getAllInstances( application );
Collection<ModelError> errors = RuntimeModelValidator.validate( allInstances );
if( RoboconfErrorHelpers.containsCriticalErrors( errors )) {
parentInstance.getChildren().remove( childInstance ); // depends on control dependency: [if], data = [none]
childInstance.setParent( null ); // depends on control dependency: [if], data = [none]
} else {
success = true; // depends on control dependency: [if], data = [none]
}
}
}
return success;
} } |
public class class_name {
public <T extends Runnable> T executeSyncTimed(T runnable, long inMs) {
try {
Thread.sleep(inMs);
this.executeSync(runnable);
} catch (InterruptedException e) {
e.printStackTrace();
}
return runnable;
} } | public class class_name {
public <T extends Runnable> T executeSyncTimed(T runnable, long inMs) {
try {
Thread.sleep(inMs); // depends on control dependency: [try], data = [none]
this.executeSync(runnable); // depends on control dependency: [try], data = [none]
} catch (InterruptedException e) {
e.printStackTrace();
} // depends on control dependency: [catch], data = [none]
return runnable;
} } |
public class class_name {
public void setPeriodAfterStart(ReadablePeriod period) {
if (period == null) {
setEndMillis(getStartMillis());
} else {
setEndMillis(getChronology().add(period, getStartMillis(), 1));
}
} } | public class class_name {
public void setPeriodAfterStart(ReadablePeriod period) {
if (period == null) {
setEndMillis(getStartMillis()); // depends on control dependency: [if], data = [none]
} else {
setEndMillis(getChronology().add(period, getStartMillis(), 1)); // depends on control dependency: [if], data = [(period]
}
} } |
public class class_name {
@Override
public void destroy()
{
if (indexManager != null)
{
indexManager.close();
}
if (schemaManager != null)
{
schemaManager.dropSchema();
}
schemaManager = null;
externalProperties = null;
for (Object connectionPool : hostPools.values())
{
if (connectionPool != null && connectionPool.getClass().isAssignableFrom(ConnectionPool.class))
{
((ConnectionPool) connectionPool).close(true);
}
}
((CassandraRetryService) hostRetryService).shutdown();
} } | public class class_name {
@Override
public void destroy()
{
if (indexManager != null)
{
indexManager.close(); // depends on control dependency: [if], data = [none]
}
if (schemaManager != null)
{
schemaManager.dropSchema(); // depends on control dependency: [if], data = [none]
}
schemaManager = null;
externalProperties = null;
for (Object connectionPool : hostPools.values())
{
if (connectionPool != null && connectionPool.getClass().isAssignableFrom(ConnectionPool.class))
{
((ConnectionPool) connectionPool).close(true); // depends on control dependency: [if], data = [none]
}
}
((CassandraRetryService) hostRetryService).shutdown();
} } |
public class class_name {
    /**
     * Builds a {@link Change} record for {@code key} by combining the value
     * and status from the import source with whatever the translation
     * database currently holds for the same key and language, including
     * master-language values when the import source declares one. The
     * concrete change type is decided at the end by {@code setChangeType}.
     *
     * @param key the text key the change describes
     * @return the populated change, never null
     */
    protected Change createChange(String key) {
        String language = importSource.getLanguage();
        String importedValue = importSource.getValue(key);
        Status importedStatus = importSource.getStatus(key);
        // basic change
        Change change = new Change(Change.TYPE_NO_CHANGE, language, key, importedValue, importedStatus);
        ITextNode textNode = db.getTextNode(key);
        if (importSource.hasMasterLanguage()) {
            String masterLanguage = importSource.getMasterLanguage();
            change.setMasterLanguage(masterLanguage);
            // imported master value
            change.setImportedMasterValue(importSource.getMasterValue(key));
            // db master value
            if (textNode != null) {
                IValueNode masterValueNode = textNode.getValueNode(masterLanguage);
                if (masterValueNode != null) {
                    change.setDbMasterValue(masterValueNode.getValue());
                }
            }
        }
        // Copy the database's current value/status for the target language, if any.
        if (textNode != null) {
            change.setContext(textNode.getContext());
            IValueNode valueNode = textNode.getValueNode(language);
            if (valueNode != null) {
                change.setDbValue(valueNode.getValue());
                change.setDbStatus(valueNode.getStatus());
            }
        }
        setChangeType(change);
        return change;
    }
}
protected Change createChange(String key) {
String language = importSource.getLanguage();
String importedValue = importSource.getValue(key);
Status importedStatus = importSource.getStatus(key);
// basic change
Change change = new Change(Change.TYPE_NO_CHANGE, language, key, importedValue, importedStatus);
ITextNode textNode = db.getTextNode(key);
if (importSource.hasMasterLanguage()) {
String masterLanguage = importSource.getMasterLanguage();
change.setMasterLanguage(masterLanguage); // depends on control dependency: [if], data = [none]
// imported master value
change.setImportedMasterValue(importSource.getMasterValue(key)); // depends on control dependency: [if], data = [none]
// db master value
if (textNode != null) {
IValueNode masterValueNode = textNode.getValueNode(masterLanguage);
if (masterValueNode != null) {
change.setDbMasterValue(masterValueNode.getValue()); // depends on control dependency: [if], data = [(masterValueNode]
}
}
}
if (textNode != null) {
change.setContext(textNode.getContext()); // depends on control dependency: [if], data = [(textNode]
IValueNode valueNode = textNode.getValueNode(language);
if (valueNode != null) {
change.setDbValue(valueNode.getValue()); // depends on control dependency: [if], data = [(valueNode]
change.setDbStatus(valueNode.getStatus()); // depends on control dependency: [if], data = [(valueNode]
}
}
setChangeType(change);
return change;
} } |
public class class_name {
public void error(NetworkConnection vc, IOWriteRequestContext wrc, IOException t) {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, "error", new Object[]{vc, wrc, t}); // F176003, F184828
if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled() && (t != null)) SibTr.exception(this, tc, t); // F176003
if (connection.isLoggingIOEvents()) connection.getConnectionEventRecorder().logDebug("error method invoked on write context " + System.identityHashCode(wrc) + " with exception " + t);
try {
//Note that this also deals with the buffer returned by getBuffer.
WsByteBuffer[] buffers;
buffers = writeCtx.getBuffers();
writeCtx.setBuffers(null);
if (buffers != null) {
for (WsByteBuffer buffer : buffers) {
try {
if (buffer != null) buffer.release();
} catch (RuntimeException e) {
//Absorb any exceptions if it gets released by another thread (for example by Connection.nonThreadSafePhysicalClose).
//No FFDC code needed
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, "Caught exception on releasing buffer.", e);
}
}
} else {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, "Request has no buffers: " + writeCtx);
}
// Deal with the error by invalidating the connection. That'll teach 'em.
final String message = "IOException received - " + t == null ? "" : t.getMessage();
connection.invalidate(false, t, message); // F176003, F224570
} catch (Error error) {
FFDCFilter.processException
(error, "com.ibm.ws.sib.jfapchannel.impl.ConnectionWriteCompletedCallback", JFapChannelConstants.CONNWRITECOMPCALLBACK_COMPLETE_01, connection.getDiagnostics(true));
if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) SibTr.exception(this, tc, error);
// It might appear slightly odd for this code to catch Error (especially since the JDK docs say
// that Error means that something has gone so badly wrong that you should abandon all hope).
// This code makes one final stab at putting out some diagnostics about what happened (if we
// propagate the Error up to the TCP Channel, it is sometimes lost) and closing down the
// connection. I figured that we might as well try to do something - as we can hardly make
// things worse... (famous last words)
connection.invalidate(false, error, "Error caught in ConnectionWriteCompletedCallback.error()");
// Re-throw the error to ensure that it causes the maximum devastation.
// The JVM is probably very ill if an Error is thrown so attempt no recovery.
throw error;
} catch (RuntimeException runtimeException) {
FFDCFilter.processException
(runtimeException, "com.ibm.ws.sib.jfapchannel.impl.ConnectionWriteCompletedCallback", JFapChannelConstants.CONNWRITECOMPCALLBACK_COMPLETE_05, connection.getDiagnostics(true));
if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) SibTr.exception(this, tc, runtimeException);
// We can reasonably try to recover from a runtime exception by invalidating the associated
// connection. This should drive the underlying TCP/IP socket to be closed.
connection.invalidate(false, runtimeException, "RuntimeException caught in ConnectionWriteCompletedCallback.error()");
// Don't throw the RuntimeException on as we risk blowing away part of the TCP channel.
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "error");
} } | public class class_name {
public void error(NetworkConnection vc, IOWriteRequestContext wrc, IOException t) {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, "error", new Object[]{vc, wrc, t}); // F176003, F184828
if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled() && (t != null)) SibTr.exception(this, tc, t); // F176003
if (connection.isLoggingIOEvents()) connection.getConnectionEventRecorder().logDebug("error method invoked on write context " + System.identityHashCode(wrc) + " with exception " + t);
try {
//Note that this also deals with the buffer returned by getBuffer.
WsByteBuffer[] buffers;
buffers = writeCtx.getBuffers(); // depends on control dependency: [try], data = [none]
writeCtx.setBuffers(null); // depends on control dependency: [try], data = [none]
if (buffers != null) {
for (WsByteBuffer buffer : buffers) {
try {
if (buffer != null) buffer.release();
} catch (RuntimeException e) {
//Absorb any exceptions if it gets released by another thread (for example by Connection.nonThreadSafePhysicalClose).
//No FFDC code needed
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, "Caught exception on releasing buffer.", e);
} // depends on control dependency: [catch], data = [none]
}
} else {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, "Request has no buffers: " + writeCtx);
}
// Deal with the error by invalidating the connection. That'll teach 'em.
final String message = "IOException received - " + t == null ? "" : t.getMessage();
connection.invalidate(false, t, message); // F176003, F224570 // depends on control dependency: [try], data = [none]
} catch (Error error) {
FFDCFilter.processException
(error, "com.ibm.ws.sib.jfapchannel.impl.ConnectionWriteCompletedCallback", JFapChannelConstants.CONNWRITECOMPCALLBACK_COMPLETE_01, connection.getDiagnostics(true));
if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) SibTr.exception(this, tc, error);
// It might appear slightly odd for this code to catch Error (especially since the JDK docs say
// that Error means that something has gone so badly wrong that you should abandon all hope).
// This code makes one final stab at putting out some diagnostics about what happened (if we
// propagate the Error up to the TCP Channel, it is sometimes lost) and closing down the
// connection. I figured that we might as well try to do something - as we can hardly make
// things worse... (famous last words)
connection.invalidate(false, error, "Error caught in ConnectionWriteCompletedCallback.error()");
// Re-throw the error to ensure that it causes the maximum devastation.
// The JVM is probably very ill if an Error is thrown so attempt no recovery.
throw error;
} catch (RuntimeException runtimeException) { // depends on control dependency: [catch], data = [none]
FFDCFilter.processException
(runtimeException, "com.ibm.ws.sib.jfapchannel.impl.ConnectionWriteCompletedCallback", JFapChannelConstants.CONNWRITECOMPCALLBACK_COMPLETE_05, connection.getDiagnostics(true));
if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) SibTr.exception(this, tc, runtimeException);
// We can reasonably try to recover from a runtime exception by invalidating the associated
// connection. This should drive the underlying TCP/IP socket to be closed.
connection.invalidate(false, runtimeException, "RuntimeException caught in ConnectionWriteCompletedCallback.error()");
// Don't throw the RuntimeException on as we risk blowing away part of the TCP channel.
} // depends on control dependency: [catch], data = [none]
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "error");
} } |
public class class_name {
private void processDefaultHeader(ParserContext context) {
for (String line : context.getFileLines()) {
if (hasHeaderSeparator(line)) {
break;
}
processHeaderLine(line, context.getDocumentModel());
}
} } | public class class_name {
private void processDefaultHeader(ParserContext context) {
for (String line : context.getFileLines()) {
if (hasHeaderSeparator(line)) {
break;
}
processHeaderLine(line, context.getDocumentModel()); // depends on control dependency: [for], data = [line]
}
} } |
public class class_name {
private InMemoryMetricEmitter extractInMemoryMetricEmitter(
final MetricReportManager metricManager) {
InMemoryMetricEmitter memoryEmitter = null;
for (final IMetricEmitter emitter : metricManager.getMetricEmitters()) {
if (emitter instanceof InMemoryMetricEmitter) {
memoryEmitter = (InMemoryMetricEmitter) emitter;
break;
}
}
return memoryEmitter;
} } | public class class_name {
private InMemoryMetricEmitter extractInMemoryMetricEmitter(
final MetricReportManager metricManager) {
InMemoryMetricEmitter memoryEmitter = null;
for (final IMetricEmitter emitter : metricManager.getMetricEmitters()) {
if (emitter instanceof InMemoryMetricEmitter) {
memoryEmitter = (InMemoryMetricEmitter) emitter; // depends on control dependency: [if], data = [none]
break;
}
}
return memoryEmitter;
} } |
public class class_name {
@Override
public void fit(DataSet dataSet) {
featureStats = (S) newBuilder().addFeatures(dataSet).build();
if (isFitLabel()) {
labelStats = (S) newBuilder().addLabels(dataSet).build();
}
} } | public class class_name {
@Override
public void fit(DataSet dataSet) {
featureStats = (S) newBuilder().addFeatures(dataSet).build();
if (isFitLabel()) {
labelStats = (S) newBuilder().addLabels(dataSet).build(); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
    /**
     * ANTLR-generated parse rule for OpOther: delegates to
     * rule__OpOther__Alternatives, surrounding the call with the before/after
     * grammar-access notifications when not backtracking. Generated from
     * InternalXbase.g -- do not hand-edit.
     *
     * @throws RecognitionException on a syntax error (reported and recovered)
     */
    public final void ruleOpOther() throws RecognitionException {
        int stackSize = keepStackSize();
        try {
            // InternalXbase.g:396:2: ( ( ( rule__OpOther__Alternatives ) ) )
            // InternalXbase.g:397:2: ( ( rule__OpOther__Alternatives ) )
            {
                // InternalXbase.g:397:2: ( ( rule__OpOther__Alternatives ) )
                // InternalXbase.g:398:3: ( rule__OpOther__Alternatives )
                {
                    if ( state.backtracking==0 ) {
                        before(grammarAccess.getOpOtherAccess().getAlternatives());
                    }
                    // InternalXbase.g:399:3: ( rule__OpOther__Alternatives )
                    // InternalXbase.g:399:4: rule__OpOther__Alternatives
                    {
                        pushFollow(FOLLOW_2);
                        rule__OpOther__Alternatives();
                        state._fsp--;
                        if (state.failed) return ;
                    }
                    if ( state.backtracking==0 ) {
                        after(grammarAccess.getOpOtherAccess().getAlternatives());
                    }
                }
            }
        }
        catch (RecognitionException re) {
            reportError(re);
            recover(input,re);
        }
        finally {
            restoreStackSize(stackSize);
        }
        return ;
    }
}
// Entry rule for the OpOther grammar element: delegates to
// rule__OpOther__Alternatives with before()/after() notifications.
// Appears to be machine-generated parser code (ANTLR-style).
public final void ruleOpOther() throws RecognitionException {
int stackSize = keepStackSize();
try {
// InternalXbase.g:396:2: ( ( ( rule__OpOther__Alternatives ) ) )
// InternalXbase.g:397:2: ( ( rule__OpOther__Alternatives ) )
{
// InternalXbase.g:397:2: ( ( rule__OpOther__Alternatives ) )
// InternalXbase.g:398:3: ( rule__OpOther__Alternatives )
{
// Notifications are suppressed while backtracking.
if ( state.backtracking==0 ) {
before(grammarAccess.getOpOtherAccess().getAlternatives()); // depends on control dependency: [if], data = [none]
}
// InternalXbase.g:399:3: ( rule__OpOther__Alternatives )
// InternalXbase.g:399:4: rule__OpOther__Alternatives
{
pushFollow(FOLLOW_2);
rule__OpOther__Alternatives();
state._fsp--;
if (state.failed) return ;
}
if ( state.backtracking==0 ) {
after(grammarAccess.getOpOtherAccess().getAlternatives()); // depends on control dependency: [if], data = [none]
}
}
}
}
catch (RecognitionException re) {
// Standard recovery: report the error, then resynchronize the input.
reportError(re);
recover(input,re);
}
finally {
restoreStackSize(stackSize);
}
return ;
} } |
public class class_name {
    /**
     * Reads the node string at the given path and parses it as an int.
     *
     * @param attribute the attribute name to read
     * @param path the node path leading to the attribute
     * @return the parsed integer value
     * @throws LionEngineException if the value is not a valid integer
     */
    public final int getInteger(String attribute, String... path)
    {
        try
        {
            final String value = getNodeString(attribute, path);
            return Integer.parseInt(value);
        }
        catch (final NumberFormatException cause)
        {
            // Wrap with the media reference so the failing resource is identifiable.
            throw new LionEngineException(cause, media);
        }
    }
} | public class class_name {
// Reads the node string at the given path and parses it as a decimal int.
// Throws LionEngineException (carrying the media reference) on bad input.
public final int getInteger(String attribute, String... path)
{
try
{
return Integer.parseInt(getNodeString(attribute, path)); // depends on control dependency: [try], data = [none]
}
catch (final NumberFormatException exception)
{
// Re-throw with the media reference for diagnostics.
throw new LionEngineException(exception, media);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
    /**
     * Returns the last path segment of the canonical form of {@code path}.
     *
     * @param path the path to inspect
     * @return the final segment of the canonical path, the canonical path
     *         itself when splitting yields no segments, or null when no
     *         canonical path can be resolved
     */
    public static String getCanonicalDirectoryName(String path) {
        final String canonical = getCanonicalPath(path);
        if (canonical == null) {
            return null;
        }
        // The separator is a literal, not a regex - quote it before splitting.
        final String quotedSeparator = Pattern.quote(System.getProperty("file.separator", "/"));
        final String[] segments = canonical.split(quotedSeparator);
        return segments.length == 0 ? canonical : segments[segments.length - 1];
    }
} | public class class_name {
// Returns the last segment of the canonical form of the given path,
// or null when no canonical path is available.
public static String getCanonicalDirectoryName(String path) {
String realPath = getCanonicalPath(path);
if (realPath == null) {
return null;
// depends on control dependency: [if], data = [none]
}
String separator = System.getProperty("file.separator", "/");
// backslashes must be escaped for split(). kthxbai
separator = Pattern.quote(separator);
String[] pathElements = realPath.split(separator);
// Last segment is the directory name; fall back to the whole path when empty.
return pathElements.length > 0 ? pathElements[pathElements.length - 1] : realPath;
} } |
public class class_name {
    /**
     * Replaces the stored slots with a defensive copy of the given collection;
     * a null argument clears the field to null.
     *
     * @param slots the slots to store, or null
     */
    public void setSlots(java.util.Collection<Slot> slots) {
        // Copy so later changes to the caller's collection do not leak in.
        this.slots = (slots == null) ? null : new java.util.ArrayList<Slot>(slots);
    }
} | public class class_name {
// Replaces the stored slots with a defensive copy; null clears the field.
public void setSlots(java.util.Collection<Slot> slots) {
if (slots == null) {
this.slots = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
// Defensive copy so later changes to the argument do not leak in.
this.slots = new java.util.ArrayList<Slot>(slots);
} } |
public class class_name {
    /**
     * Checks whether {@code aSearchDirectory} equals the start directory or
     * one of its ancestors, comparing canonicalized absolute paths.
     *
     * @param aSearchDirectory the candidate parent directory; must not be null
     * @param aStartDirectory the directory to walk upwards from; must not be null
     * @return true when the search directory is the start directory or an ancestor of it
     */
    @SuppressFBWarnings ("IL_INFINITE_LOOP")
    public static boolean isParentDirectory (@Nonnull final Path aSearchDirectory, @Nonnull final Path aStartDirectory)
    {
        ValueEnforcer.notNull (aSearchDirectory, "SearchDirectory");
        ValueEnforcer.notNull (aStartDirectory, "StartDirectory");
        Path aRealSearch = aSearchDirectory.toAbsolutePath ();
        Path aRealStart = aStartDirectory.toAbsolutePath ();
        try
        {
            // Best-effort canonicalization; search dir first, then start dir.
            aRealSearch = getCanonicalFile (aRealSearch);
            aRealStart = getCanonicalFile (aRealStart);
        }
        catch (final IOException ex)
        {
            // ignore - fall back to the absolute (non-canonical) paths
        }
        if (!aRealSearch.toFile ().isDirectory ())
            return false;
        // Walk upwards until the search directory is met or the root is passed.
        for (Path aCur = aRealStart; aCur != null; aCur = aCur.getParent ())
            if (aCur.equals (aRealSearch))
                return true;
        return false;
    }
} | public class class_name {
// Checks whether aSearchDirectory equals aStartDirectory or one of its
// ancestors, comparing canonicalized absolute paths.
@SuppressFBWarnings ("IL_INFINITE_LOOP")
public static boolean isParentDirectory (@Nonnull final Path aSearchDirectory, @Nonnull final Path aStartDirectory)
{
ValueEnforcer.notNull (aSearchDirectory, "SearchDirectory");
ValueEnforcer.notNull (aStartDirectory, "StartDirectory");
Path aRealSearchDirectory = aSearchDirectory.toAbsolutePath ();
Path aRealStartDirectory = aStartDirectory.toAbsolutePath ();
try
{
// Best-effort canonicalization; on failure the absolute paths are used.
aRealSearchDirectory = getCanonicalFile (aRealSearchDirectory); // depends on control dependency: [try], data = [none]
aRealStartDirectory = getCanonicalFile (aRealStartDirectory); // depends on control dependency: [try], data = [none]
}
catch (final IOException ex)
{
// ignore
} // depends on control dependency: [catch], data = [none]
if (aRealSearchDirectory.toFile ().isDirectory ())
{
// Walk upwards until the search directory is met or the root is passed.
Path aParent = aRealStartDirectory;
while (aParent != null)
{
if (aParent.equals (aRealSearchDirectory))
return true;
aParent = aParent.getParent (); // depends on control dependency: [while], data = [none]
}
}
return false;
} } |
public class class_name {
    /**
     * Returns a copy of {@code initialUri} with its path replaced by
     * {@code path}; a leading '/' is prepended when missing.
     *
     * @param initialUri the URI to copy
     * @param path the new path
     * @return the rebuilt URI
     */
    public static URI setPath(final URI initialUri, final String path) {
        // URI paths must be absolute.
        final String absolutePath = path.startsWith("/") ? path : '/' + path;
        try {
            if (initialUri.getHost() == null && initialUri.getAuthority() != null) {
                // No host component: rebuild via the raw-authority constructor.
                return new URI(initialUri.getScheme(), initialUri.getAuthority(), absolutePath,
                        initialUri.getQuery(), initialUri.getFragment());
            }
            return new URI(initialUri.getScheme(), initialUri.getUserInfo(), initialUri.getHost(),
                    initialUri.getPort(), absolutePath, initialUri.getQuery(), initialUri.getFragment());
        } catch (URISyntaxException e) {
            throw ExceptionUtils.getRuntimeException(e);
        }
    }
} | public class class_name {
// Returns a copy of initialUri with its path replaced by the given path
// (made absolute by prepending '/' when missing).
public static URI setPath(final URI initialUri, final String path) {
String finalPath = path;
if (!finalPath.startsWith("/")) {
finalPath = '/' + path; // depends on control dependency: [if], data = [none]
}
try {
if (initialUri.getHost() == null && initialUri.getAuthority() != null) {
// No host component: rebuild via the raw-authority constructor.
return new URI(initialUri.getScheme(), initialUri.getAuthority(), finalPath,
initialUri.getQuery(),
initialUri.getFragment()); // depends on control dependency: [if], data = [none]
} else {
return new URI(initialUri.getScheme(), initialUri.getUserInfo(), initialUri.getHost(),
initialUri.getPort(),
finalPath, initialUri.getQuery(), initialUri.getFragment()); // depends on control dependency: [if], data = [none]
}
} catch (URISyntaxException e) {
// Wrap as unchecked; callers do not handle URISyntaxException.
throw ExceptionUtils.getRuntimeException(e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
    /**
     * Lists dedicated hosts, forwarding the optional marker, maxKeys and
     * zoneName filters from the request as query parameters.
     *
     * @param request the list request carrying the optional filters
     * @return the parsed ListDedicatedHostsResponse
     */
    public ListDedicatedHostsResponse listDedicatedHosts(ListDedicatedHostsRequest request) {
        InternalRequest internalRequest = this.createRequest(request, HttpMethodName.GET, null);
        final String marker = request.getMarker();
        if (!Strings.isNullOrEmpty(marker)) {
            internalRequest.addParameter("marker", marker);
        }
        // maxKeys is only forwarded when non-negative.
        if (request.getMaxKeys() >= 0) {
            internalRequest.addParameter("maxKeys", String.valueOf(request.getMaxKeys()));
        }
        final String zoneName = request.getZoneName();
        if (!Strings.isNullOrEmpty(zoneName)) {
            internalRequest.addParameter("zoneName", zoneName);
        }
        return invokeHttpClient(internalRequest, ListDedicatedHostsResponse.class);
    }
} | public class class_name {
// Lists dedicated hosts; optional marker/maxKeys/zoneName filters from the
// request are forwarded as query parameters.
public ListDedicatedHostsResponse listDedicatedHosts(ListDedicatedHostsRequest request) {
InternalRequest internalRequest = this.createRequest(request, HttpMethodName.GET, null);
if (!Strings.isNullOrEmpty(request.getMarker())) {
internalRequest.addParameter("marker", request.getMarker()); // depends on control dependency: [if], data = [none]
}
// maxKeys is only forwarded when non-negative.
if (request.getMaxKeys() >= 0) {
internalRequest.addParameter("maxKeys", String.valueOf(request.getMaxKeys())); // depends on control dependency: [if], data = [(request.getMaxKeys()]
}
if (!Strings.isNullOrEmpty(request.getZoneName())) {
internalRequest.addParameter("zoneName", request.getZoneName()); // depends on control dependency: [if], data = [none]
}
return invokeHttpClient(internalRequest, ListDedicatedHostsResponse.class);
} } |
public class class_name {
/**
 * Prepares the mtas "document" component: reads every mtas.document.&lt;id&gt;.*
 * request parameter into parallel arrays, validates them against the
 * mandatory per-id "field" parameter, and registers a ComponentDocument on
 * each configured field.
 *
 * @param rb the Solr response builder carrying the request parameters
 * @param mtasFields the component-fields registry to populate
 * @throws IOException if a field or prefix parameter is missing or invalid
 */
public void prepare(ResponseBuilder rb, ComponentFields mtasFields)
throws IOException {
Set<String> ids = MtasSolrResultUtil
.getIdsFromParameters(rb.req.getParams(), PARAM_MTAS_DOCUMENT);
if (!ids.isEmpty()) {
int tmpCounter = 0;
// Parallel arrays: one slot per parameter-set id, filled via tmpCounter.
String[] fields = new String[ids.size()];
String[] keys = new String[ids.size()];
String[] prefixes = new String[ids.size()];
String[] types = new String[ids.size()];
String[] regexps = new String[ids.size()];
String[] lists = new String[ids.size()];
Boolean[] listRegexps = new Boolean[ids.size()];
Boolean[] listExpands = new Boolean[ids.size()];
int[] listExpandNumbers = new int[ids.size()];
String[] ignoreRegexps = new String[ids.size()];
String[] ignoreLists = new String[ids.size()];
Boolean[] ignoreListRegexps = new Boolean[ids.size()];
String[] listNumbers = new String[ids.size()];
// Read the raw parameter values for every id.
for (String id : ids) {
fields[tmpCounter] = rb.req.getParams().get(
PARAM_MTAS_DOCUMENT + "." + id + "." + NAME_MTAS_DOCUMENT_FIELD,
null);
// The key defaults to the positional counter when not supplied.
keys[tmpCounter] = rb.req.getParams()
.get(PARAM_MTAS_DOCUMENT + "." + id + "." + NAME_MTAS_DOCUMENT_KEY,
String.valueOf(tmpCounter))
.trim();
prefixes[tmpCounter] = rb.req.getParams().get(
PARAM_MTAS_DOCUMENT + "." + id + "." + NAME_MTAS_DOCUMENT_PREFIX,
null);
types[tmpCounter] = rb.req.getParams().get(
PARAM_MTAS_DOCUMENT + "." + id + "." + NAME_MTAS_DOCUMENT_TYPE,
null);
regexps[tmpCounter] = rb.req.getParams().get(
PARAM_MTAS_DOCUMENT + "." + id + "." + NAME_MTAS_DOCUMENT_REGEXP,
null);
lists[tmpCounter] = rb.req.getParams().get(
PARAM_MTAS_DOCUMENT + "." + id + "." + NAME_MTAS_DOCUMENT_LIST,
null);
listRegexps[tmpCounter] = rb.req.getParams().getBool(PARAM_MTAS_DOCUMENT
+ "." + id + "." + NAME_MTAS_DOCUMENT_LIST_REGEXP, false);
listExpands[tmpCounter] = rb.req.getParams().getBool(PARAM_MTAS_DOCUMENT
+ "." + id + "." + NAME_MTAS_DOCUMENT_LIST_EXPAND, false);
listExpandNumbers[tmpCounter] = rb.req.getParams()
.getInt(PARAM_MTAS_DOCUMENT + "." + id + "."
+ NAME_MTAS_DOCUMENT_LIST_EXPAND_NUMBER, 10);
ignoreRegexps[tmpCounter] = rb.req.getParams().get(PARAM_MTAS_DOCUMENT
+ "." + id + "." + NAME_MTAS_DOCUMENT_IGNORE_REGEXP, null);
ignoreLists[tmpCounter] = rb.req.getParams().get(PARAM_MTAS_DOCUMENT
+ "." + id + "." + NAME_MTAS_DOCUMENT_IGNORE_LIST, null);
ignoreListRegexps[tmpCounter] = rb.req.getParams()
.getBool(PARAM_MTAS_DOCUMENT + "." + id + "."
+ NAME_MTAS_DOCUMENT_IGNORE_LIST_REGEXP, false);
listNumbers[tmpCounter] = rb.req.getParams().get(
PARAM_MTAS_DOCUMENT + "." + id + "." + NAME_MTAS_DOCUMENT_NUMBER,
null);
tmpCounter++;
}
String uniqueKeyField = rb.req.getSchema().getUniqueKeyField().getName();
mtasFields.doDocument = true;
rb.setNeedDocList(true);
// Every id must name a field; register unseen fields in the registry.
for (String field : fields) {
if (field == null || field.isEmpty()) {
throw new IOException("no (valid) field in mtas document");
} else if (!mtasFields.list.containsKey(field)) {
mtasFields.list.put(field, new ComponentField(uniqueKeyField));
}
}
// Check that each per-id parameter array lines up with the field array.
MtasSolrResultUtil.compareAndCheck(keys, fields, NAME_MTAS_DOCUMENT_KEY,
NAME_MTAS_DOCUMENT_FIELD, true);
MtasSolrResultUtil.compareAndCheck(prefixes, fields,
NAME_MTAS_DOCUMENT_PREFIX, NAME_MTAS_DOCUMENT_FIELD, false);
MtasSolrResultUtil.compareAndCheck(types, fields, NAME_MTAS_DOCUMENT_TYPE,
NAME_MTAS_DOCUMENT_FIELD, false);
MtasSolrResultUtil.compareAndCheck(regexps, fields,
NAME_MTAS_DOCUMENT_REGEXP, NAME_MTAS_DOCUMENT_FIELD, false);
MtasSolrResultUtil.compareAndCheck(lists, fields, NAME_MTAS_DOCUMENT_LIST,
NAME_MTAS_DOCUMENT_FIELD, false);
MtasSolrResultUtil.compareAndCheck(ignoreRegexps, fields,
NAME_MTAS_DOCUMENT_IGNORE_REGEXP, NAME_MTAS_DOCUMENT_FIELD, false);
MtasSolrResultUtil.compareAndCheck(ignoreLists, fields,
NAME_MTAS_DOCUMENT_IGNORE_LIST, NAME_MTAS_DOCUMENT_FIELD, false);
MtasSolrResultUtil.compareAndCheck(listNumbers, fields,
NAME_MTAS_DOCUMENT_NUMBER, NAME_MTAS_DOCUMENT_FIELD, false);
// Build one ComponentDocument per configured field.
for (int i = 0; i < fields.length; i++) {
String key = (keys[i] == null) || (keys[i].isEmpty())
? String.valueOf(i) + ":" + fields[i] + ":" + prefixes[i]
: keys[i].trim();
String prefix = prefixes[i];
if (prefix == null || prefix.isEmpty()) {
throw new IOException("no (valid) prefix in mtas document");
}
String type = types[i];
String regexp = regexps[i];
String[] list = null;
Boolean listRegexp = listRegexps[i];
Boolean listExpand = listExpands[i];
int listExpandNumber = listExpandNumbers[i];
if (lists[i] != null) {
// Split on unescaped commas; "\," and "\\" are unescaped afterwards.
ArrayList<String> tmpList = new ArrayList<>();
String[] subList = lists[i].split("(?<!\\\\),");
for (int j = 0; j < subList.length; j++) {
tmpList.add(subList[j].replace("\\,", ",").replace("\\\\", "\\"));
}
list = tmpList.toArray(new String[tmpList.size()]);
}
int listNumber = Math.max(0,
(listNumbers[i] == null) || (listNumbers[i].isEmpty()) ? 0
: Integer.parseInt(listNumbers[i]));
String ignoreRegexp = ignoreRegexps[i];
String[] ignoreList = null;
Boolean ignoreListRegexp = ignoreListRegexps[i];
if (ignoreLists[i] != null) {
// Same unescaping rules as for the regular list.
ArrayList<String> tmpList = new ArrayList<>();
String[] subList = ignoreLists[i].split("(?<!\\\\),");
for (int j = 0; j < subList.length; j++) {
tmpList.add(subList[j].replace("\\,", ",").replace("\\\\", "\\"));
}
ignoreList = tmpList.toArray(new String[tmpList.size()]);
}
mtasFields.list.get(fields[i]).documentList.add(new ComponentDocument(
key, prefix, type, regexp, list, listNumber, listRegexp, listExpand,
listExpandNumber, ignoreRegexp, ignoreList, ignoreListRegexp));
}
}
} } | public class class_name {
/**
 * Prepares the mtas "document" component: reads every mtas.document.&lt;id&gt;.*
 * request parameter into parallel arrays, validates them against the
 * mandatory per-id "field" parameter, and registers a ComponentDocument on
 * each configured field.
 *
 * @param rb the Solr response builder carrying the request parameters
 * @param mtasFields the component-fields registry to populate
 * @throws IOException if a field or prefix parameter is missing or invalid
 */
public void prepare(ResponseBuilder rb, ComponentFields mtasFields)
throws IOException {
Set<String> ids = MtasSolrResultUtil
.getIdsFromParameters(rb.req.getParams(), PARAM_MTAS_DOCUMENT);
if (!ids.isEmpty()) {
int tmpCounter = 0;
// Parallel arrays: one slot per parameter-set id, filled via tmpCounter.
String[] fields = new String[ids.size()];
String[] keys = new String[ids.size()];
String[] prefixes = new String[ids.size()];
String[] types = new String[ids.size()];
String[] regexps = new String[ids.size()];
String[] lists = new String[ids.size()];
Boolean[] listRegexps = new Boolean[ids.size()];
Boolean[] listExpands = new Boolean[ids.size()];
int[] listExpandNumbers = new int[ids.size()];
String[] ignoreRegexps = new String[ids.size()];
String[] ignoreLists = new String[ids.size()];
Boolean[] ignoreListRegexps = new Boolean[ids.size()];
String[] listNumbers = new String[ids.size()];
// Read the raw parameter values for every id.
for (String id : ids) {
fields[tmpCounter] = rb.req.getParams().get(
PARAM_MTAS_DOCUMENT + "." + id + "." + NAME_MTAS_DOCUMENT_FIELD,
null);
// The key defaults to the positional counter when not supplied.
keys[tmpCounter] = rb.req.getParams()
.get(PARAM_MTAS_DOCUMENT + "." + id + "." + NAME_MTAS_DOCUMENT_KEY,
String.valueOf(tmpCounter))
.trim();
prefixes[tmpCounter] = rb.req.getParams().get(
PARAM_MTAS_DOCUMENT + "." + id + "." + NAME_MTAS_DOCUMENT_PREFIX,
null);
types[tmpCounter] = rb.req.getParams().get(
PARAM_MTAS_DOCUMENT + "." + id + "." + NAME_MTAS_DOCUMENT_TYPE,
null);
regexps[tmpCounter] = rb.req.getParams().get(
PARAM_MTAS_DOCUMENT + "." + id + "." + NAME_MTAS_DOCUMENT_REGEXP,
null);
lists[tmpCounter] = rb.req.getParams().get(
PARAM_MTAS_DOCUMENT + "." + id + "." + NAME_MTAS_DOCUMENT_LIST,
null);
listRegexps[tmpCounter] = rb.req.getParams().getBool(PARAM_MTAS_DOCUMENT
+ "." + id + "." + NAME_MTAS_DOCUMENT_LIST_REGEXP, false);
listExpands[tmpCounter] = rb.req.getParams().getBool(PARAM_MTAS_DOCUMENT
+ "." + id + "." + NAME_MTAS_DOCUMENT_LIST_EXPAND, false);
listExpandNumbers[tmpCounter] = rb.req.getParams()
.getInt(PARAM_MTAS_DOCUMENT + "." + id + "."
+ NAME_MTAS_DOCUMENT_LIST_EXPAND_NUMBER, 10);
ignoreRegexps[tmpCounter] = rb.req.getParams().get(PARAM_MTAS_DOCUMENT
+ "." + id + "." + NAME_MTAS_DOCUMENT_IGNORE_REGEXP, null);
ignoreLists[tmpCounter] = rb.req.getParams().get(PARAM_MTAS_DOCUMENT
+ "." + id + "." + NAME_MTAS_DOCUMENT_IGNORE_LIST, null);
ignoreListRegexps[tmpCounter] = rb.req.getParams()
.getBool(PARAM_MTAS_DOCUMENT + "." + id + "."
+ NAME_MTAS_DOCUMENT_IGNORE_LIST_REGEXP, false);
listNumbers[tmpCounter] = rb.req.getParams().get(
PARAM_MTAS_DOCUMENT + "." + id + "." + NAME_MTAS_DOCUMENT_NUMBER,
null);
tmpCounter++;
}
String uniqueKeyField = rb.req.getSchema().getUniqueKeyField().getName();
mtasFields.doDocument = true;
rb.setNeedDocList(true);
// Every id must name a field; register unseen fields in the registry.
for (String field : fields) {
if (field == null || field.isEmpty()) {
throw new IOException("no (valid) field in mtas document");
} else if (!mtasFields.list.containsKey(field)) {
mtasFields.list.put(field, new ComponentField(uniqueKeyField)); // depends on control dependency: [if], data = [none]
}
}
// Check that each per-id parameter array lines up with the field array.
MtasSolrResultUtil.compareAndCheck(keys, fields, NAME_MTAS_DOCUMENT_KEY,
NAME_MTAS_DOCUMENT_FIELD, true);
MtasSolrResultUtil.compareAndCheck(prefixes, fields,
NAME_MTAS_DOCUMENT_PREFIX, NAME_MTAS_DOCUMENT_FIELD, false);
MtasSolrResultUtil.compareAndCheck(types, fields, NAME_MTAS_DOCUMENT_TYPE,
NAME_MTAS_DOCUMENT_FIELD, false);
MtasSolrResultUtil.compareAndCheck(regexps, fields,
NAME_MTAS_DOCUMENT_REGEXP, NAME_MTAS_DOCUMENT_FIELD, false);
MtasSolrResultUtil.compareAndCheck(lists, fields, NAME_MTAS_DOCUMENT_LIST,
NAME_MTAS_DOCUMENT_FIELD, false);
MtasSolrResultUtil.compareAndCheck(ignoreRegexps, fields,
NAME_MTAS_DOCUMENT_IGNORE_REGEXP, NAME_MTAS_DOCUMENT_FIELD, false);
MtasSolrResultUtil.compareAndCheck(ignoreLists, fields,
NAME_MTAS_DOCUMENT_IGNORE_LIST, NAME_MTAS_DOCUMENT_FIELD, false);
MtasSolrResultUtil.compareAndCheck(listNumbers, fields,
NAME_MTAS_DOCUMENT_NUMBER, NAME_MTAS_DOCUMENT_FIELD, false);
// Build one ComponentDocument per configured field.
for (int i = 0; i < fields.length; i++) {
String key = (keys[i] == null) || (keys[i].isEmpty())
? String.valueOf(i) + ":" + fields[i] + ":" + prefixes[i]
: keys[i].trim();
String prefix = prefixes[i];
if (prefix == null || prefix.isEmpty()) {
throw new IOException("no (valid) prefix in mtas document");
}
String type = types[i];
String regexp = regexps[i];
String[] list = null;
Boolean listRegexp = listRegexps[i];
Boolean listExpand = listExpands[i];
int listExpandNumber = listExpandNumbers[i];
if (lists[i] != null) {
// Split on unescaped commas; "\," and "\\" are unescaped afterwards.
ArrayList<String> tmpList = new ArrayList<>();
String[] subList = lists[i].split("(?<!\\\\),");
for (int j = 0; j < subList.length; j++) {
tmpList.add(subList[j].replace("\\,", ",").replace("\\\\", "\\"));
}
list = tmpList.toArray(new String[tmpList.size()]);
}
int listNumber = Math.max(0,
(listNumbers[i] == null) || (listNumbers[i].isEmpty()) ? 0
: Integer.parseInt(listNumbers[i]));
String ignoreRegexp = ignoreRegexps[i];
String[] ignoreList = null;
Boolean ignoreListRegexp = ignoreListRegexps[i];
if (ignoreLists[i] != null) {
// Same unescaping rules as for the regular list.
ArrayList<String> tmpList = new ArrayList<>();
String[] subList = ignoreLists[i].split("(?<!\\\\),");
for (int j = 0; j < subList.length; j++) {
tmpList.add(subList[j].replace("\\,", ",").replace("\\\\", "\\"));
}
ignoreList = tmpList.toArray(new String[tmpList.size()]);
}
mtasFields.list.get(fields[i]).documentList.add(new ComponentDocument(
key, prefix, type, regexp, list, listNumber, listRegexp, listExpand,
listExpandNumber, ignoreRegexp, ignoreList, ignoreListRegexp));
}
}
} } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.