language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/serializer/JavaBeanSerializerTest.java | {
"start": 4117,
"end": 4299
} | class ____ {
public List<String> getL0() {
throw new RuntimeException();
}
public void setL0(List<String> l0) {
}
}
public static | C |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/FluxRepeatPredicate.java | {
"start": 1942,
"end": 3395
} | class ____<T>
extends Operators.MultiSubscriptionSubscriber<T, T> {
final CorePublisher<? extends T> source;
final BooleanSupplier predicate;
volatile int wip;
@SuppressWarnings("rawtypes")
static final AtomicIntegerFieldUpdater<RepeatPredicateSubscriber> WIP =
AtomicIntegerFieldUpdater.newUpdater(RepeatPredicateSubscriber.class, "wip");
long produced;
RepeatPredicateSubscriber(CorePublisher<? extends T> source,
CoreSubscriber<? super T> actual, BooleanSupplier predicate) {
super(actual);
this.source = source;
this.predicate = predicate;
}
@Override
public void onNext(T t) {
produced++;
actual.onNext(t);
}
@Override
public void onComplete() {
boolean b;
try {
b = predicate.getAsBoolean();
} catch (Throwable e) {
actual.onError(Operators.onOperatorError(e, actual.currentContext()));
return;
}
if (b) {
resubscribe();
} else {
actual.onComplete();
}
}
void resubscribe() {
if (WIP.getAndIncrement(this) == 0) {
do {
if (isCancelled()) {
return;
}
long c = produced;
if (c != 0L) {
produced = 0L;
produced(c);
}
source.subscribe(this);
} while (WIP.decrementAndGet(this) != 0);
}
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
return super.scanUnsafe(key);
}
}
}
| RepeatPredicateSubscriber |
java | apache__flink | flink-core/src/test/java/org/apache/flink/util/FlinkUserCodeClassLoadersTest.java | {
"start": 9462,
"end": 9743
} | class ____ be reachable anymore
assertThatThrownBy(() -> childClassLoader.loadClass(className))
.isInstanceOf(IllegalStateException.class);
}
@Test
void testParallelCapable() {
// It will be true only if all the super classes (except | should |
java | spring-projects__spring-boot | integration-test/spring-boot-actuator-integration-tests/src/test/java/org/springframework/boot/actuate/endpoint/web/annotation/AbstractWebEndpointIntegrationTests.java | {
"start": 29282,
"end": 29564
} | class ____ {
private final EndpointDelegate delegate;
VoidDeleteResponseEndpoint(EndpointDelegate delegate) {
this.delegate = delegate;
}
@DeleteOperation
void delete() {
this.delegate.delete();
}
}
@Endpoint(id = "nullwrite")
static | VoidDeleteResponseEndpoint |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/context/properties/ConfigurationPropertiesTests.java | {
"start": 89995,
"end": 90231
} | class ____ {
private final int age;
Outer(int age) {
this.age = age;
}
int getAge() {
return this.age;
}
}
@EnableConfigurationProperties(ConstructorBindingWithOuterClassConstructorBoundProperties.class)
static | Outer |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/impl/prefetch/TestBlockData.java | {
"start": 1193,
"end": 5496
} | class ____ extends AbstractHadoopTestBase {
@Test
public void testArgChecks() throws Exception {
// Should not throw.
new BlockData(10, 5);
new BlockData(5, 10);
new BlockData(0, 10);
// Verify it throws correctly.
intercept(IllegalArgumentException.class, "'fileSize' must not be negative",
() -> new BlockData(-1, 2));
intercept(IllegalArgumentException.class,
"'blockSize' must be a positive integer",
() -> new BlockData(10, 0));
intercept(IllegalArgumentException.class,
"'blockSize' must be a positive integer",
() -> new BlockData(10, -2));
intercept(IllegalArgumentException.class,
"'blockNumber' (-1) must be within the range [0, 3]",
() -> new BlockData(10, 3).isLastBlock(
-1));
intercept(IllegalArgumentException.class,
"'blockNumber' (11) must be within the range [0, 3]",
() -> new BlockData(10, 3).isLastBlock(
11));
}
@Test
public void testComputedFields() throws Exception {
testComputedFieldsHelper(0, 10);
testComputedFieldsHelper(1, 10);
testComputedFieldsHelper(10, 1);
testComputedFieldsHelper(10, 2);
testComputedFieldsHelper(10, 3);
}
private void testComputedFieldsHelper(long fileSize, int blockSize)
throws Exception {
BlockData bd = new BlockData(fileSize, blockSize);
if (fileSize == 0) {
assertFalse(bd.isLastBlock(0));
assertFalse(bd.isLastBlock(1));
assertFalse(bd.isValidOffset(0));
assertEquals(0, bd.getSize(0));
assertEquals("", bd.getStateString());
ExceptionAsserts.assertThrows(
IllegalArgumentException.class,
"'offset' (0) must be within the range [0, -1]",
() -> bd.getBlockNumber(0));
ExceptionAsserts.assertThrows(
IllegalArgumentException.class,
"'blockNumber' (0) must be within the range [0, -1]",
() -> bd.getStartOffset(0));
ExceptionAsserts.assertThrows(
IllegalArgumentException.class,
"'offset' (0) must be within the range [0, -1]",
() -> bd.getRelativeOffset(0, 0));
ExceptionAsserts.assertThrows(
IllegalArgumentException.class,
"'blockNumber' (0) must be within the range [0, -1]",
() -> bd.getState(0));
ExceptionAsserts.assertThrows(
IllegalArgumentException.class,
"'blockNumber' (0) must be within the range [0, -1]",
() -> bd.setState(0, BlockData.State.READY));
return;
}
assertEquals(fileSize, bd.getFileSize());
assertEquals(blockSize, bd.getBlockSize());
int expectedNumBlocks = (int) (fileSize / blockSize);
if (fileSize % blockSize > 0) {
expectedNumBlocks++;
}
assertEquals(expectedNumBlocks, bd.getNumBlocks());
int lastBlockNumber = expectedNumBlocks - 1;
for (int b = 0; b < lastBlockNumber; b++) {
assertFalse(bd.isLastBlock(b));
assertEquals(blockSize, bd.getSize(b));
}
assertTrue(bd.isLastBlock(lastBlockNumber));
int lastBlockSize = (int) (fileSize - blockSize * (expectedNumBlocks - 1));
assertEquals(lastBlockSize, bd.getSize(lastBlockNumber));
// Offset related methods.
for (long offset = 0; offset < fileSize; offset++) {
int expectedBlockNumber = (int) (offset / blockSize);
assertEquals(expectedBlockNumber, bd.getBlockNumber(offset));
for (int b = 0; b < expectedNumBlocks - 1; b++) {
long expectedStartOffset = b * blockSize;
assertEquals(expectedStartOffset, bd.getStartOffset(b));
int expectedRelativeOffset = (int) (offset - expectedStartOffset);
assertEquals(expectedRelativeOffset, bd.getRelativeOffset(b, offset));
}
}
// State methods.
for (int b = 0; b < expectedNumBlocks; b++) {
assertEquals(b * blockSize, bd.getStartOffset(b));
assertEquals(BlockData.State.NOT_READY, bd.getState(b));
bd.setState(b, BlockData.State.QUEUED);
assertEquals(BlockData.State.QUEUED, bd.getState(b));
bd.setState(b, BlockData.State.READY);
assertEquals(BlockData.State.READY, bd.getState(b));
bd.setState(b, BlockData.State.CACHED);
assertEquals(BlockData.State.CACHED, bd.getState(b));
}
}
}
| TestBlockData |
java | spring-projects__spring-framework | spring-jdbc/src/main/java/org/springframework/jdbc/core/support/AbstractSqlTypeValue.java | {
"start": 2045,
"end": 3100
} | class ____ implements SqlTypeValue {
@Override
public final void setTypeValue(PreparedStatement ps, int paramIndex, int sqlType, @Nullable String typeName)
throws SQLException {
Object value = createTypeValue(ps.getConnection(), sqlType, typeName);
if (sqlType == TYPE_UNKNOWN) {
ps.setObject(paramIndex, value);
}
else {
ps.setObject(paramIndex, value, sqlType);
}
}
/**
* Create the type value to be passed into {@code PreparedStatement.setObject}.
* @param con the JDBC Connection, if needed to create any database-specific objects
* @param sqlType the SQL type of the parameter we are setting
* @param typeName the type name of the parameter
* @return the type value
* @throws SQLException if an SQLException is encountered setting
* parameter values (that is, there's no need to catch SQLException)
* @see java.sql.PreparedStatement#setObject(int, Object, int)
*/
protected abstract Object createTypeValue(Connection con, int sqlType, @Nullable String typeName)
throws SQLException;
}
| AbstractSqlTypeValue |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/builditem/DevServicesResultBuildItem.java | {
"start": 6710,
"end": 7959
} | class ____ {
private String name;
private String containerId;
private Map<String, String> config;
private String description;
public DiscoveredServiceBuilder name(String name) {
this.name = Objects.requireNonNull(name, "name cannot be null");
return this;
}
public DiscoveredServiceBuilder feature(Feature feature) {
this.name = feature.getName();
return this;
}
public DiscoveredServiceBuilder containerId(String containerId) {
this.containerId = containerId;
return this;
}
public DiscoveredServiceBuilder config(Map<String, String> config) {
this.config = config;
return this;
}
public DiscoveredServiceBuilder description(String description) {
this.description = description;
return this;
}
public DevServicesResultBuildItem build() {
if (name == null) {
throw new IllegalStateException("name cannot be null");
}
return new DevServicesResultBuildItem(name, description, containerId, config);
}
}
public static | DiscoveredServiceBuilder |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesReservation.java | {
"start": 6085,
"end": 6168
} | class ____ a filter in the Guice injector for the
* MockRM
*/
public static | as |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/DefaultS3ClientFactory.java | {
"start": 4564,
"end": 20692
} | class ____ extends Configured
implements S3ClientFactory {
private static final String REQUESTER_PAYS_HEADER_VALUE = "requester";
private static final String S3_SERVICE_NAME = "s3";
private static final Pattern VPC_ENDPOINT_PATTERN =
Pattern.compile("^(?:.+\\.)?([a-z0-9-]+)\\.vpce\\.amazonaws\\.(?:com|com\\.cn)$");
/**
* Subclasses refer to this.
*/
protected static final Logger LOG =
LoggerFactory.getLogger(DefaultS3ClientFactory.class);
/**
* A one-off warning of default region chains in use.
*/
private static final LogExactlyOnce WARN_OF_DEFAULT_REGION_CHAIN =
new LogExactlyOnce(LOG);
/**
* Warning message printed when the SDK Region chain is in use.
*/
private static final String SDK_REGION_CHAIN_IN_USE =
"S3A filesystem client is using"
+ " the SDK region resolution chain.";
/** Exactly once log to inform about ignoring the AWS-SDK Warnings for CSE. */
private static final LogExactlyOnce IGNORE_CSE_WARN = new LogExactlyOnce(LOG);
/**
* Error message when an endpoint is set with FIPS enabled: {@value}.
*/
@VisibleForTesting
public static final String ERROR_ENDPOINT_WITH_FIPS =
"Non central endpoint cannot be set when " + FIPS_ENDPOINT + " is true";
/**
* A one-off log stating whether S3 Access Grants are enabled.
*/
private static final LogExactlyOnce LOG_S3AG_ENABLED = new LogExactlyOnce(LOG);
@Override
public S3Client createS3Client(
final URI uri,
final S3ClientCreationParameters parameters) throws IOException {
Configuration conf = getConf();
String bucket = uri.getHost();
ApacheHttpClient.Builder httpClientBuilder = AWSClientConfig
.createHttpClientBuilder(conf)
.proxyConfiguration(AWSClientConfig.createProxyConfiguration(conf, bucket));
return configureClientBuilder(S3Client.builder(), parameters, conf, bucket)
.httpClientBuilder(httpClientBuilder)
.build();
}
@Override
public S3AsyncClient createS3AsyncClient(
final URI uri,
final S3ClientCreationParameters parameters) throws IOException {
Configuration conf = getConf();
String bucket = uri.getHost();
NettyNioAsyncHttpClient.Builder httpClientBuilder = AWSClientConfig
.createAsyncHttpClientBuilder(conf)
.proxyConfiguration(AWSClientConfig.createAsyncProxyConfiguration(conf, bucket));
MultipartConfiguration multipartConfiguration = MultipartConfiguration.builder()
.minimumPartSizeInBytes(parameters.getMinimumPartSize())
.thresholdInBytes(parameters.getMultiPartThreshold())
.build();
S3AsyncClientBuilder s3AsyncClientBuilder =
configureClientBuilder(S3AsyncClient.builder(), parameters, conf, bucket)
.httpClientBuilder(httpClientBuilder);
// multipart upload pending with HADOOP-19326.
if (!parameters.isClientSideEncryptionEnabled() &&
!parameters.isAnalyticsAcceleratorEnabled()) {
s3AsyncClientBuilder.multipartConfiguration(multipartConfiguration)
.multipartEnabled(parameters.isMultipartCopy());
}
return s3AsyncClientBuilder.build();
}
@Override
public S3TransferManager createS3TransferManager(final S3AsyncClient s3AsyncClient) {
return S3TransferManager.builder()
.s3Client(s3AsyncClient)
.build();
}
/**
* Configure a sync or async S3 client builder.
* This method handles all shared configuration, including
* path style access, credentials and whether or not to use S3Express
* CreateSession.
* @param builder S3 client builder
* @param parameters parameter object
* @param conf configuration object
* @param bucket bucket name
* @return the builder object
* @param <BuilderT> S3 client builder type
* @param <ClientT> S3 client type
*/
private <BuilderT extends S3BaseClientBuilder<BuilderT, ClientT>, ClientT> BuilderT configureClientBuilder(
BuilderT builder, S3ClientCreationParameters parameters, Configuration conf, String bucket)
throws IOException {
configureEndpointAndRegion(builder, parameters, conf);
// add a plugin to add a Content-MD5 header.
// this is required when performing some operations with third party stores
// (for example: bulk delete), and is somewhat harmless when working with AWS S3.
if (parameters.isMd5HeaderEnabled()) {
LOG.debug("MD5 header enabled");
builder.addPlugin(LegacyMd5Plugin.create());
}
//when to calculate request checksums.
final RequestChecksumCalculation checksumCalculation =
parameters.isChecksumCalculationEnabled()
? RequestChecksumCalculation.WHEN_SUPPORTED
: RequestChecksumCalculation.WHEN_REQUIRED;
LOG.debug("Using checksum calculation policy: {}", checksumCalculation);
builder.requestChecksumCalculation(checksumCalculation);
// response checksum validation. Slow, even with CRC32 checksums.
final ResponseChecksumValidation checksumValidation;
checksumValidation = parameters.isChecksumValidationEnabled()
? ResponseChecksumValidation.WHEN_SUPPORTED
: ResponseChecksumValidation.WHEN_REQUIRED;
LOG.debug("Using checksum validation policy: {}", checksumValidation);
builder.responseChecksumValidation(checksumValidation);
maybeApplyS3AccessGrantsConfigurations(builder, conf);
S3Configuration serviceConfiguration = S3Configuration.builder()
.pathStyleAccessEnabled(parameters.isPathStyleAccess())
.build();
final ClientOverrideConfiguration.Builder override =
createClientOverrideConfiguration(parameters, conf);
S3BaseClientBuilder<BuilderT, ClientT> s3BaseClientBuilder = builder
.overrideConfiguration(override.build())
.credentialsProvider(parameters.getCredentialSet())
.disableS3ExpressSessionAuth(!parameters.isExpressCreateSession())
.serviceConfiguration(serviceConfiguration);
if (LOG.isTraceEnabled()) {
// if this log is set to "trace" then we turn on logging of SDK metrics.
// The metrics itself will log at info; it is just that reflection work
// would be needed to change that setting safely for shaded and unshaded aws artifacts.
s3BaseClientBuilder.overrideConfiguration(o ->
o.addMetricPublisher(LoggingMetricPublisher.create()));
}
if (conf.getBoolean(HTTP_SIGNER_ENABLED, HTTP_SIGNER_ENABLED_DEFAULT)) {
// use an http signer through an AuthScheme
final AuthScheme<AwsCredentialsIdentity> signer =
createHttpSigner(conf, AUTH_SCHEME_AWS_SIGV_4, HTTP_SIGNER_CLASS_NAME);
builder.putAuthScheme(signer);
}
return (BuilderT) s3BaseClientBuilder;
}
/**
* Create an override configuration for an S3 client.
* @param parameters parameter object
* @param conf configuration object
* @throws IOException any IOE raised, or translated exception
* @throws RuntimeException some failures creating an http signer
* @return the override configuration
* @throws IOException any IOE raised, or translated exception
*/
protected ClientOverrideConfiguration.Builder createClientOverrideConfiguration(
S3ClientCreationParameters parameters, Configuration conf) throws IOException {
final ClientOverrideConfiguration.Builder clientOverrideConfigBuilder =
AWSClientConfig.createClientConfigBuilder(conf, AWS_SERVICE_IDENTIFIER_S3);
// add any headers
parameters.getHeaders().forEach((h, v) -> clientOverrideConfigBuilder.putHeader(h, v));
if (parameters.isRequesterPays()) {
// All calls must acknowledge requester will pay via header.
clientOverrideConfigBuilder.putHeader(REQUESTER_PAYS_HEADER, REQUESTER_PAYS_HEADER_VALUE);
}
if (!StringUtils.isEmpty(parameters.getUserAgentSuffix())) {
clientOverrideConfigBuilder.putAdvancedOption(SdkAdvancedClientOption.USER_AGENT_SUFFIX,
parameters.getUserAgentSuffix());
}
if (parameters.getExecutionInterceptors() != null) {
for (ExecutionInterceptor interceptor : parameters.getExecutionInterceptors()) {
clientOverrideConfigBuilder.addExecutionInterceptor(interceptor);
}
}
if (parameters.getMetrics() != null) {
clientOverrideConfigBuilder.addMetricPublisher(
new AwsStatisticsCollector(parameters.getMetrics()));
}
final RetryPolicy.Builder retryPolicyBuilder = AWSClientConfig.createRetryPolicyBuilder(conf);
clientOverrideConfigBuilder.retryPolicy(retryPolicyBuilder.build());
return clientOverrideConfigBuilder;
}
/**
* This method configures the endpoint and region for a S3 client.
* The order of configuration is:
*
* <ol>
* <li>If region is configured via fs.s3a.endpoint.region, use it.</li>
* <li>If endpoint is configured via via fs.s3a.endpoint, set it.
* If no region is configured, try to parse region from endpoint. </li>
* <li> If no region is configured, and it could not be parsed from the endpoint,
* set the default region as US_EAST_2</li>
* <li> If configured region is empty, fallback to SDK resolution chain. </li>
* <li> S3 cross region is enabled by default irrespective of region or endpoint
* is set or not.</li>
* </ol>
*
* @param builder S3 client builder.
* @param parameters parameter object
* @param conf conf configuration object
* @param <BuilderT> S3 client builder type
* @param <ClientT> S3 client type
* @throws IllegalArgumentException if endpoint is set when FIPS is enabled.
*/
private <BuilderT extends S3BaseClientBuilder<BuilderT, ClientT>, ClientT> void configureEndpointAndRegion(
BuilderT builder, S3ClientCreationParameters parameters, Configuration conf) {
final String endpointStr = parameters.getEndpoint();
final URI endpoint = getS3Endpoint(endpointStr, conf);
final String configuredRegion = parameters.getRegion();
Region region = null;
String origin = "";
// If the region was configured, set it.
if (configuredRegion != null && !configuredRegion.isEmpty()) {
origin = AWS_REGION;
region = Region.of(configuredRegion);
}
// FIPs? Log it, then reject any attempt to set an endpoint
final boolean fipsEnabled = parameters.isFipsEnabled();
if (fipsEnabled) {
LOG.debug("Enabling FIPS mode");
}
// always setting it guarantees the value is non-null,
// which tests expect.
builder.fipsEnabled(fipsEnabled);
if (endpoint != null) {
boolean endpointEndsWithCentral =
endpointStr.endsWith(CENTRAL_ENDPOINT);
checkArgument(!fipsEnabled || endpointEndsWithCentral, "%s : %s",
ERROR_ENDPOINT_WITH_FIPS,
endpoint);
// No region was configured,
// determine the region from the endpoint.
if (region == null) {
region = getS3RegionFromEndpoint(endpointStr,
endpointEndsWithCentral);
if (region != null) {
origin = "endpoint";
}
}
// No need to override endpoint with "s3.amazonaws.com".
// Let the client take care of endpoint resolution. Overriding
// the endpoint with "s3.amazonaws.com" causes 400 Bad Request
// errors for non-existent buckets and objects.
// ref: https://github.com/aws/aws-sdk-java-v2/issues/4846
if (!endpointEndsWithCentral) {
builder.endpointOverride(endpoint);
LOG.debug("Setting endpoint to {}", endpoint);
} else {
origin = "central endpoint with cross region access";
LOG.debug("Enabling cross region access for endpoint {}",
endpointStr);
}
}
if (region != null) {
builder.region(region);
} else if (configuredRegion == null) {
// no region is configured, and none could be determined from the endpoint.
// Use US_EAST_2 as default.
region = Region.of(AWS_S3_DEFAULT_REGION);
builder.region(region);
origin = "cross region access fallback";
} else if (configuredRegion.isEmpty()) {
// region configuration was set to empty string.
// allow this if people really want it; it is OK to rely on this
// when deployed in EC2.
WARN_OF_DEFAULT_REGION_CHAIN.warn(SDK_REGION_CHAIN_IN_USE);
LOG.debug(SDK_REGION_CHAIN_IN_USE);
origin = "SDK region chain";
}
boolean isCrossRegionAccessEnabled = conf.getBoolean(AWS_S3_CROSS_REGION_ACCESS_ENABLED,
AWS_S3_CROSS_REGION_ACCESS_ENABLED_DEFAULT);
// s3 cross region access
if (isCrossRegionAccessEnabled) {
builder.crossRegionAccessEnabled(true);
}
LOG.debug("Setting region to {} from {} with cross region access {}",
region, origin, isCrossRegionAccessEnabled);
}
/**
* Given a endpoint string, create the endpoint URI.
*
* @param endpoint possibly null endpoint.
* @param conf config to build the URI from.
* @return an endpoint uri
*/
protected static URI getS3Endpoint(String endpoint, final Configuration conf) {
boolean secureConnections = conf.getBoolean(SECURE_CONNECTIONS, DEFAULT_SECURE_CONNECTIONS);
String protocol = secureConnections ? "https" : "http";
if (endpoint == null || endpoint.isEmpty()) {
// don't set an endpoint if none is configured, instead let the SDK figure it out.
return null;
}
if (!endpoint.contains("://")) {
endpoint = String.format("%s://%s", protocol, endpoint);
}
try {
return new URI(endpoint);
} catch (URISyntaxException e) {
throw new IllegalArgumentException(e);
}
}
/**
* Parses the endpoint to get the region.
* If endpoint is the central one, use US_EAST_2.
*
* @param endpoint the configure endpoint.
* @param endpointEndsWithCentral true if the endpoint is configured as central.
* @return the S3 region, null if unable to resolve from endpoint.
*/
@VisibleForTesting
static Region getS3RegionFromEndpoint(final String endpoint,
final boolean endpointEndsWithCentral) {
if (!endpointEndsWithCentral) {
// S3 VPC endpoint parsing
Matcher matcher = VPC_ENDPOINT_PATTERN.matcher(endpoint);
if (matcher.find()) {
LOG.debug("Mapping to VPCE");
LOG.debug("Endpoint {} is vpc endpoint; parsing region as {}", endpoint, matcher.group(1));
return Region.of(matcher.group(1));
}
LOG.debug("Endpoint {} is not the default; parsing", endpoint);
return AwsHostNameUtils.parseSigningRegion(endpoint, S3_SERVICE_NAME).orElse(null);
}
// Select default region here to enable cross-region access.
// If both "fs.s3a.endpoint" and "fs.s3a.endpoint.region" are empty,
// Spark sets "fs.s3a.endpoint" to "s3.amazonaws.com".
// This applies to Spark versions with the changes of SPARK-35878.
// ref:
// https://github.com/apache/spark/blob/v3.5.0/core/
// src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala#L528
// If we do not allow cross region access, Spark would not be able to
// access any bucket that is not present in the given region.
// Hence, we should use default region us-east-2 to allow cross-region
// access.
return Region.of(AWS_S3_DEFAULT_REGION);
}
private static <BuilderT extends S3BaseClientBuilder<BuilderT, ClientT>, ClientT> void
maybeApplyS3AccessGrantsConfigurations(BuilderT builder, Configuration conf) {
boolean isS3AccessGrantsEnabled = conf.getBoolean(AWS_S3_ACCESS_GRANTS_ENABLED, false);
if (!isS3AccessGrantsEnabled){
LOG.debug("S3 Access Grants plugin is not enabled.");
return;
}
boolean isFallbackEnabled =
conf.getBoolean(AWS_S3_ACCESS_GRANTS_FALLBACK_TO_IAM_ENABLED, false);
S3AccessGrantsPlugin accessGrantsPlugin =
S3AccessGrantsPlugin.builder()
.enableFallback(isFallbackEnabled)
.build();
builder.addPlugin(accessGrantsPlugin);
LOG_S3AG_ENABLED.info(
"S3 Access Grants plugin is enabled with IAM fallback set to {}", isFallbackEnabled);
}
}
| DefaultS3ClientFactory |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/MultipleInheritanceTest.java | {
"start": 1332,
"end": 1900
} | class ____ {
@Test
public void test(SessionFactoryScope factoryScope) {
factoryScope.inTransaction( session -> {
Car car = new Car();
CarPart carPart = new CarPart();
CarPK id = new CarPK();
id.carId1 = "1";
carPart.id = id;
session.persist( carPart );
car.id = id;
car.parts = carPart;
((BasicCar) car).parts = carPart;
session.persist( car );
session.flush();
session.clear();
Car loadedCar = session.find( Car.class, id );
assertNotNull( loadedCar.parts );
} );
}
@Embeddable
public static | MultipleInheritanceTest |
java | spring-projects__spring-framework | spring-websocket/src/main/java/org/springframework/web/socket/config/WebSocketNamespaceHandler.java | {
"start": 960,
"end": 1452
} | class ____ extends NamespaceHandlerSupport {
private static final boolean SPRING_MESSAGING_PRESENT = ClassUtils.isPresent(
"org.springframework.messaging.Message", WebSocketNamespaceHandler.class.getClassLoader());
@Override
public void init() {
registerBeanDefinitionParser("handlers", new HandlersBeanDefinitionParser());
if (SPRING_MESSAGING_PRESENT) {
registerBeanDefinitionParser("message-broker", new MessageBrokerBeanDefinitionParser());
}
}
}
| WebSocketNamespaceHandler |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/io/network/util/TestPooledBufferProvider.java | {
"start": 1665,
"end": 4336
} | class ____ implements BufferProvider {
private final BlockingQueue<MemorySegment> segments = new LinkedBlockingDeque<>();
private final TestBufferFactory bufferFactory;
private final PooledBufferProviderRecycler bufferRecycler;
public TestPooledBufferProvider(int poolSize) {
this(poolSize, 32 * 1024);
}
public TestPooledBufferProvider(int poolSize, int bufferSize) {
checkArgument(poolSize > 0);
this.bufferRecycler = new PooledBufferProviderRecycler(segments);
this.bufferFactory = new TestBufferFactory(poolSize, bufferSize, bufferRecycler);
}
@Override
public Buffer requestBuffer() {
MemorySegment memorySegment = requestMemorySegment();
return memorySegment == null ? null : new NetworkBuffer(memorySegment, bufferRecycler);
}
@Override
public BufferBuilder requestBufferBuilder() {
MemorySegment memorySegment = requestMemorySegment();
if (memorySegment != null) {
return new BufferBuilder(memorySegment, bufferRecycler);
}
return null;
}
@Override
public BufferBuilder requestBufferBuilder(int targetChannel) {
return requestBufferBuilder();
}
@Override
public BufferBuilder requestBufferBuilderBlocking() throws InterruptedException {
return new BufferBuilder(requestMemorySegmentBlocking(), bufferRecycler);
}
@Override
public BufferBuilder requestBufferBuilderBlocking(int targetChannel)
throws InterruptedException {
return requestBufferBuilderBlocking();
}
@Override
public boolean addBufferListener(BufferListener listener) {
return bufferRecycler.registerListener(listener);
}
@Override
public boolean isDestroyed() {
return false;
}
@Override
public MemorySegment requestMemorySegment() {
final MemorySegment buffer = segments.poll();
if (buffer != null) {
return buffer;
}
return bufferFactory.createMemorySegment();
}
@Override
public MemorySegment requestMemorySegmentBlocking() throws InterruptedException {
MemorySegment buffer = segments.poll();
if (buffer != null) {
return buffer;
}
buffer = bufferFactory.createMemorySegment();
if (buffer != null) {
return buffer;
}
return segments.take();
}
@Override
public CompletableFuture<?> getAvailableFuture() {
return AVAILABLE;
}
public int getNumberOfAvailableSegments() {
return segments.size();
}
private static | TestPooledBufferProvider |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/typeutils/base/array/ShortPrimitiveArraySerializer.java | {
"start": 3576,
"end": 3791
} | class ____
extends SimpleTypeSerializerSnapshot<short[]> {
public ShortPrimitiveArraySerializerSnapshot() {
super(() -> INSTANCE);
}
}
}
| ShortPrimitiveArraySerializerSnapshot |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java | {
"start": 49438,
"end": 49550
} | class ____ keys in this file. */
public Class getKeyClass() { return keyClass; }
/** @return Returns the | of |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/web/servlet/ServletContextInitializerBeansTests.java | {
"start": 12505,
"end": 12774
} | class ____ {
static final int ORDER = 5;
@Bean
@ServletRegistration
@Order(ORDER)
OrderedTestServlet testServlet() {
return new OrderedTestServlet();
}
}
@Configuration(proxyBeanMethods = false)
static | OrderedServletConfigurationWithAnnotationAndOrder |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/ContainerUpdates.java | {
"start": 1073,
"end": 2175
} | class ____ {
final List<UpdateContainerRequest> increaseRequests = new ArrayList<>();
final List<UpdateContainerRequest> decreaseRequests = new ArrayList<>();
final List<UpdateContainerRequest> promotionRequests = new ArrayList<>();
final List<UpdateContainerRequest> demotionRequests = new ArrayList<>();
/**
* Returns Container Increase Requests.
* @return Container Increase Requests.
*/
public List<UpdateContainerRequest> getIncreaseRequests() {
return increaseRequests;
}
/**
* Returns Container Decrease Requests.
* @return Container Decrease Requests.
*/
public List<UpdateContainerRequest> getDecreaseRequests() {
return decreaseRequests;
}
/**
* Returns Container Promotion Requests.
* @return Container Promotion Requests.
*/
public List<UpdateContainerRequest> getPromotionRequests() {
return promotionRequests;
}
/**
* Returns Container Demotion Requests.
* @return Container Demotion Requests.
*/
public List<UpdateContainerRequest> getDemotionRequests() {
return demotionRequests;
}
}
| ContainerUpdates |
java | apache__camel | components/camel-xslt-saxon/src/test/java/org/apache/camel/component/xslt/SaxonInvalidXsltFileTest.java | {
"start": 1201,
"end": 2108
} | class ____ {
@Test
public void testInvalidStylesheet() {
try {
RouteBuilder builder = createRouteBuilder();
try (CamelContext context = new DefaultCamelContext()) {
context.addRoutes(builder);
context.start();
fail("Should have thrown an exception due XSL compilation error");
}
} catch (Exception e) {
// expected
assertIsInstanceOf(TransformerException.class, e.getCause().getCause().getCause());
}
}
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("seda:a").to(
"xslt-saxon:org/apache/camel/component/xslt/notfound.xsl?transformerFactoryClass=net.sf.saxon.TransformerFactoryImpl");
}
};
}
}
| SaxonInvalidXsltFileTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/UnnecessaryOptionalGetTest.java | {
"start": 9058,
"end": 9790
} | class ____ {
private void home() {
OptionalDouble opDouble = OptionalDouble.of(1.0);
OptionalInt opInt = OptionalInt.of(1);
OptionalLong opLong = OptionalLong.of(1L);
opDouble.ifPresent(x -> System.out.println(opDouble.getAsDouble()));
opInt.ifPresent(x -> System.out.println(opInt.getAsInt()));
opLong.ifPresent(x -> System.out.println(opLong.getAsLong()));
}
}
""")
.addOutputLines(
"Test.java",
"""
import java.util.OptionalDouble;
import java.util.OptionalInt;
import java.util.OptionalLong;
public | Test |
java | google__error-prone | core/src/test/java/com/google/errorprone/fixes/SuggestedFixesTest.java | {
"start": 68746,
"end": 69224
} | class ____ {
// BUG: Diagnostic contains: [QualifyTypeLocalClassChecker] Object
static Object staticField = new Object() {};
// BUG: Diagnostic contains: [QualifyTypeLocalClassChecker] Object
Object instanceField = new Object() {};
static {
// BUG: Diagnostic contains: [QualifyTypeLocalClassChecker] Object
new Object() {};
}
{
| Test |
java | apache__camel | test-infra/camel-test-infra-azure-storage-datalake/src/test/java/org/apache/camel/test/infra/azure/storage/datalake/services/AzureStorageDataLakeServiceFactory.java | {
"start": 1463,
"end": 1584
} | class ____ extends AzureStorageDataLakeRemoteInfraService implements AzureService {
}
}
| AzureStorageDataLakeRemoteService |
java | google__guava | android/guava-tests/test/com/google/common/collect/TreeTraverserTest.java | {
"start": 1503,
"end": 3530
} | class ____ extends Node {
final List<Tree> children;
Tree(char value, Tree... children) {
super(value);
this.children = asList(children);
}
}
private static final TreeTraverser<Tree> ADAPTER =
new TreeTraverser<Tree>() {
@Override
public Iterable<Tree> children(Tree node) {
return node.children;
}
};
private static final TreeTraverser<Tree> ADAPTER_USING_USING =
TreeTraverser.using(
new Function<Tree, Iterable<Tree>>() {
@Override
public Iterable<Tree> apply(Tree node) {
return node.children;
}
});
// h
// / | \
// / e \
// d g
// /|\ |
// / | \ f
// a b c
static final Tree a = new Tree('a');
static final Tree b = new Tree('b');
static final Tree c = new Tree('c');
static final Tree d = new Tree('d', a, b, c);
static final Tree e = new Tree('e');
static final Tree f = new Tree('f');
static final Tree g = new Tree('g', f);
static final Tree h = new Tree('h', d, e, g);
static String iterationOrder(Iterable<? extends Node> iterable) {
StringBuilder builder = new StringBuilder();
for (Node t : iterable) {
builder.append(t.value);
}
return builder.toString();
}
public void testPreOrder() {
assertThat(iterationOrder(ADAPTER.preOrderTraversal(h))).isEqualTo("hdabcegf");
}
public void testPostOrder() {
assertThat(iterationOrder(ADAPTER.postOrderTraversal(h))).isEqualTo("abcdefgh");
}
public void testBreadthOrder() {
assertThat(iterationOrder(ADAPTER.breadthFirstTraversal(h))).isEqualTo("hdegabcf");
}
public void testUsing() {
assertThat(iterationOrder(ADAPTER_USING_USING.preOrderTraversal(h))).isEqualTo("hdabcegf");
}
@J2ktIncompatible
@GwtIncompatible // NullPointerTester
public void testNulls() {
NullPointerTester tester = new NullPointerTester();
tester.testAllPublicInstanceMethods(ADAPTER);
}
}
| Tree |
java | apache__camel | core/camel-yaml-io/src/test/java/org/apache/camel/yaml/out/ModelWriterTest.java | {
"start": 2112,
"end": 12850
} | class ____ {
@Test
public void testTimerLog() throws Exception {
StringWriter sw = new StringWriter();
ModelWriter writer = new ModelWriter(sw);
RouteDefinition route = new RouteDefinition();
route.setId("myRoute0");
route.setInput(new FromDefinition("timer:yaml?period=1234&includeMetadata=true"));
SetBodyDefinition sb = new SetBodyDefinition();
sb.setExpression(new ConstantExpression("Hello from yaml"));
route.addOutput(sb);
route.addOutput(new LogDefinition("${body}"));
writer.writeRouteDefinition(route);
String out = sw.toString();
String expected = stripLineComments(Paths.get("src/test/resources/route0b.yaml"), "#", true);
Assertions.assertEquals(expected, out);
}
@Test
public void testFromTo() throws Exception {
StringWriter sw = new StringWriter();
ModelWriter writer = new ModelWriter(sw);
RouteDefinition route = new RouteDefinition();
route.setId("myRoute1");
route.setInput(new FromDefinition("direct:start"));
ToDefinition to = new ToDefinition("log:input");
route.addOutput(to);
ToDefinition to2 = new ToDefinition("mock:result");
to2.setPattern("InOut");
route.addOutput(to2);
writer.writeRouteDefinition(route);
String out = sw.toString();
String expected = stripLineComments(Paths.get("src/test/resources/route1.yaml"), "#", true);
Assertions.assertEquals(expected, out);
}
@Test
public void testFromSplitTo() throws Exception {
StringWriter sw = new StringWriter();
ModelWriter writer = new ModelWriter(sw);
RouteDefinition route = new RouteDefinition();
route.setId("myRoute2");
route.setInput(new FromDefinition("direct:start2"));
SplitDefinition sp = new SplitDefinition();
SimpleExpression e = new SimpleExpression("${body}");
e.setResultTypeName("int.class");
sp.setExpression(e);
sp.setStreaming("true");
route.addOutput(sp);
ToDefinition to = new ToDefinition("kafka:line");
sp.addOutput(to);
to = new ToDefinition("mock:result2");
route.addOutput(to);
writer.writeRouteDefinition(route);
String out = sw.toString();
String expected = stripLineComments(Paths.get("src/test/resources/route2.yaml"), "#", true);
Assertions.assertEquals(expected, out);
}
@Test
public void testFromAggregateTo() throws Exception {
StringWriter sw = new StringWriter();
ModelWriter writer = new ModelWriter(sw);
RouteDefinition route = new RouteDefinition();
route.setId("myRoute3");
route.setInput(new FromDefinition("direct:start2"));
final AggregateDefinition ag = createAggregateDefinition();
route.addOutput(ag);
ToDefinition to = new ToDefinition("kafka:line");
ag.addOutput(to);
to = new ToDefinition("mock:result2");
route.addOutput(to);
writer.writeRouteDefinition(route);
String out = sw.toString();
String expected = stripLineComments(Paths.get("src/test/resources/route3.yaml"), "#", true);
Assertions.assertEquals(expected, out);
}
private static AggregateDefinition createAggregateDefinition() {
AggregateDefinition ag = new AggregateDefinition();
SimpleExpression e = new SimpleExpression("${body}");
e.setResultTypeName("int.class");
ag.setExpression(e);
ag.setCorrelationExpression(new ExpressionSubElementDefinition(new HeaderExpression("myHeader")));
ConstantExpression cons = new ConstantExpression("5");
cons.setResultTypeName("int.class");
ag.setCompletionSizeExpression(new ExpressionSubElementDefinition(cons));
ag.setCompletionTimeoutExpression(new ExpressionSubElementDefinition(new ConstantExpression("4000")));
return ag;
}
@Test
public void testFromSetBodyTo() throws Exception {
StringWriter sw = new StringWriter();
ModelWriter writer = new ModelWriter(sw);
RouteDefinition route = new RouteDefinition();
route.setId("myRoute4");
route.setInput(new FromDefinition("direct:start"));
SetBodyDefinition body = new SetBodyDefinition();
body.setExpression(new ConstantExpression("{\n key: '123'\n}"));
route.addOutput(body);
ToDefinition to = new ToDefinition("mock:result");
route.addOutput(to);
writer.writeRouteDefinition(route);
String out = sw.toString();
String expected = stripLineComments(Paths.get("src/test/resources/route4.yaml"), "#", true);
Assertions.assertEquals(expected, out);
}
@Test
public void testFromLogSetBodyTo() throws Exception {
StringWriter sw = new StringWriter();
ModelWriter writer = new ModelWriter(sw);
RouteDefinition route = new RouteDefinition();
route.setId("myRoute5");
route.setInput(new FromDefinition("direct:start"));
LogDefinition log = new LogDefinition();
log.setLoggingLevel("WARN");
log.setLogName("myLogger");
route.addOutput(log);
SetBodyDefinition body = new SetBodyDefinition();
body.setExpression(new SimpleExpression("${body}"));
route.addOutput(body);
ToDefinition to = new ToDefinition("mock:result");
route.addOutput(to);
writer.writeRouteDefinition(route);
String out = sw.toString();
String expected = stripLineComments(Paths.get("src/test/resources/route5.yaml"), "#", true);
Assertions.assertEquals(expected, out);
}
@Disabled("TODO: https://issues.apache.org/jira/browse/CAMEL-21490")
@Test
public void testFromChoice() throws Exception {
StringWriter sw = new StringWriter();
ModelWriter writer = new ModelWriter(sw);
RouteDefinition route = new RouteDefinition();
route.setId("myRoute6");
route.setInput(new FromDefinition("direct:start6"));
ChoiceDefinition choice = new ChoiceDefinition();
route.addOutput(choice);
choice.when().simple("${header.age} < 21").to("mock:young");
choice.when().simple("${header.age} > 21 && ${header.age} < 70").to("mock:work");
choice.otherwise().to("mock:senior");
ToDefinition to = new ToDefinition("mock:result");
route.addOutput(to);
writer.writeRouteDefinition(route);
String out = sw.toString();
String expected = stripLineComments(Paths.get("src/test/resources/route6.yaml"), "#", true);
Assertions.assertEquals(expected, out);
}
@Test
public void testFromTryCatch() throws Exception {
StringWriter sw = new StringWriter();
ModelWriter writer = new ModelWriter(sw);
CamelContext context = new DefaultCamelContext();
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:start7").routeId("myRoute7")
.doTry()
.to("mock:try1")
.to("mock:try2")
.doCatch(IOException.class)
.to("mock:io1")
.to("mock:io2")
.doFinally()
.to("mock:finally1")
.to("mock:finally2")
.end()
.to("mock:result");
}
});
ModelCamelContext mcc = (ModelCamelContext) context;
writer.writeRouteDefinition(mcc.getRouteDefinition("myRoute7"));
String out = sw.toString();
String expected = stripLineComments(Paths.get("src/test/resources/route7.yaml"), "#", true);
Assertions.assertEquals(expected, out);
}
@Test
public void testTwoRoutes() throws Exception {
StringWriter sw = new StringWriter();
ModelWriter writer = new ModelWriter(sw);
RoutesDefinition routes = new RoutesDefinition();
RouteDefinition route = new RouteDefinition();
route.setId("myRoute0");
route.setInput(new FromDefinition("timer:yaml?period=1234"));
SetBodyDefinition sb = new SetBodyDefinition();
sb.setExpression(new ConstantExpression("Hello from yaml"));
route.addOutput(sb);
route.addOutput(new LogDefinition("${body}"));
routes.getRoutes().add(route);
route = new RouteDefinition();
route.setId("myRoute1");
route.setInput(new FromDefinition("direct:start"));
ToDefinition to = new ToDefinition("log:input");
route.addOutput(to);
ToDefinition to2 = new ToDefinition("mock:result");
to2.setPattern("InOut");
route.addOutput(to2);
routes.getRoutes().add(route);
writer.writeRoutesDefinition(routes);
String out = sw.toString();
String expected = stripLineComments(Paths.get("src/test/resources/route8b.yaml"), "#", true);
Assertions.assertEquals(expected, out);
}
@Test
public void testMarshal() throws Exception {
StringWriter sw = new StringWriter();
ModelWriter writer = new ModelWriter(sw);
RouteDefinition route = new RouteDefinition();
route.setId("myRoute9");
route.setInput(new FromDefinition("timer:foo"));
MarshalDefinition mar = new MarshalDefinition();
mar.setDataFormatType(new CsvDataFormat());
route.addOutput(mar);
route.addOutput(new LogDefinition("${body}"));
writer.writeRouteDefinition(route);
String out = sw.toString();
String expected = stripLineComments(Paths.get("src/test/resources/route9.yaml"), "#", true);
Assertions.assertEquals(expected, out);
}
@Test
@Disabled("CAMEL-20402")
public void testRest() throws Exception {
StringWriter sw = new StringWriter();
ModelWriter writer = new ModelWriter(sw);
RestDefinition rest = new RestDefinition();
rest.verb("get").to("direct:start");
writer.writeRestDefinition(rest);
RouteDefinition route = new RouteDefinition();
route.setId("myRoute10");
route.setInput(new FromDefinition("direct:start"));
SetBodyDefinition sb = new SetBodyDefinition(new SimpleExpression("${body}${body}"));
route.addOutput(sb);
writer.writeRouteDefinition(route);
String out = sw.toString();
String expected = stripLineComments(Paths.get("src/test/resources/route10.yaml"), "#", true);
Assertions.assertEquals(expected, out);
}
}
| ModelWriterTest |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/RGeo.java | {
"start": 833,
"end": 14580
} | interface ____<V> extends RScoredSortedSet<V>, RGeoAsync<V> {
/**
* Adds geospatial member.
*
* @param longitude - longitude of object
* @param latitude - latitude of object
* @param member - object itself
* @return number of elements added to the sorted set,
* not including elements already existing for which
* the score was updated
*/
long add(double longitude, double latitude, V member);
/**
* Adds geospatial members.
*
* @param entries - objects
* @return number of elements added to the sorted set,
* not including elements already existing for which
* the score was updated
*/
long add(GeoEntry... entries);
/**
* Adds geospatial member only if it's already exists.
* <p>
* Requires <b>Redis 6.2.0 and higher.</b>
*
* @param longitude - longitude of object
* @param latitude - latitude of object
* @param member - object itself
* @return number of elements added to the sorted set
*/
Boolean addIfExists(double longitude, double latitude, V member);
/**
* Adds geospatial members only if it's already exists.
* <p>
* Requires <b>Redis 6.2.0 and higher.</b>
*
* @param entries - objects
* @return number of elements added to the sorted set
*/
long addIfExists(GeoEntry... entries);
/**
* Adds geospatial member only if has not been added before.
* <p>
* Requires <b>Redis 6.2.0 and higher.</b>
*
* @param longitude - longitude of object
* @param latitude - latitude of object
* @param member - object itself
* @return number of elements added to the sorted set
*/
boolean tryAdd(double longitude, double latitude, V member);
/**
* Adds geospatial members only if has not been added before.
* <p>
* Requires <b>Redis 6.2.0 and higher.</b>
*
* @param entries - objects
* @return number of elements added to the sorted set
*/
long tryAdd(GeoEntry... entries);
/**
* Returns distance between members in <code>GeoUnit</code> units.
*
* @param firstMember - first object
* @param secondMember - second object
* @param geoUnit - geo unit
* @return distance
*/
Double dist(V firstMember, V secondMember, GeoUnit geoUnit);
/**
* Returns 11 characters long Geohash string mapped by defined member.
*
* @param members - objects
* @return hash mapped by object
*/
Map<V, String> hash(V... members);
/**
* Returns geo-position mapped by defined member.
*
* @param members - objects
* @return geo position mapped by object
*/
Map<V, GeoPosition> pos(V... members);
/**
* Returns the members of a sorted set, which are within the
* borders of specified search conditions.
* <p>
* Usage examples:
* <pre>
* List objects = geo.search(GeoSearchArgs.from(15, 37)
* .radius(200, GeoUnit.KILOMETERS)
* .order(GeoOrder.ASC)
* .count(1)));
* </pre>
* <pre>
* List objects = geo.search(GeoSearchArgs.from(15, 37)
* .radius(200, GeoUnit.KILOMETERS)));
* </pre>
* <p>
* Requires <b>Redis 3.2.10 and higher.</b>
*
* @param args - search conditions object
* @return list of memebers
*/
List<V> search(GeoSearchArgs args);
/*
* Use search() method instead
*
*/
@Deprecated
List<V> radius(double longitude, double latitude, double radius, GeoUnit geoUnit);
/*
* Use search() method instead
*
*/
@Deprecated
List<V> radius(double longitude, double latitude, double radius, GeoUnit geoUnit, int count);
/*
* Use search() method instead
*
*/
@Deprecated
List<V> radius(double longitude, double latitude, double radius, GeoUnit geoUnit, GeoOrder geoOrder);
/*
* Use search() method instead
*
*/
@Deprecated
List<V> radius(double longitude, double latitude, double radius, GeoUnit geoUnit, GeoOrder geoOrder, int count);
/**
* Returns the distance mapped by member of a sorted set,
* which are within the borders of specified search conditions.
* <p>
* Usage examples:
* <pre>
* List objects = geo.searchWithDistance(GeoSearchArgs.from(15, 37)
* .radius(200, GeoUnit.KILOMETERS)
* .order(GeoOrder.ASC)
* .count(1)));
* </pre>
* <pre>
* List objects = geo.searchWithDistance(GeoSearchArgs.from(15, 37)
* .radius(200, GeoUnit.KILOMETERS)));
* </pre>
* <p>
* Requires <b>Redis 3.2.10 and higher.</b>
*
* @param args - search conditions object
* @return distance mapped by object
*/
Map<V, Double> searchWithDistance(GeoSearchArgs args);
/*
* Use searchWithDistance() method instead
*
*/
@Deprecated
Map<V, Double> radiusWithDistance(double longitude, double latitude, double radius, GeoUnit geoUnit);
/*
* Use searchWithDistance() method instead
*
*/
@Deprecated
Map<V, Double> radiusWithDistance(double longitude, double latitude, double radius, GeoUnit geoUnit, int count);
/*
* Use searchWithDistance() method instead
*
*/
@Deprecated
Map<V, Double> radiusWithDistance(double longitude, double latitude, double radius, GeoUnit geoUnit, GeoOrder geoOrder);
/*
* Use searchWithDistance() method instead
*
*/
@Deprecated
Map<V, Double> radiusWithDistance(double longitude, double latitude, double radius, GeoUnit geoUnit, GeoOrder geoOrder, int count);
/**
* Returns the position mapped by member of a sorted set,
* which are within the borders of specified search conditions.
* <p>
* Usage examples:
* <pre>
* List objects = geo.searchWithPosition(GeoSearchArgs.from(15, 37)
* .radius(200, GeoUnit.KILOMETERS)
* .order(GeoOrder.ASC)
* .count(1)));
* </pre>
* <pre>
* List objects = geo.searchWithPosition(GeoSearchArgs.from(15, 37)
* .radius(200, GeoUnit.KILOMETERS)));
* </pre>
* <p>
* Requires <b>Redis 3.2.10 and higher.</b>
*
* @param args - search conditions object
* @return position mapped by object
*/
Map<V, GeoPosition> searchWithPosition(GeoSearchArgs args);
/*
* Use searchWithPosition() method instead
*
*/
@Deprecated
Map<V, GeoPosition> radiusWithPosition(double longitude, double latitude, double radius, GeoUnit geoUnit);
/*
* Use searchWithPosition() method instead
*
*/
@Deprecated
Map<V, GeoPosition> radiusWithPosition(double longitude, double latitude, double radius, GeoUnit geoUnit, int count);
/*
* Use searchWithPosition() method instead
*
*/
@Deprecated
Map<V, GeoPosition> radiusWithPosition(double longitude, double latitude, double radius, GeoUnit geoUnit, GeoOrder geoOrder);
/*
* Use searchWithPosition() method instead
*
*/
@Deprecated
Map<V, GeoPosition> radiusWithPosition(double longitude, double latitude, double radius, GeoUnit geoUnit, GeoOrder geoOrder, int count);
/*
* Use search() method instead
*
*/
@Deprecated
List<V> radius(V member, double radius, GeoUnit geoUnit);
/*
* Use search() method instead
*
*/
@Deprecated
List<V> radius(V member, double radius, GeoUnit geoUnit, int count);
/*
* Use search() method instead
*
*/
@Deprecated
List<V> radius(V member, double radius, GeoUnit geoUnit, GeoOrder geoOrder);
/*
* Use search() method instead
*
*/
@Deprecated
List<V> radius(V member, double radius, GeoUnit geoUnit, GeoOrder geoOrder, int count);
/*
* Use searchWithDistance() method instead
*
*/
@Deprecated
Map<V, Double> radiusWithDistance(V member, double radius, GeoUnit geoUnit);
/*
* Use searchWithDistance() method instead
*
*/
@Deprecated
Map<V, Double> radiusWithDistance(V member, double radius, GeoUnit geoUnit, int count);
/*
* Use searchWithDistance() method instead
*
*/
@Deprecated
Map<V, Double> radiusWithDistance(V member, double radius, GeoUnit geoUnit, GeoOrder geoOrder);
/*
* Use searchWithDistance() method instead
*
*/
@Deprecated
Map<V, Double> radiusWithDistance(V member, double radius, GeoUnit geoUnit, GeoOrder geoOrder, int count);
/*
* Use searchWithPosition() method instead
*
*/
@Deprecated
Map<V, GeoPosition> radiusWithPosition(V member, double radius, GeoUnit geoUnit);
/*
* Use searchWithPosition() method instead
*
*/
@Deprecated
Map<V, GeoPosition> radiusWithPosition(V member, double radius, GeoUnit geoUnit, int count);
/*
* Use searchWithPosition() method instead
*
*/
@Deprecated
Map<V, GeoPosition> radiusWithPosition(V member, double radius, GeoUnit geoUnit, GeoOrder geoOrder);
/*
* Use searchWithPosition() method instead
*
*/
@Deprecated
Map<V, GeoPosition> radiusWithPosition(V member, double radius, GeoUnit geoUnit, GeoOrder geoOrder, int count);
/**
* Finds the members of a sorted set,
* which are within the borders of specified search conditions.
* <p>
* Stores result to <code>destName</code>.
* <p>
* Usage examples:
* <pre>
* long count = geo.storeSearchTo(GeoSearchArgs.from(15, 37)
* .radius(200, GeoUnit.KILOMETERS)
* .order(GeoOrder.ASC)
* .count(1)));
* </pre>
* <pre>
* long count = geo.storeSearchTo(GeoSearchArgs.from(15, 37)
* .radius(200, GeoUnit.KILOMETERS)));
* </pre>
*
* @param args - search conditions object
* @return length of result
*/
long storeSearchTo(String destName, GeoSearchArgs args);
/*
* Use storeSearchTo() method instead
*
*/
@Deprecated
long radiusStoreTo(String destName, double longitude, double latitude, double radius, GeoUnit geoUnit);
/*
* Use storeSearchTo() method instead
*
*/
@Deprecated
long radiusStoreTo(String destName, double longitude, double latitude, double radius, GeoUnit geoUnit, int count);
/*
* Use storeSearchTo() method instead
*
*/
@Deprecated
long radiusStoreTo(String destName, double longitude, double latitude, double radius, GeoUnit geoUnit, GeoOrder geoOrder, int count);
/*
* Use storeSearchTo() method instead
*
*/
@Deprecated
long radiusStoreTo(String destName, V member, double radius, GeoUnit geoUnit);
/*
* Use storeSearchTo() method instead
*
*/
@Deprecated
long radiusStoreTo(String destName, V member, double radius, GeoUnit geoUnit, int count);
/*
* Use storeSearchTo() method instead
*
*/
@Deprecated
long radiusStoreTo(String destName, V member, double radius, GeoUnit geoUnit, GeoOrder geoOrder, int count);
/**
* Finds the members of a sorted set,
* which are within the borders of specified search conditions.
* <p>
* Stores result to <code>destName</code> sorted by distance.
* <p>
* Usage examples:
* <pre>
* long count = geo.storeSortedSearchTo(GeoSearchArgs.from(15, 37)
* .radius(200, GeoUnit.KILOMETERS)
* .order(GeoOrder.ASC)
* .count(1)));
* </pre>
* <pre>
* long count = geo.storeSortedSearchTo(GeoSearchArgs.from(15, 37)
* .radius(200, GeoUnit.KILOMETERS)));
* </pre>
*
* @param args - search conditions object
* @return length of result
*/
long storeSortedSearchTo(String destName, GeoSearchArgs args);
/*
* Use storeSortedSearchTo() method instead
*
*/
@Deprecated
long radiusStoreSortedTo(String destName, double longitude, double latitude, double radius, GeoUnit geoUnit);
/*
* Use storeSortedSearchTo() method instead
*
*/
@Deprecated
long radiusStoreSortedTo(String destName, double longitude, double latitude, double radius, GeoUnit geoUnit, int count);
/*
* Use storeSortedSearchTo() method instead
*
*/
@Deprecated
long radiusStoreSortedTo(String destName, double longitude, double latitude, double radius, GeoUnit geoUnit, GeoOrder geoOrder, int count);
/*
* Use storeSortedSearchTo() method instead
*
*/
@Deprecated
long radiusStoreSortedTo(String destName, V member, double radius, GeoUnit geoUnit);
/*
* Use storeSortedSearchTo() method instead
*
*/
@Deprecated
long radiusStoreSortedTo(String destName, V member, double radius, GeoUnit geoUnit, int count);
/*
* Use storeSortedSearchTo() method instead
*
*/
@Deprecated
long radiusStoreSortedTo(String destName, V member, double radius, GeoUnit geoUnit, GeoOrder geoOrder, int count);
}
| RGeo |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/common/time/DateUtilsRounding.java | {
"start": 668,
"end": 1398
} | class ____ been copied from different locations within the joda time package, as
* these methods fast when used for rounding, as they do not require conversion to java
* time objects
*
* This code has been copied from jodatime 2.10.1
* The source can be found at https://github.com/JodaOrg/joda-time/tree/v2.10.1
*
* See following methods have been copied (along with required helper variables)
*
* - org.joda.time.chrono.GregorianChronology.calculateFirstDayOfYearMillis(int year)
* - org.joda.time.chrono.BasicChronology.getYear(int year)
* - org.joda.time.chrono.BasicGJChronology.getMonthOfYear(long utcMillis, int year)
* - org.joda.time.chrono.BasicGJChronology.getTotalMillisByYearMonth(int year, int month)
*/
| has |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/ReplaceMinusWithAntiJoinRule.java | {
"start": 3103,
"end": 3778
} | interface ____ extends RelRule.Config {
ReplaceMinusWithAntiJoinRule.ReplaceMinusWithAntiJoinRuleConfig DEFAULT =
ImmutableReplaceMinusWithAntiJoinRule.ReplaceMinusWithAntiJoinRuleConfig.builder()
.build()
.withOperandSupplier(b0 -> b0.operand(Minus.class).anyInputs())
.withRelBuilderFactory(RelFactories.LOGICAL_BUILDER)
.withDescription("ReplaceMinusWithAntiJoinRule");
@Override
default ReplaceMinusWithAntiJoinRule toRule() {
return new ReplaceMinusWithAntiJoinRule(this);
}
}
}
| ReplaceMinusWithAntiJoinRuleConfig |
java | apache__flink | flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/dql/SqlShowCatalogs.java | {
"start": 1269,
"end": 1807
} | class ____ extends SqlShowCall {
public static final SqlSpecialOperator OPERATOR =
new SqlSpecialOperator("SHOW CATALOGS", SqlKind.OTHER);
public SqlShowCatalogs(
SqlParserPos pos, String likeType, SqlCharStringLiteral likeLiteral, boolean notLike) {
super(pos, null, null, likeType, likeLiteral, notLike);
}
@Override
public SqlOperator getOperator() {
return OPERATOR;
}
@Override
String getOperationName() {
return "SHOW CATALOGS";
}
}
| SqlShowCatalogs |
java | spring-projects__spring-boot | smoke-test/spring-boot-smoke-test-secure-jersey/src/test/java/smoketest/secure/jersey/AbstractJerseySecureTests.java | {
"start": 1208,
"end": 6502
} | class ____ {
abstract String getPath();
abstract String getManagementPath();
@Autowired
private TestRestTemplate testRestTemplate;
@Test
void helloEndpointIsSecure() {
ResponseEntity<String> entity = restTemplate().getForEntity(getPath() + "/hello", String.class);
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.UNAUTHORIZED);
}
@Test
void actuatorInsecureEndpoint() {
ResponseEntity<String> entity = restTemplate().getForEntity(getManagementPath() + "/actuator/health",
String.class);
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.OK);
assertThat(entity.getBody()).contains("\"status\":\"UP\"");
entity = restTemplate().getForEntity(getManagementPath() + "/actuator/health/diskSpace", String.class);
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.OK);
assertThat(entity.getBody()).contains("\"status\":\"UP\"");
}
@Test
void actuatorLinksWithAnonymous() {
ResponseEntity<String> entity = restTemplate().getForEntity(getManagementPath() + "/actuator", String.class);
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.UNAUTHORIZED);
entity = restTemplate().getForEntity(getManagementPath() + "/actuator/", String.class);
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.UNAUTHORIZED);
}
@Test
void actuatorLinksWithUnauthorizedUser() {
ResponseEntity<String> entity = userRestTemplate().getForEntity(getManagementPath() + "/actuator",
String.class);
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.FORBIDDEN);
entity = userRestTemplate().getForEntity(getManagementPath() + "/actuator/", String.class);
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.FORBIDDEN);
}
@Test
void actuatorLinksWithAuthorizedUser() {
ResponseEntity<String> entity = adminRestTemplate().getForEntity(getManagementPath() + "/actuator",
String.class);
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.OK);
adminRestTemplate().getForEntity(getManagementPath() + "/actuator/", String.class);
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.OK);
}
@Test
void actuatorSecureEndpointWithAnonymous() {
ResponseEntity<String> entity = restTemplate().getForEntity(getManagementPath() + "/actuator/env",
String.class);
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.UNAUTHORIZED);
entity = restTemplate().getForEntity(
getManagementPath() + "/actuator/env/management.endpoints.web.exposure.include", String.class);
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.UNAUTHORIZED);
}
@Test
void actuatorSecureEndpointWithUnauthorizedUser() {
ResponseEntity<String> entity = userRestTemplate().getForEntity(getManagementPath() + "/actuator/env",
String.class);
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.FORBIDDEN);
entity = userRestTemplate().getForEntity(
getManagementPath() + "/actuator/env/management.endpoints.web.exposure.include", String.class);
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.FORBIDDEN);
}
@Test
void actuatorSecureEndpointWithAuthorizedUser() {
ResponseEntity<String> entity = adminRestTemplate().getForEntity(getManagementPath() + "/actuator/env",
String.class);
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.OK);
entity = adminRestTemplate().getForEntity(
getManagementPath() + "/actuator/env/management.endpoints.web.exposure.include", String.class);
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.OK);
}
@Test
void secureServletEndpointWithAnonymous() {
ResponseEntity<String> entity = restTemplate().getForEntity(getManagementPath() + "/actuator/se1",
String.class);
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.UNAUTHORIZED);
entity = restTemplate().getForEntity(getManagementPath() + "/actuator/se1/list", String.class);
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.UNAUTHORIZED);
}
@Test
void secureServletEndpointWithUnauthorizedUser() {
ResponseEntity<String> entity = userRestTemplate().getForEntity(getManagementPath() + "/actuator/se1",
String.class);
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.FORBIDDEN);
entity = userRestTemplate().getForEntity(getManagementPath() + "/actuator/se1/list", String.class);
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.FORBIDDEN);
}
@Test
void secureServletEndpointWithAuthorizedUser() {
ResponseEntity<String> entity = adminRestTemplate().getForEntity(getManagementPath() + "/actuator/se1",
String.class);
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.OK);
entity = adminRestTemplate().getForEntity(getManagementPath() + "/actuator/se1/list", String.class);
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.OK);
}
@Test
void actuatorExcludedFromEndpointRequestMatcher() {
ResponseEntity<String> entity = userRestTemplate().getForEntity(getManagementPath() + "/actuator/mappings",
String.class);
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.OK);
}
TestRestTemplate restTemplate() {
return this.testRestTemplate;
}
TestRestTemplate adminRestTemplate() {
return this.testRestTemplate.withBasicAuth("admin", "admin");
}
TestRestTemplate userRestTemplate() {
return this.testRestTemplate.withBasicAuth("user", "password");
}
}
| AbstractJerseySecureTests |
java | spring-projects__spring-security | config/src/main/java/org/springframework/security/config/http/HttpSecurityBeanDefinitionParser.java | {
"start": 3543,
"end": 19308
} | class ____ implements BeanDefinitionParser {
private static final Log logger = LogFactory.getLog(HttpSecurityBeanDefinitionParser.class);
private static final String ATT_AUTHENTICATION_MANAGER_REF = "authentication-manager-ref";
private static final String ATT_OBSERVATION_REGISTRY_REF = "observation-registry-ref";
static final String ATT_REQUEST_MATCHER_REF = "request-matcher-ref";
static final String ATT_REDIRECT_TO_HTTPS_REQUEST_MATCHER_REF = "redirect-to-https-request-matcher-ref";
static final String ATT_PATH_PATTERN = "pattern";
static final String ATT_HTTP_METHOD = "method";
static final String ATT_FILTERS = "filters";
static final String OPT_FILTERS_NONE = "none";
static final String ATT_REQUIRES_CHANNEL = "requires-channel";
private static final String ATT_REF = "ref";
private static final String ATT_SECURED = "security";
private static final String OPT_SECURITY_NONE = "none";
private static final String ATT_AFTER = "after";
private static final String ATT_BEFORE = "before";
private static final String ATT_POSITION = "position";
public HttpSecurityBeanDefinitionParser() {
}
/**
* The aim of this method is to build the list of filters which have been defined by
* the namespace elements and attributes within the <http> configuration, along
* with any custom-filter's linked to user-defined filter beans.
* <p>
* By the end of this method, the default <tt>FilterChainProxy</tt> bean should have
* been registered and will have the map of filter chains defined, with the
* "universal" match pattern mapped to the list of beans which have been parsed here.
*/
@SuppressWarnings({ "unchecked" })
@Override
public BeanDefinition parse(Element element, ParserContext pc) {
CompositeComponentDefinition compositeDef = new CompositeComponentDefinition(element.getTagName(),
pc.extractSource(element));
pc.pushContainingComponent(compositeDef);
registerFilterChainProxyIfNecessary(pc, element);
// Obtain the filter chains and add the new chain to it
BeanDefinition listFactoryBean = pc.getRegistry().getBeanDefinition(BeanIds.FILTER_CHAINS);
List<BeanReference> filterChains = (List<BeanReference>) listFactoryBean.getPropertyValues()
.getPropertyValue("sourceList")
.getValue();
filterChains.add(createFilterChain(element, pc));
pc.popAndRegisterContainingComponent();
return null;
}
/**
* Creates the {@code SecurityFilterChain} bean from an <http> element.
*/
private BeanReference createFilterChain(Element element, ParserContext pc) {
boolean secured = !OPT_SECURITY_NONE.equals(element.getAttribute(ATT_SECURED));
if (!secured) {
validateSecuredFilterChainElement(element, pc);
for (int i = 0; i < element.getChildNodes().getLength(); i++) {
if (element.getChildNodes().item(i) instanceof Element) {
pc.getReaderContext()
.error("If you are using <http> to define an unsecured pattern, "
+ "it cannot contain child elements.", pc.extractSource(element));
}
}
return createSecurityFilterChainBean(element, pc, Collections.emptyList());
}
BeanReference portMapper = createPortMapper(element, pc);
ManagedList<BeanReference> authenticationProviders = new ManagedList<>();
BeanReference authenticationManager = createAuthenticationManager(element, pc, authenticationProviders);
boolean forceAutoConfig = isDefaultHttpConfig(element);
BeanMetadataElement observationRegistry = getObservationRegistry(element);
HttpConfigurationBuilder httpBldr = new HttpConfigurationBuilder(element, forceAutoConfig, pc, portMapper,
authenticationManager, observationRegistry);
httpBldr.getSecurityContextRepositoryForAuthenticationFilters();
AuthenticationConfigBuilder authBldr = new AuthenticationConfigBuilder(element, forceAutoConfig, pc,
httpBldr.getSessionCreationPolicy(), httpBldr.getRequestCache(), authenticationManager,
httpBldr.getSecurityContextHolderStrategyForAuthenticationFilters(),
httpBldr.getSecurityContextRepositoryForAuthenticationFilters(), httpBldr.getSessionStrategy(),
portMapper, httpBldr.getCsrfLogoutHandler());
httpBldr.setLogoutHandlers(authBldr.getLogoutHandlers());
httpBldr.setEntryPoint(authBldr.getEntryPointBean());
httpBldr.setAccessDeniedHandler(authBldr.getAccessDeniedHandlerBean());
httpBldr.setCsrfIgnoreRequestMatchers(authBldr.getCsrfIgnoreRequestMatchers());
authenticationProviders.addAll(authBldr.getProviders());
List<OrderDecorator> unorderedFilterChain = new ArrayList<>();
unorderedFilterChain.addAll(httpBldr.getFilters());
unorderedFilterChain.addAll(authBldr.getFilters());
unorderedFilterChain.addAll(buildCustomFilterList(element, pc));
unorderedFilterChain.sort(new OrderComparator());
checkFilterChainOrder(unorderedFilterChain, pc, pc.extractSource(element));
// The list of filter beans
List<BeanMetadataElement> filterChain = new ManagedList<>();
for (OrderDecorator od : unorderedFilterChain) {
filterChain.add(od.bean);
}
return createSecurityFilterChainBean(element, pc, filterChain);
}
private void validateSecuredFilterChainElement(Element element, ParserContext pc) {
if (!StringUtils.hasText(element.getAttribute(ATT_PATH_PATTERN))
&& !StringUtils.hasText(ATT_REQUEST_MATCHER_REF)) {
String message = "The '" + ATT_SECURED + "' attribute must be used in combination with" + " the '"
+ ATT_PATH_PATTERN + "' or '" + ATT_REQUEST_MATCHER_REF + "' attributes.";
pc.getReaderContext().error(message, pc.extractSource(element));
}
}
private static boolean isDefaultHttpConfig(Element httpElt) {
return httpElt.getChildNodes().getLength() == 0 && httpElt.getAttributes().getLength() == 0;
}
private BeanReference createSecurityFilterChainBean(Element element, ParserContext pc, List<?> filterChain) {
BeanMetadataElement filterChainMatcher;
String requestMatcherRef = element.getAttribute(ATT_REQUEST_MATCHER_REF);
String filterChainPattern = element.getAttribute(ATT_PATH_PATTERN);
if (StringUtils.hasText(requestMatcherRef)) {
if (StringUtils.hasText(filterChainPattern)) {
pc.getReaderContext()
.error("You can't define a pattern and a request-matcher-ref for the " + "same filter chain",
pc.extractSource(element));
}
filterChainMatcher = new RuntimeBeanReference(requestMatcherRef);
}
else if (StringUtils.hasText(filterChainPattern)) {
filterChainMatcher = MatcherType.fromElementOrMvc(element).createMatcher(pc, filterChainPattern, null);
}
else {
filterChainMatcher = new RootBeanDefinition(AnyRequestMatcher.class);
}
BeanDefinitionBuilder filterChainBldr = BeanDefinitionBuilder
.rootBeanDefinition(DefaultSecurityFilterChain.class);
filterChainBldr.addConstructorArgValue(filterChainMatcher);
filterChainBldr.addConstructorArgValue(filterChain);
BeanDefinition filterChainBean = filterChainBldr.getBeanDefinition();
String id = element.getAttribute("name");
if (!StringUtils.hasText(id)) {
id = element.getAttribute("id");
if (!StringUtils.hasText(id)) {
id = pc.getReaderContext().generateBeanName(filterChainBean);
}
}
pc.registerBeanComponent(new BeanComponentDefinition(filterChainBean, id));
return new RuntimeBeanReference(id);
}
private BeanReference createPortMapper(Element elt, ParserContext pc) {
// Register the portMapper. A default will always be created, even if no element
// exists.
BeanDefinition portMapper = new PortMappingsBeanDefinitionParser()
.parse(DomUtils.getChildElementByTagName(elt, Elements.PORT_MAPPINGS), pc);
String portMapperName = pc.getReaderContext().generateBeanName(portMapper);
pc.registerBeanComponent(new BeanComponentDefinition(portMapper, portMapperName));
return new RuntimeBeanReference(portMapperName);
}
/**
* Creates the internal AuthenticationManager bean which uses either the externally
* registered (global) one as a parent or the bean specified by
* "authentication-manager-ref".
*
* All the providers registered by this <http> block will be registered with the
* internal authentication manager.
*/
private BeanReference createAuthenticationManager(Element element, ParserContext pc,
ManagedList<BeanReference> authenticationProviders) {
String parentMgrRef = element.getAttribute(ATT_AUTHENTICATION_MANAGER_REF);
BeanDefinitionBuilder authManager = BeanDefinitionBuilder
.rootBeanDefinition(ChildAuthenticationManagerFactoryBean.class);
authManager.addConstructorArgValue(authenticationProviders);
if (StringUtils.hasText(parentMgrRef)) {
RuntimeBeanReference parentAuthManager = new RuntimeBeanReference(parentMgrRef);
authManager.addConstructorArgValue(parentAuthManager);
RootBeanDefinition clearCredentials = new RootBeanDefinition(
ClearCredentialsMethodInvokingFactoryBean.class);
clearCredentials.getPropertyValues().addPropertyValue("targetObject", parentAuthManager);
clearCredentials.getPropertyValues()
.addPropertyValue("targetMethod", "isEraseCredentialsAfterAuthentication");
authManager.addPropertyValue("eraseCredentialsAfterAuthentication", clearCredentials);
}
else {
RootBeanDefinition amfb = new RootBeanDefinition(AuthenticationManagerFactoryBean.class);
amfb.setRole(BeanDefinition.ROLE_INFRASTRUCTURE);
String amfbId = pc.getReaderContext().generateBeanName(amfb);
pc.registerBeanComponent(new BeanComponentDefinition(amfb, amfbId));
RootBeanDefinition clearCredentials = new RootBeanDefinition(MethodInvokingFactoryBean.class);
clearCredentials.getPropertyValues().addPropertyValue("targetObject", new RuntimeBeanReference(amfbId));
clearCredentials.getPropertyValues()
.addPropertyValue("targetMethod", "isEraseCredentialsAfterAuthentication");
authManager.addConstructorArgValue(new RuntimeBeanReference(amfbId));
authManager.addPropertyValue("eraseCredentialsAfterAuthentication", clearCredentials);
}
// gh-6009
authManager.addPropertyValue("authenticationEventPublisher",
new RootBeanDefinition(DefaultAuthenticationEventPublisher.class));
authManager.addPropertyValue("observationRegistry", getObservationRegistry(element));
authManager.getRawBeanDefinition().setSource(pc.extractSource(element));
BeanDefinition authMgrBean = authManager.getBeanDefinition();
String id = pc.getReaderContext().generateBeanName(authMgrBean);
pc.registerBeanComponent(new BeanComponentDefinition(authMgrBean, id));
return new RuntimeBeanReference(id);
}
private void checkFilterChainOrder(List<OrderDecorator> filters, ParserContext pc, Object source) {
logger.info("Checking sorted filter chain: " + filters);
for (int i = 0; i < filters.size(); i++) {
OrderDecorator filter = filters.get(i);
if (i > 0) {
OrderDecorator previous = filters.get(i - 1);
if (filter.getOrder() == previous.getOrder()) {
pc.getReaderContext()
.error("Filter beans '" + filter.bean + "' and '" + previous.bean
+ "' have the same 'order' value. When using custom filters, "
+ "please make sure the positions do not conflict with default filters. "
+ "Alternatively you can disable the default filters by removing the corresponding "
+ "child elements from <http> and avoiding the use of <http auto-config='true'>.",
source);
}
}
}
}
List<OrderDecorator> buildCustomFilterList(Element element, ParserContext pc) {
List<Element> customFilterElts = DomUtils.getChildElementsByTagName(element, Elements.CUSTOM_FILTER);
List<OrderDecorator> customFilters = new ArrayList<>();
for (Element elt : customFilterElts) {
String after = elt.getAttribute(ATT_AFTER);
String before = elt.getAttribute(ATT_BEFORE);
String position = elt.getAttribute(ATT_POSITION);
String ref = elt.getAttribute(ATT_REF);
if (!StringUtils.hasText(ref)) {
pc.getReaderContext().error("The '" + ATT_REF + "' attribute must be supplied", pc.extractSource(elt));
}
RuntimeBeanReference bean = new RuntimeBeanReference(ref);
if (WebConfigUtils.countNonEmpty(new String[] { after, before, position }) != 1) {
pc.getReaderContext()
.error("A single '" + ATT_AFTER + "', '" + ATT_BEFORE + "', or '" + ATT_POSITION
+ "' attribute must be supplied", pc.extractSource(elt));
}
if (StringUtils.hasText(position)) {
customFilters.add(new OrderDecorator(bean, SecurityFilters.valueOf(position)));
}
else if (StringUtils.hasText(after)) {
SecurityFilters order = SecurityFilters.valueOf(after);
if (order == SecurityFilters.LAST) {
customFilters.add(new OrderDecorator(bean, SecurityFilters.LAST));
}
else {
customFilters.add(new OrderDecorator(bean, order.getOrder() + 1));
}
}
else if (StringUtils.hasText(before)) {
SecurityFilters order = SecurityFilters.valueOf(before);
if (order == SecurityFilters.FIRST) {
customFilters.add(new OrderDecorator(bean, SecurityFilters.FIRST));
}
else {
customFilters.add(new OrderDecorator(bean, order.getOrder() - 1));
}
}
}
return customFilters;
}
static void registerFilterChainProxyIfNecessary(ParserContext pc, Element element) {
Object source = pc.extractSource(element);
BeanDefinitionRegistry registry = pc.getRegistry();
if (registry.containsBeanDefinition(BeanIds.FILTER_CHAIN_PROXY)) {
return;
}
// Not already registered, so register the list of filter chains and the
// FilterChainProxy
BeanDefinition listFactoryBean = new RootBeanDefinition(ListFactoryBean.class);
listFactoryBean.getPropertyValues().add("sourceList", new ManagedList<>());
pc.registerBeanComponent(new BeanComponentDefinition(listFactoryBean, BeanIds.FILTER_CHAINS));
BeanDefinitionBuilder fcpBldr = BeanDefinitionBuilder.rootBeanDefinition(FilterChainProxy.class);
fcpBldr.getRawBeanDefinition().setSource(source);
fcpBldr.addConstructorArgReference(BeanIds.FILTER_CHAINS);
fcpBldr.addPropertyValue("filterChainValidator", new RootBeanDefinition(DefaultFilterChainValidator.class));
BeanDefinition filterChainDecorator = BeanDefinitionBuilder
.rootBeanDefinition(FilterChainDecoratorFactory.class)
.addPropertyValue("observationRegistry", getObservationRegistry(element))
.getBeanDefinition();
fcpBldr.addPropertyValue("filterChainDecorator", filterChainDecorator);
BeanDefinition fcpBean = fcpBldr.getBeanDefinition();
pc.registerBeanComponent(new BeanComponentDefinition(fcpBean, BeanIds.FILTER_CHAIN_PROXY));
registry.registerAlias(BeanIds.FILTER_CHAIN_PROXY, BeanIds.SPRING_SECURITY_FILTER_CHAIN);
BeanDefinitionBuilder requestRejected = BeanDefinitionBuilder
.rootBeanDefinition(RequestRejectedHandlerPostProcessor.class);
requestRejected.setRole(BeanDefinition.ROLE_INFRASTRUCTURE);
requestRejected.addConstructorArgValue("requestRejectedHandler");
requestRejected.addConstructorArgValue(BeanIds.FILTER_CHAIN_PROXY);
requestRejected.addConstructorArgValue("requestRejectedHandler");
requestRejected.addPropertyValue("observationRegistry", getObservationRegistry(element));
AbstractBeanDefinition requestRejectedBean = requestRejected.getBeanDefinition();
String requestRejectedPostProcessorName = pc.getReaderContext().generateBeanName(requestRejectedBean);
registry.registerBeanDefinition(requestRejectedPostProcessorName, requestRejectedBean);
}
private static BeanMetadataElement getObservationRegistry(Element methodSecurityElmt) {
String holderStrategyRef = methodSecurityElmt.getAttribute(ATT_OBSERVATION_REGISTRY_REF);
if (StringUtils.hasText(holderStrategyRef)) {
return new RuntimeBeanReference(holderStrategyRef);
}
return BeanDefinitionBuilder.rootBeanDefinition(ObservationRegistryFactory.class).getBeanDefinition();
}
public static | HttpSecurityBeanDefinitionParser |
java | apache__kafka | test-common/test-common-runtime/src/main/java/org/apache/kafka/common/test/KafkaClusterTestKit.java | {
"start": 27511,
"end": 33162
} | class ____ {
private final Properties properties;
private boolean usingBootstrapControllers = false;
public ClientPropertiesBuilder() {
this.properties = new Properties();
}
public ClientPropertiesBuilder(Properties properties) {
this.properties = properties;
}
public ClientPropertiesBuilder setUsingBootstrapControllers(boolean usingBootstrapControllers) {
this.usingBootstrapControllers = usingBootstrapControllers;
return this;
}
public Properties build() {
if (usingBootstrapControllers) {
properties.setProperty(AdminClientConfig.BOOTSTRAP_CONTROLLERS_CONFIG, bootstrapControllers());
properties.remove(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG);
} else {
properties.setProperty(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers());
properties.remove(AdminClientConfig.BOOTSTRAP_CONTROLLERS_CONFIG);
}
return properties;
}
}
public ClientPropertiesBuilder newClientPropertiesBuilder(Properties properties) {
return new ClientPropertiesBuilder(properties);
}
public ClientPropertiesBuilder newClientPropertiesBuilder() {
return new ClientPropertiesBuilder();
}
public Properties clientProperties() {
return new ClientPropertiesBuilder().build();
}
public String bootstrapServers() {
StringBuilder bld = new StringBuilder();
String prefix = "";
for (Entry<Integer, BrokerServer> entry : brokers.entrySet()) {
int brokerId = entry.getKey();
BrokerServer broker = entry.getValue();
ListenerName listenerName = nodes.brokerListenerName();
// The KafkaConfig#listeners method normalizes the listener name.
// The result from TestKitNodes#brokerListenerName method should be normalized as well,
// so that it matches the listener name in the KafkaConfig.
int port = broker.boundPort(ListenerName.normalised(listenerName.value()));
if (port <= 0) {
throw new RuntimeException("Broker " + brokerId + " does not yet " +
"have a bound port for " + listenerName + ". Did you start " +
"the cluster yet?");
}
bld.append(prefix).append("localhost:").append(port);
prefix = ",";
}
return bld.toString();
}
public String bootstrapControllers() {
StringBuilder bld = new StringBuilder();
String prefix = "";
for (Entry<Integer, ControllerServer> entry : controllers.entrySet()) {
int id = entry.getKey();
ControllerServer controller = entry.getValue();
ListenerName listenerName = nodes.controllerListenerName();
// Although the KafkaConfig#listeners method normalizes the listener name,
// the controller.listener.names configuration does not allow lowercase input,
// so there is no lowercase controller listener name, and we don't need to normalize it.
int port = controller.socketServer().boundPort(listenerName);
if (port <= 0) {
throw new RuntimeException("Controller " + id + " does not yet " +
"have a bound port for " + listenerName + ". Did you start " +
"the cluster yet?");
}
bld.append(prefix).append("localhost:").append(port);
prefix = ",";
}
return bld.toString();
}
public Map<Integer, ControllerServer> controllers() {
return controllers;
}
public Controller waitForActiveController() throws InterruptedException {
AtomicReference<Controller> active = new AtomicReference<>(null);
TestUtils.waitForCondition(() -> {
for (ControllerServer controllerServer : controllers.values()) {
if (controllerServer.controller().isActive()) {
active.set(controllerServer.controller());
}
}
return active.get() != null;
}, 60_000, "Controller not active");
return active.get();
}
public Map<Integer, BrokerServer> brokers() {
return brokers;
}
public Map<Integer, KafkaRaftManager<ApiMessageAndVersion>> raftManagers() {
Map<Integer, KafkaRaftManager<ApiMessageAndVersion>> results = new HashMap<>();
for (BrokerServer brokerServer : brokers().values()) {
results.put(brokerServer.config().brokerId(), brokerServer.sharedServer().raftManager());
}
for (ControllerServer controllerServer : controllers().values()) {
if (!results.containsKey(controllerServer.config().nodeId())) {
results.put(controllerServer.config().nodeId(), controllerServer.sharedServer().raftManager());
}
}
return results;
}
public TestKitNodes nodes() {
return nodes;
}
public MockFaultHandler fatalFaultHandler() {
return faultHandlerFactory.fatalFaultHandler();
}
public MockFaultHandler nonFatalFaultHandler() {
return faultHandlerFactory.nonFatalFaultHandler();
}
@Override
public void close() throws Exception {
List<Entry<String, Future<?>>> futureEntries = new ArrayList<>();
try {
// Note the shutdown order here is chosen to be consistent with
// `KafkaRaftServer`. See comments in that | ClientPropertiesBuilder |
java | google__error-prone | check_api/src/test/java/com/google/errorprone/util/ASTHelpersTest.java | {
"start": 39939,
"end": 39981
} | interface ____ {
String value();
}
@ | B |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_2891/Issue2891Mapper.java | {
"start": 1436,
"end": 1564
} | class ____ extends AbstractSource {
protected Source1(String name) {
super( name );
}
}
| Source1 |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/synonyms/AbstractSynonymsPagedResultAction.java | {
"start": 1749,
"end": 1829
} | class ____ includes support for pagination parameters
*/
public static | that |
java | spring-projects__spring-boot | documentation/spring-boot-docs/src/main/java/org/springframework/boot/docs/web/servlet/springmvc/MyRoutingConfiguration.java | {
"start": 1247,
"end": 1725
} | class ____ {
private static final RequestPredicate ACCEPT_JSON = accept(MediaType.APPLICATION_JSON);
@Bean
public RouterFunction<ServerResponse> routerFunction(MyUserHandler userHandler) {
// @formatter:off
return route()
.GET("/{user}", ACCEPT_JSON, userHandler::getUser)
.GET("/{user}/customers", ACCEPT_JSON, userHandler::getUserCustomers)
.DELETE("/{user}", ACCEPT_JSON, userHandler::deleteUser)
.build();
// @formatter:on
}
}
| MyRoutingConfiguration |
java | spring-projects__spring-framework | spring-websocket/src/test/java/org/springframework/web/socket/config/HandlersBeanDefinitionParserTests.java | {
"start": 13347,
"end": 13412
} | class ____ implements ChannelInterceptor {
}
| TestChannelInterceptor |
java | elastic__elasticsearch | x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformSchedulingUtilsTests.java | {
"start": 634,
"end": 2502
} | class ____ extends ESTestCase {
public void testCalculateNextScheduledTimeExponentialBackoff() {
long lastTriggeredTimeMillis = Instant.now().toEpochMilli();
long[] expectedDelayMillis = {
Transform.DEFAULT_TRANSFORM_FREQUENCY.millis(), // normal schedule
5000, // 5s
5000, // 5s
8000, // 8s
16000, // 16s
32000, // 32s
64000, // ~1min
128000, // ~2min
256000, // ~4min
512000, // ~8.5min
1024000, // ~17min
2048000, // ~34min
3600000, // 1h
3600000, // 1h
3600000, // 1h
3600000 // 1h
};
for (int failureCount = 0; failureCount < 1000; ++failureCount) {
assertThat(
"failureCount = " + failureCount,
TransformSchedulingUtils.calculateNextScheduledTime(lastTriggeredTimeMillis, null, failureCount),
is(equalTo(lastTriggeredTimeMillis + expectedDelayMillis[Math.min(failureCount, expectedDelayMillis.length - 1)]))
);
}
}
public void testCalculateNextScheduledTime() {
long now = Instant.now().toEpochMilli();
assertThat(
TransformSchedulingUtils.calculateNextScheduledTime(null, TimeValue.timeValueSeconds(10), 0),
is(greaterThanOrEqualTo(now + 10_000))
);
assertThat(
TransformSchedulingUtils.calculateNextScheduledTime(now, null, 0),
is(equalTo(now + Transform.DEFAULT_TRANSFORM_FREQUENCY.millis()))
);
assertThat(
TransformSchedulingUtils.calculateNextScheduledTime(null, null, 0),
is(greaterThanOrEqualTo(now + Transform.DEFAULT_TRANSFORM_FREQUENCY.millis()))
);
}
}
| TransformSchedulingUtilsTests |
java | google__dagger | javatests/dagger/internal/codegen/KeyFactoryTest.java | {
"start": 2148,
"end": 3074
} | class ____ {
@Rule public CompilationRule compilationRule = new CompilationRule();
@Inject XProcessingEnv processingEnv;
@Inject KeyFactory keyFactory;
@Before public void setUp() {
DaggerKeyFactoryTest_TestComponent.builder()
.javacPluginModule(
new JavacPluginModule(compilationRule.getElements(), compilationRule.getTypes()))
.build()
.inject(this);
}
@Test
public void forInjectConstructorWithResolvedType() {
XTypeElement typeElement =
processingEnv.requireTypeElement(InjectedClass.class.getCanonicalName());
XConstructorElement constructor = getOnlyElement(typeElement.getConstructors());
Key key =
keyFactory.forInjectConstructorWithResolvedType(
constructor.getEnclosingElement().getType());
assertThat(key.toString()).isEqualTo("dagger.internal.codegen.KeyFactoryTest.InjectedClass");
}
static final | KeyFactoryTest |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/GeoSearch.java | {
"start": 3002,
"end": 3072
} | interface ____ extends CompositeArgument {
}
static | GeoPredicate |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/bytecode/enhance/spi/EnhancementOptions.java | {
"start": 1729,
"end": 2700
} | class ____ check.
*
* @return {@code true} indicates that any direct access to fields of entities should be routed to the enhanced
* getter / setter method.
*
* @deprecated Use {@linkplain #doExtendedEnhancement()} instead.
*/
@Deprecated(forRemoval = true, since = "7.1")
boolean doExtendedEnhancement(UnloadedClass classDescriptor);
/**
* Whether to enable support for automatic management of bidirectional associations for this field.
*
* @param field The field to check.
*
* @return {@code true} indicates that the field is enhanced so that for bidirectional persistent fields
* the association is managed, i.e. the associations are automatically set; {@code false} indicates that
* the management is handled by the user.
*
* @deprecated Use {@linkplain #doBiDirectionalAssociationManagement()} instead.
*/
@Deprecated(forRemoval = true, since = "7.1")
boolean doBiDirectionalAssociationManagement(UnloadedField field);
}
| to |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-guava-tests/src/test/java/org/assertj/tests/guava/api/RangeMapAssert_isNotEmpty_Test.java | {
"start": 944,
"end": 1782
} | class ____ extends RangeMapAssertBaseTest {
@Test
public void should_pass_if_actual_is_not_empty() {
assertThat(actual).isNotEmpty();
}
@Test
public void should_fail_if_actual_is_null() {
// GIVEN
actual = null;
// WHEN
Throwable throwable = catchThrowable(() -> assertThat(actual).isNotEmpty());
// THEN
assertThat(throwable).isInstanceOf(AssertionError.class)
.hasMessage(actualIsNull());
}
@Test
public void should_fail_if_actual_is_empty() {
// GIVEN
actual.clear();
// WHEN
Throwable throwable = catchThrowable(() -> assertThat(actual).isNotEmpty());
// THEN
assertThat(throwable).isInstanceOf(AssertionError.class)
.hasMessage("%nExpecting actual not to be empty".formatted());
}
}
| RangeMapAssert_isNotEmpty_Test |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/cdi/FetchEmbeddedIdTest.java | {
"start": 5102,
"end": 5434
} | class ____ {
@Id
private Long id;
private String name;
public User() {
}
public User(Long id, String name) {
this.id = id;
this.name = name;
}
public Long getId() {
return id;
}
public String getName() {
return name;
}
}
@Entity(name = "GROUP")
@Table(name = "test_group")
public static | User |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/collection/defaultimplementation/Target.java | {
"start": 272,
"end": 524
} | class ____ {
private List<TargetFoo> fooListNoSetter;
public List<TargetFoo> getFooListNoSetter() {
if ( fooListNoSetter == null ) {
fooListNoSetter = new ArrayList<>();
}
return fooListNoSetter;
}
}
| Target |
java | spring-projects__spring-boot | module/spring-boot-tomcat/src/main/java/org/springframework/boot/tomcat/autoconfigure/metrics/TomcatMetricsAutoConfiguration.java | {
"start": 1812,
"end": 2106
} | class ____ {
@Bean
@ConditionalOnBean(MeterRegistry.class)
@ConditionalOnMissingBean({ TomcatMetrics.class, TomcatMetricsBinder.class })
TomcatMetricsBinder tomcatMetricsBinder(MeterRegistry meterRegistry) {
return new TomcatMetricsBinder(meterRegistry);
}
}
| TomcatMetricsAutoConfiguration |
java | spring-projects__spring-security | web/src/test/java/org/springframework/security/web/servletapi/SecurityContextHolderAwareRequestFilterTests.java | {
"start": 3102,
"end": 15262
} | class ____ {
@Captor
private ArgumentCaptor<HttpServletRequest> requestCaptor;
@Mock
private AuthenticationManager authenticationManager;
@Mock
private AuthenticationEntryPoint authenticationEntryPoint;
@Mock
private LogoutHandler logoutHandler;
@Mock
private FilterChain filterChain;
@Mock
private HttpServletRequest request;
@Mock
private HttpServletResponse response;
private List<LogoutHandler> logoutHandlers;
private SecurityContextHolderAwareRequestFilter filter;
@BeforeEach
public void setUp() throws Exception {
this.logoutHandlers = Arrays.asList(this.logoutHandler);
this.filter = new SecurityContextHolderAwareRequestFilter();
this.filter.setAuthenticationEntryPoint(this.authenticationEntryPoint);
this.filter.setAuthenticationManager(this.authenticationManager);
this.filter.setLogoutHandlers(this.logoutHandlers);
this.filter.afterPropertiesSet();
}
@AfterEach
public void clearContext() {
SecurityContextHolder.clearContext();
}
@Test
public void expectedRequestWrapperClassIsUsed() throws Exception {
this.filter.setRolePrefix("ROLE_");
this.filter.doFilter(new MockHttpServletRequest(), new MockHttpServletResponse(), this.filterChain);
// Now re-execute the filter, ensuring our replacement wrapper is still used
this.filter.doFilter(new MockHttpServletRequest(), new MockHttpServletResponse(), this.filterChain);
verify(this.filterChain, times(2)).doFilter(any(SecurityContextHolderAwareRequestWrapper.class),
any(HttpServletResponse.class));
this.filter.destroy();
}
@Test
public void authenticateFalse() throws Exception {
assertThat(wrappedRequest().authenticate(this.response)).isFalse();
verify(this.authenticationEntryPoint).commence(eq(this.requestCaptor.getValue()), eq(this.response),
any(AuthenticationException.class));
verifyNoMoreInteractions(this.authenticationManager, this.logoutHandler);
verify(this.request, times(0)).authenticate(any(HttpServletResponse.class));
}
@Test
public void authenticateTrue() throws Exception {
SecurityContextHolder.getContext()
.setAuthentication(new TestingAuthenticationToken("test", "password", "ROLE_USER"));
assertThat(wrappedRequest().authenticate(this.response)).isTrue();
verifyNoMoreInteractions(this.authenticationEntryPoint, this.authenticationManager, this.logoutHandler);
verify(this.request, times(0)).authenticate(any(HttpServletResponse.class));
}
@Test
public void authenticateNullEntryPointFalse() throws Exception {
this.filter.setAuthenticationEntryPoint(null);
this.filter.afterPropertiesSet();
assertThat(wrappedRequest().authenticate(this.response)).isFalse();
verify(this.request).authenticate(this.response);
verifyNoMoreInteractions(this.authenticationEntryPoint, this.authenticationManager, this.logoutHandler);
}
@Test
public void authenticateNullEntryPointTrue() throws Exception {
given(this.request.authenticate(this.response)).willReturn(true);
this.filter.setAuthenticationEntryPoint(null);
this.filter.afterPropertiesSet();
assertThat(wrappedRequest().authenticate(this.response)).isTrue();
verify(this.request).authenticate(this.response);
verifyNoMoreInteractions(this.authenticationEntryPoint, this.authenticationManager, this.logoutHandler);
}
@Test
public void login() throws Exception {
TestingAuthenticationToken expectedAuth = new TestingAuthenticationToken("user", "password", "ROLE_USER");
given(this.authenticationManager.authenticate(any(UsernamePasswordAuthenticationToken.class)))
.willReturn(expectedAuth);
wrappedRequest().login(expectedAuth.getName(), String.valueOf(expectedAuth.getCredentials()));
assertThat(SecurityContextHolder.getContext().getAuthentication()).isSameAs(expectedAuth);
verifyNoMoreInteractions(this.authenticationEntryPoint, this.logoutHandler);
verify(this.request, times(0)).login(anyString(), anyString());
}
// SEC-2296
@Test
public void loginWithExistingUser() throws Exception {
TestingAuthenticationToken expectedAuth = new TestingAuthenticationToken("user", "password", "ROLE_USER");
SecurityContextHolder.getContext().setAuthentication(expectedAuth);
assertThatExceptionOfType(ServletException.class).isThrownBy(
() -> wrappedRequest().login(expectedAuth.getName(), String.valueOf(expectedAuth.getCredentials())));
assertThat(SecurityContextHolder.getContext().getAuthentication()).isSameAs(expectedAuth);
verifyNoMoreInteractions(this.authenticationEntryPoint, this.logoutHandler);
verify(this.request, times(0)).login(anyString(), anyString());
}
@Test
public void loginFail() throws Exception {
AuthenticationException authException = new BadCredentialsException("Invalid");
given(this.authenticationManager.authenticate(any(UsernamePasswordAuthenticationToken.class)))
.willThrow(authException);
assertThatExceptionOfType(ServletException.class)
.isThrownBy(() -> wrappedRequest().login("invalid", "credentials"))
.withCause(authException);
assertThat(SecurityContextHolder.getContext().getAuthentication()).isNull();
verifyNoMoreInteractions(this.authenticationEntryPoint, this.logoutHandler);
verify(this.request, times(0)).login(anyString(), anyString());
}
@Test
public void loginNullAuthenticationManager() throws Exception {
this.filter.setAuthenticationManager(null);
this.filter.afterPropertiesSet();
String username = "username";
String password = "password";
wrappedRequest().login(username, password);
verify(this.request).login(username, password);
verifyNoMoreInteractions(this.authenticationEntryPoint, this.authenticationManager, this.logoutHandler);
}
@Test
public void loginNullAuthenticationManagerFail() throws Exception {
this.filter.setAuthenticationManager(null);
this.filter.afterPropertiesSet();
String username = "username";
String password = "password";
ServletException authException = new ServletException("Failed Login");
willThrow(authException).given(this.request).login(username, password);
assertThatExceptionOfType(ServletException.class).isThrownBy(() -> wrappedRequest().login(username, password))
.isEqualTo(authException);
verifyNoMoreInteractions(this.authenticationEntryPoint, this.authenticationManager, this.logoutHandler);
}
@Test
public void loginWhenHttpServletRequestHasAuthenticationDetailsThenAuthenticationRequestHasDetails()
throws Exception {
String ipAddress = "10.0.0.100";
String sessionId = "session-id";
given(this.request.getRemoteAddr()).willReturn(ipAddress);
given(this.request.getSession(anyBoolean())).willReturn(new MockHttpSession(null, sessionId));
wrappedRequest().login("username", "password");
ArgumentCaptor<UsernamePasswordAuthenticationToken> authenticationCaptor = ArgumentCaptor
.forClass(UsernamePasswordAuthenticationToken.class);
verify(this.authenticationManager).authenticate(authenticationCaptor.capture());
UsernamePasswordAuthenticationToken authenticationRequest = authenticationCaptor.getValue();
assertThat(authenticationRequest.getDetails()).isInstanceOf(WebAuthenticationDetails.class);
WebAuthenticationDetails details = (WebAuthenticationDetails) authenticationRequest.getDetails();
assertThat(details.getRemoteAddress()).isEqualTo(ipAddress);
assertThat(details.getSessionId()).isEqualTo(sessionId);
}
@Test
public void logout() throws Exception {
TestingAuthenticationToken expectedAuth = new TestingAuthenticationToken("user", "password", "ROLE_USER");
SecurityContextHolder.getContext().setAuthentication(expectedAuth);
HttpServletRequest wrappedRequest = wrappedRequest();
wrappedRequest.logout();
verify(this.logoutHandler).logout(wrappedRequest, this.response, expectedAuth);
verifyNoMoreInteractions(this.authenticationManager, this.logoutHandler);
verify(this.request, times(0)).logout();
}
@Test
public void logoutNullLogoutHandler() throws Exception {
this.filter.setLogoutHandlers(null);
this.filter.afterPropertiesSet();
wrappedRequest().logout();
verify(this.request).logout();
verifyNoMoreInteractions(this.authenticationEntryPoint, this.authenticationManager, this.logoutHandler);
}
// gh-3780
@Test
public void getAsyncContextNullFromSuper() throws Exception {
assertThat(wrappedRequest().getAsyncContext()).isNull();
}
@Test
public void getAsyncContextStart() throws Exception {
ArgumentCaptor<Runnable> runnableCaptor = ArgumentCaptor.forClass(Runnable.class);
SecurityContext context = SecurityContextHolder.createEmptyContext();
TestingAuthenticationToken expectedAuth = new TestingAuthenticationToken("user", "password", "ROLE_USER");
context.setAuthentication(expectedAuth);
SecurityContextHolder.setContext(context);
AsyncContext asyncContext = mock(AsyncContext.class);
given(this.request.getAsyncContext()).willReturn(asyncContext);
Runnable runnable = () -> {
};
wrappedRequest().getAsyncContext().start(runnable);
verifyNoMoreInteractions(this.authenticationManager, this.logoutHandler);
verify(asyncContext).start(runnableCaptor.capture());
DelegatingSecurityContextRunnable wrappedRunnable = (DelegatingSecurityContextRunnable) runnableCaptor
.getValue();
assertThat(ReflectionTestUtils.getField(wrappedRunnable, "delegateSecurityContext")).isEqualTo(context);
assertThat(ReflectionTestUtils.getField(wrappedRunnable, "delegate"));
}
@Test
public void startAsyncStart() throws Exception {
ArgumentCaptor<Runnable> runnableCaptor = ArgumentCaptor.forClass(Runnable.class);
SecurityContext context = SecurityContextHolder.createEmptyContext();
TestingAuthenticationToken expectedAuth = new TestingAuthenticationToken("user", "password", "ROLE_USER");
context.setAuthentication(expectedAuth);
SecurityContextHolder.setContext(context);
AsyncContext asyncContext = mock(AsyncContext.class);
given(this.request.startAsync()).willReturn(asyncContext);
Runnable runnable = () -> {
};
wrappedRequest().startAsync().start(runnable);
verifyNoMoreInteractions(this.authenticationManager, this.logoutHandler);
verify(asyncContext).start(runnableCaptor.capture());
DelegatingSecurityContextRunnable wrappedRunnable = (DelegatingSecurityContextRunnable) runnableCaptor
.getValue();
assertThat(ReflectionTestUtils.getField(wrappedRunnable, "delegateSecurityContext")).isEqualTo(context);
assertThat(ReflectionTestUtils.getField(wrappedRunnable, "delegate"));
}
@Test
public void startAsyncWithRequestResponseStart() throws Exception {
ArgumentCaptor<Runnable> runnableCaptor = ArgumentCaptor.forClass(Runnable.class);
SecurityContext context = SecurityContextHolder.createEmptyContext();
TestingAuthenticationToken expectedAuth = new TestingAuthenticationToken("user", "password", "ROLE_USER");
context.setAuthentication(expectedAuth);
SecurityContextHolder.setContext(context);
AsyncContext asyncContext = mock(AsyncContext.class);
given(this.request.startAsync(this.request, this.response)).willReturn(asyncContext);
Runnable runnable = () -> {
};
wrappedRequest().startAsync(this.request, this.response).start(runnable);
verifyNoMoreInteractions(this.authenticationManager, this.logoutHandler);
verify(asyncContext).start(runnableCaptor.capture());
DelegatingSecurityContextRunnable wrappedRunnable = (DelegatingSecurityContextRunnable) runnableCaptor
.getValue();
assertThat(ReflectionTestUtils.getField(wrappedRunnable, "delegateSecurityContext")).isEqualTo(context);
assertThat(ReflectionTestUtils.getField(wrappedRunnable, "delegate"));
}
// SEC-3047
@Test
public void updateRequestFactory() throws Exception {
SecurityContextHolder.getContext()
.setAuthentication(new TestingAuthenticationToken("user", "password", "PREFIX_USER"));
this.filter.setRolePrefix("PREFIX_");
assertThat(wrappedRequest().isUserInRole("PREFIX_USER")).isTrue();
}
private HttpServletRequest wrappedRequest() throws Exception {
this.filter.doFilter(this.request, this.response, this.filterChain);
verify(this.filterChain).doFilter(this.requestCaptor.capture(), any(HttpServletResponse.class));
return this.requestCaptor.getValue();
}
}
| SecurityContextHolderAwareRequestFilterTests |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/RedisPublisher.java | {
"start": 23591,
"end": 25957
} | class ____<K, V, T> extends CommandWrapper<K, V, T> implements DemandAware.Sink {
private final boolean dissolve;
private final RedisSubscription<T> subscription;
private volatile DemandAware.Source source;
public SubscriptionCommand(RedisCommand<K, V, T> command, RedisSubscription<T> subscription, boolean dissolve) {
super(command);
this.subscription = subscription;
this.dissolve = dissolve;
}
@Override
public boolean hasDemand() {
return isDone() || subscription.state() == State.COMPLETED || subscription.data.isEmpty();
}
@Override
@SuppressWarnings({ "unchecked", "CastCanBeRemovedNarrowingVariableType" })
protected void doOnComplete() {
if (getOutput() != null) {
Object result = getOutput().get();
if (getOutput().hasError()) {
onError(ExceptionFactory.createExecutionException(getOutput().getError()));
return;
}
if (!(getOutput() instanceof StreamingOutput<?>) && result != null) {
if (dissolve && result instanceof Collection) {
Collection<T> collection = (Collection<T>) result;
for (T t : collection) {
if (t != null) {
subscription.onNext(t);
}
}
} else {
subscription.onNext((T) result);
}
}
}
subscription.onAllDataRead();
}
@Override
public void setSource(DemandAware.Source source) {
this.source = source;
}
@Override
public void removeSource() {
this.source = null;
}
@Override
protected void doOnError(Throwable throwable) {
onError(throwable);
}
private void onError(Throwable throwable) {
subscription.onError(throwable);
}
}
/**
* Composite {@link io.lettuce.core.output.StreamingOutput.Subscriber} that can notify multiple nested subscribers.
*
* @param <T> element type
*/
private static | SubscriptionCommand |
java | apache__camel | components/camel-dataset/src/generated/java/org/apache/camel/component/dataset/DataSetEndpointConfigurer.java | {
"start": 728,
"end": 4757
} | class ____ extends MockEndpointConfigurer implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
DataSetEndpoint target = (DataSetEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "bridgeerrorhandler":
case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
case "consumedelay":
case "consumeDelay": target.setConsumeDelay(property(camelContext, java.time.Duration.class, value).toMillis()); return true;
case "datasetindex":
case "dataSetIndex": target.setDataSetIndex(property(camelContext, java.lang.String.class, value)); return true;
case "exceptionhandler":
case "exceptionHandler": target.setExceptionHandler(property(camelContext, org.apache.camel.spi.ExceptionHandler.class, value)); return true;
case "exchangepattern":
case "exchangePattern": target.setExchangePattern(property(camelContext, org.apache.camel.ExchangePattern.class, value)); return true;
case "initialdelay":
case "initialDelay": target.setInitialDelay(property(camelContext, java.time.Duration.class, value).toMillis()); return true;
case "minrate":
case "minRate": target.setMinRate(property(camelContext, int.class, value)); return true;
case "preloadsize":
case "preloadSize": target.setPreloadSize(property(camelContext, long.class, value)); return true;
case "producedelay":
case "produceDelay": target.setProduceDelay(property(camelContext, java.time.Duration.class, value).toMillis()); return true;
default: return super.configure(camelContext, obj, name, value, ignoreCase);
}
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "bridgeerrorhandler":
case "bridgeErrorHandler": return boolean.class;
case "consumedelay":
case "consumeDelay": return long.class;
case "datasetindex":
case "dataSetIndex": return java.lang.String.class;
case "exceptionhandler":
case "exceptionHandler": return org.apache.camel.spi.ExceptionHandler.class;
case "exchangepattern":
case "exchangePattern": return org.apache.camel.ExchangePattern.class;
case "initialdelay":
case "initialDelay": return long.class;
case "minrate":
case "minRate": return int.class;
case "preloadsize":
case "preloadSize": return long.class;
case "producedelay":
case "produceDelay": return long.class;
default: return super.getOptionType(name, ignoreCase);
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
DataSetEndpoint target = (DataSetEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "bridgeerrorhandler":
case "bridgeErrorHandler": return target.isBridgeErrorHandler();
case "consumedelay":
case "consumeDelay": return target.getConsumeDelay();
case "datasetindex":
case "dataSetIndex": return target.getDataSetIndex();
case "exceptionhandler":
case "exceptionHandler": return target.getExceptionHandler();
case "exchangepattern":
case "exchangePattern": return target.getExchangePattern();
case "initialdelay":
case "initialDelay": return target.getInitialDelay();
case "minrate":
case "minRate": return target.getMinRate();
case "preloadsize":
case "preloadSize": return target.getPreloadSize();
case "producedelay":
case "produceDelay": return target.getProduceDelay();
default: return super.getOptionValue(obj, name, ignoreCase);
}
}
}
| DataSetEndpointConfigurer |
java | quarkusio__quarkus | integration-tests/hibernate-orm-graphql-panache/src/main/java/io/quarkus/it/hibertnate/orm/graphql/panache/Book.java | {
"start": 266,
"end": 745
} | class ____ {
@Id
@GeneratedValue
private Long id;
@NotBlank
private String title;
@ManyToOne
private Author author;
public Long getId() {
return id;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public Author getAuthor() {
return author;
}
public void setAuthor(Author author) {
this.author = author;
}
}
| Book |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/id/enhanced/OptimizerFactory.java | {
"start": 476,
"end": 1862
} | class ____ {
private static final Class<?>[] CTOR_SIG = new Class[] { Class.class, int.class };
private static Optimizer buildOptimizer(OptimizerDescriptor descriptor, Class<?> returnClass, int incrementSize) {
final var optimizer = createOptimizer( descriptor, returnClass, incrementSize );
return optimizer != null ? optimizer : buildFallbackOptimizer( returnClass, incrementSize );
}
private static Optimizer createOptimizer(OptimizerDescriptor descriptor, Class<?> returnClass, int incrementSize) {
final Class<? extends Optimizer> optimizerClass;
try {
optimizerClass = descriptor.getOptimizerClass();
}
catch ( Throwable ignore ) {
OPTIMIZER_MESSAGE_LOGGER.unableToLocateCustomOptimizerClass( descriptor.getExternalName() );
return buildFallbackOptimizer( returnClass, incrementSize );
}
try {
final var ctor = optimizerClass.getConstructor( CTOR_SIG );
return ctor.newInstance( returnClass, incrementSize );
}
catch ( Throwable ignore ) {
OPTIMIZER_MESSAGE_LOGGER.unableToInstantiateOptimizer( descriptor.getExternalName() );
}
return null;
}
private static Optimizer buildFallbackOptimizer(Class<?> returnClass, int incrementSize) {
return new NoopOptimizer( returnClass, incrementSize );
}
/**
* Builds an optimizer
*
* @param type The optimizer type, either a shorthand name or the {@link Optimizer} | OptimizerFactory |
java | spring-projects__spring-boot | module/spring-boot-data-neo4j/src/test/java/org/springframework/boot/data/neo4j/autoconfigure/DataNeo4jAutoConfigurationTests.java | {
"start": 8089,
"end": 8373
} | class ____ {
@Bean
DatabaseSelectionProvider databaseSelectionProvider() {
return () -> DatabaseSelection.byName("custom");
}
}
@Configuration(proxyBeanMethods = false)
@TestAutoConfigurationPackage(TestPersistent.class)
static | CustomDatabaseSelectionProviderConfiguration |
java | mockito__mockito | mockito-core/src/test/java/org/mockito/internal/stubbing/defaultanswers/ReturnsMoreEmptyValuesTest.java | {
"start": 328,
"end": 797
} | class ____ extends TestBase {
private ReturnsMoreEmptyValues rv = new ReturnsMoreEmptyValues();
@Test
public void shouldReturnEmptyArray() {
String[] ret = (String[]) rv.returnValueFor((new String[0]).getClass());
assertTrue(ret.getClass().isArray());
assertTrue(ret.length == 0);
}
@Test
public void shouldReturnEmptyString() {
assertEquals("", rv.returnValueFor(String.class));
}
}
| ReturnsMoreEmptyValuesTest |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/discovery/DiscoveryTests.java | {
"start": 17074,
"end": 19342
} | class ____ will not be executed. It must be static or annotated with @Nested.",
RecursiveHierarchyWithoutNestedTestCase.Inner.class.getName());
}
@Test
void reportsWarningsForInvalidTags() throws Exception {
var results = discoverTestsForClass(InvalidTagsTestCase.class);
var discoveryIssues = results.getDiscoveryIssues().stream().sorted(comparing(DiscoveryIssue::message)).toList();
assertThat(discoveryIssues).hasSize(2);
assertThat(discoveryIssues.getFirst().message()) //
.isEqualTo("Invalid tag syntax in @Tag(\"\") declaration on class '%s'. Tag will be ignored.",
InvalidTagsTestCase.class.getName());
assertThat(discoveryIssues.getFirst().source()) //
.contains(ClassSource.from(InvalidTagsTestCase.class));
var method = InvalidTagsTestCase.class.getDeclaredMethod("test");
assertThat(discoveryIssues.getLast().message()) //
.isEqualTo("Invalid tag syntax in @Tag(\"|\") declaration on method '%s'. Tag will be ignored.",
method.toGenericString());
assertThat(discoveryIssues.getLast().source()) //
.contains(org.junit.platform.engine.support.descriptor.MethodSource.from(method));
}
@Test
void reportsWarningsForBlankDisplayNames() throws Exception {
var results = discoverTestsForClass(BlankDisplayNamesTestCase.class);
var discoveryIssues = results.getDiscoveryIssues().stream().sorted(comparing(DiscoveryIssue::message)).toList();
assertThat(discoveryIssues).hasSize(2);
assertThat(discoveryIssues.getFirst().message()) //
.isEqualTo("@DisplayName on class '%s' must be declared with a non-blank value.",
BlankDisplayNamesTestCase.class.getName());
assertThat(discoveryIssues.getFirst().source()) //
.contains(ClassSource.from(BlankDisplayNamesTestCase.class));
var method = BlankDisplayNamesTestCase.class.getDeclaredMethod("test");
assertThat(discoveryIssues.getLast().message()) //
.isEqualTo("@DisplayName on method '%s' must be declared with a non-blank value.",
method.toGenericString());
assertThat(discoveryIssues.getLast().source()) //
.contains(org.junit.platform.engine.support.descriptor.MethodSource.from(method));
}
// -------------------------------------------------------------------
@SuppressWarnings("unused")
static abstract | but |
java | apache__flink | flink-streaming-java/src/test/java/org/apache/flink/streaming/runtime/tasks/SubtaskCheckpointCoordinatorTest.java | {
"start": 38140,
"end": 41304
} | class ____ implements OneInputStreamOperator<String, String> {
private static final long serialVersionUID = 1L;
private final OperatorSnapshotFutures operatorSnapshotFutures;
private boolean checkpointed = false;
CheckpointOperator(OperatorSnapshotFutures operatorSnapshotFutures) {
this.operatorSnapshotFutures = operatorSnapshotFutures;
}
boolean isCheckpointed() {
return checkpointed;
}
@Override
public void open() throws Exception {}
@Override
public void finish() throws Exception {}
@Override
public void close() throws Exception {}
@Override
public void prepareSnapshotPreBarrier(long checkpointId) {}
@Override
public OperatorSnapshotFutures snapshotState(
long checkpointId,
long timestamp,
CheckpointOptions checkpointOptions,
CheckpointStreamFactory storageLocation)
throws Exception {
this.checkpointed = true;
return operatorSnapshotFutures;
}
@Override
public void initializeState(StreamTaskStateInitializer streamTaskStateManager)
throws Exception {}
@Override
public void setKeyContextElement1(StreamRecord<?> record) {}
@Override
public void setKeyContextElement2(StreamRecord<?> record) {}
@Override
public OperatorMetricGroup getMetricGroup() {
return UnregisteredMetricGroups.createUnregisteredOperatorMetricGroup();
}
@Override
public OperatorID getOperatorID() {
return new OperatorID();
}
@Override
public void notifyCheckpointComplete(long checkpointId) {}
@Override
public void notifyCheckpointAborted(long checkpointId) {}
@Override
public void setCurrentKey(Object key) {}
@Override
public Object getCurrentKey() {
return null;
}
@Override
public void processElement(StreamRecord<String> element) throws Exception {}
@Override
public void processWatermark(Watermark mark) throws Exception {}
@Override
public void processLatencyMarker(LatencyMarker latencyMarker) {}
@Override
public void processWatermarkStatus(WatermarkStatus watermarkStatus) throws Exception {}
}
private static SubtaskCheckpointCoordinator coordinator(ChannelStateWriter channelStateWriter)
throws IOException {
return new SubtaskCheckpointCoordinatorImpl(
new TestCheckpointStorageWorkerView(100),
"test",
StreamTaskActionExecutor.IMMEDIATE,
newDirectExecutorService(),
new DummyEnvironment(),
(message, unused) -> fail(message),
(unused1, unused2) -> CompletableFuture.completedFuture(null),
0,
channelStateWriter,
true,
(callable, duration) -> () -> {});
}
}
| CheckpointOperator |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/event/cluster/AdaptiveRefreshTriggeredEvent.java | {
"start": 2662,
"end": 3590
} | class ____ extends AdaptiveRefreshTriggeredEvent {
private final int attempt;
public PersistentReconnectsAdaptiveRefreshTriggeredEvent(Supplier<Partitions> partitionsSupplier,
Runnable topologyRefreshScheduler, int attempt) {
super(partitionsSupplier, topologyRefreshScheduler,
ClusterTopologyRefreshOptions.RefreshTrigger.PERSISTENT_RECONNECTS);
this.attempt = attempt;
}
/**
* Return the reconnection-attempt at which this event was emitted.
*
* @return the reconnection-attempt at which this event was emitted.
*/
public int getAttempt() {
return attempt;
}
}
/**
* Extension to {@link AdaptiveRefreshTriggeredEvent} providing the uncovered slot value.
*
* @since 6.2.3
*/
public static | PersistentReconnectsAdaptiveRefreshTriggeredEvent |
java | apache__kafka | server-common/src/main/java/org/apache/kafka/server/common/UnitTestFeatureVersion.java | {
"start": 1088,
"end": 2409
} | enum ____ implements FeatureVersion {
UT_FV0_0(0, MetadataVersion.MINIMUM_VERSION, Map.of()),
UT_FV0_1(1, MetadataVersion.IBP_3_7_IV0, Map.of());
private final short featureLevel;
private final MetadataVersion bootstrapMetadataVersion;
private final Map<String, Short> dependencies;
public static final String FEATURE_NAME = "unit.test.feature.version.0";
public static final FV0 LATEST_PRODUCTION = UT_FV0_1;
FV0(int featureLevel, MetadataVersion bootstrapMetadataVersion, Map<String, Short> dependencies) {
this.featureLevel = (short) featureLevel;
this.bootstrapMetadataVersion = bootstrapMetadataVersion;
this.dependencies = dependencies;
}
@Override
public short featureLevel() {
return featureLevel;
}
@Override
public String featureName() {
return FEATURE_NAME;
}
@Override
public MetadataVersion bootstrapMetadataVersion() {
return bootstrapMetadataVersion;
}
@Override
public Map<String, Short> dependencies() {
return dependencies;
}
}
/**
* The feature is used to test latest production lags behind the default value.
*/
public | FV0 |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/NullAwareGetters.java | {
"start": 953,
"end": 1265
} | interface ____ {
/** If no field is null, return false. Returns true if one of the columns is null. */
boolean anyNull();
/**
* For the input fields, if no field is null, return false. Returns true if one of the columns
* is null.
*/
boolean anyNull(int[] fields);
}
| NullAwareGetters |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ShouldHaveEvenArgsTest.java | {
"start": 8470,
"end": 9749
} | class ____ {
private static final Multimap<String, String> multimap = ImmutableMultimap.of();
public void testWithMinimalArgs() {
assertThat(multimap).containsExactly("hello", "there");
}
public void testWithEvenArgs() {
assertThat(multimap).containsExactly("hello", "there", "hello", "there");
}
public void testWithVarargs(Object... args) {
assertThat(multimap).containsExactly("hello", args);
assertThat(multimap).containsExactly("hello", "world", args);
}
public void testWithArray() {
String[] arg = {"hello", "there"};
assertThat(multimap).containsExactly("yolo", arg);
String key = "hello";
Object[] value = new Object[] {};
Object[][] args = new Object[][] {};
assertThat(multimap).containsExactly(key, value);
assertThat(multimap).containsExactly(key, value, (Object[]) args);
assertThat(multimap).containsExactly(key, value, key, value, key, value);
}
}\
""")
.doTest();
}
}
| ShouldHaveEvenArgsMultimapNegativeCases |
java | google__truth | core/src/main/java/com/google/common/truth/Subject.java | {
"start": 4240,
"end": 21365
} | interface ____<SubjectT extends Subject, ActualT> {
/** Creates a new {@link Subject}. */
SubjectT createSubject(FailureMetadata metadata, @Nullable ActualT actual);
}
private final FailureMetadata metadata;
private final @Nullable Object actual;
/**
* The constructor is for use by subclasses only. If you want to create an instance of this class
* itself, call {@link Subject#check(String, Object...) check(...)}{@code .that(actual)}.
*/
protected Subject(FailureMetadata metadata, @Nullable Object actual) {
this.metadata = metadata.updateForSubject(this);
this.actual = actual;
}
/** Checks that the value under test is null. */
public void isNull() {
standardIsEqualTo(null);
}
/**
* Checks that the value under test is not null.
*
* <p>Kotlin users: A call to {@code assertThat(foo).isNotNull()} does not perform a smart cast on
* {@code foo}. If you require a smart cast, consider using {@code foo!!} or {@code
* assertNotNull(foo)} instead. The tradeoffs are that those will look different than any
* surrounding Truth assertions and that they will produce somewhat worse failure messages.
*/
public void isNotNull() {
standardIsNotEqualTo(null);
}
/**
* Checks that the value under test is equal to the given object. For the purposes of this
* comparison, two objects are equal if any of the following is true:
*
* <ul>
* <li>they are equal according to {@link Objects#equals}
* <li>they are arrays and are considered equal by the appropriate {@link Arrays#equals}
* overload
* <li>they are boxed integer types ({@link Byte}, {@link Short}, {@link Character}, {@link
* Integer}, or {@link Long}) and they are numerically equal when converted to {@link Long}.
* <li>the actual value is a boxed floating-point type ({@link Double} or {@link Float}), the
* expected value is an {@link Integer}, and the two are numerically equal when converted to
* {@link Double}. (This allows {@code assertThat(someDouble).isEqualTo(0)} to pass.)
* </ul>
*
* <p><b>Note:</b> This method does not test the {@link Object#equals} implementation itself; it
* <i>assumes</i> that method is functioning correctly according to its contract. Testing an
* {@code equals} implementation requires a utility such as <a
* href="https://central.sonatype.com/artifact/com.google.guava/guava-testlib">guava-testlib</a>'s
* <a
* href="https://www.javadoc.io/doc/com.google.guava/guava-testlib/latest/com/google/common/testing/EqualsTester.html">EqualsTester</a>.
*
* <p>In some cases, this method might not even call {@code equals}. It may instead perform other
* tests that will return the same result as long as {@code equals} is implemented according to
* the contract for its type.
*/
/*
* TODO(cpovirk): Possibly ban overriding isEqualTo+isNotEqualTo in favor of a
* compareForEquality(Object, Object) method. That way, people would need to override only one
* method, they would get a ComparisonFailure and other message niceties, and they'd have less to
* test.
*/
public void isEqualTo(@Nullable Object expected) {
standardIsEqualTo(expected);
}
private void standardIsEqualTo(@Nullable Object expected) {
ComparisonResult difference = compareForEquality(expected);
if (!difference.valuesAreEqual()) {
failEqualityCheck(EqualityCheck.EQUAL, expected, difference);
}
}
/**
* Checks that the value under test is not equal to the given object. The meaning of equality is
* the same as for the {@link #isEqualTo} method.
*/
public void isNotEqualTo(@Nullable Object other) {
standardIsNotEqualTo(other);
}
private void standardIsNotEqualTo(@Nullable Object other) {
ComparisonResult difference = compareForEquality(other);
if (difference.valuesAreEqual()) {
String otherAsString = formatActualOrExpected(other);
if (actualCustomStringRepresentation().equals(otherAsString)) {
failWithoutActual(fact("expected not to be", otherAsString));
} else {
failWithoutActual(
fact("expected not to be", otherAsString),
fact(
"but was; string representation of actual value",
actualCustomStringRepresentation()));
}
}
}
/**
* Returns whether {@code actual} equals {@code expected} differ and, in some cases, a description
* of how they differ.
*
* <p>The equality check follows the rules described on {@link #isEqualTo}.
*/
private ComparisonResult compareForEquality(@Nullable Object expected) {
if (actual == null && expected == null) {
return ComparisonResult.equal();
} else if (actual == null || expected == null) {
return ComparisonResult.differentNoDescription();
} else if (actual instanceof byte[] && expected instanceof byte[]) {
/*
* For a special error message and to use faster Arrays.equals to avoid at least one timeout.
*
* TODO(cpovirk): For performance, use Arrays.equals for other array types (here and/or in
* checkArrayEqualsRecursive)? Exception: double[] and float[], whose GWT implementations I
* think may have both false positives and false negatives (so we can't even use Arrays.equals
* as a fast path for them, nor deepEquals for an Object[] that might contain them). We would
* still fall back to the slower checkArrayEqualsRecursive to produce a nicer failure message
* -- but naturally only for tests that are about to fail, when performance matters less.
*/
return checkByteArrayEquals((byte[]) expected, (byte[]) actual);
} else if (actual.getClass().isArray() && expected.getClass().isArray()) {
return checkArrayEqualsRecursive(expected, actual, "");
} else if (isIntegralBoxedPrimitive(actual) && isIntegralBoxedPrimitive(expected)) {
return ComparisonResult.fromEqualsResult(integralValue(actual) == integralValue(expected));
} else if (actual instanceof Double && expected instanceof Double) {
return ComparisonResult.fromEqualsResult(
Double.compare((Double) actual, (Double) expected) == 0);
} else if (actual instanceof Float && expected instanceof Float) {
return ComparisonResult.fromEqualsResult(
Float.compare((Float) actual, (Float) expected) == 0);
} else if (actual instanceof Double && expected instanceof Integer) {
return ComparisonResult.fromEqualsResult(
Double.compare((Double) actual, (Integer) expected) == 0);
} else if (actual instanceof Float && expected instanceof Integer) {
return ComparisonResult.fromEqualsResult(
Double.compare((Float) actual, (Integer) expected) == 0);
} else {
return ComparisonResult.fromEqualsResult(actual == expected || actual.equals(expected));
}
}
private static boolean isIntegralBoxedPrimitive(@Nullable Object o) {
return o instanceof Byte
|| o instanceof Short
|| o instanceof Character
|| o instanceof Integer
|| o instanceof Long;
}
private static long integralValue(Object o) {
if (o instanceof Character) {
return (Character) o;
} else if (o instanceof Number) {
return ((Number) o).longValue();
} else {
throw new AssertionError(o + " must be either a Character or a Number.");
}
}
/**
* Checks that the value under test is the same instance as the given object.
*
* <p>This method considers {@code null} to be "the same instance as" {@code null} and not the
* same instance as anything else.
*/
public final void isSameInstanceAs(@Nullable Object expected) {
if (actual != expected) {
failEqualityCheck(
SAME_INSTANCE,
expected,
/*
* Pass through *whether* the values are equal so that failEqualityCheck() can print that
* information. But remove the description of the difference, which is always about
* content, since people calling isSameInstanceAs() are explicitly not interested in
* content, only object identity.
*/
compareForEquality(expected).withoutDescription());
}
}
/**
* Checks that the value under test is not the same instance as the given object.
*
* <p>This method considers {@code null} to be "the same instance as" {@code null} and not the
* same instance as anything else.
*/
public final void isNotSameInstanceAs(@Nullable Object other) {
if (actual == other) {
/*
* We use actualCustomStringRepresentation() because it might be overridden to be better than
* actual.toString()/other.toString().
*/
failWithoutActual(
fact("expected not to be specific instance", actualCustomStringRepresentation()));
}
}
/**
* Checks that the value under test is an instance of the given class.
*
* <p>Kotlin users: A call to {@code assertThat(foo).isInstanceOf(Bar::class.java)} does not
* perform a smart cast on {@code foo}. If you require a smart cast, consider using {@code foo as
* Bar} or {@code assertIs<Bar>(foo)} instead. The tradeoffs are that those will look different
* than any surrounding Truth assertions and that they will produce worse failure messages (for
* example, by not including the actual value, only its type).
*/
public void isInstanceOf(@Nullable Class<?> clazz) {
if (clazz == null) {
failWithoutActual(
simpleFact("could not perform instanceof check because expected type was null"),
actualValue("value to check was"));
return;
}
clazz = Primitives.wrap(clazz);
if (actual == null) {
failWithActual("expected instance of", longName(clazz));
return;
}
if (!isInstanceOfType(actual, clazz)) {
if (classMetadataUnsupported()) {
throw new UnsupportedOperationException(
actualCustomStringRepresentation()
+ ", an instance of "
+ longName(actual.getClass())
+ ", may or may not be an instance of "
+ longName(clazz)
+ ". Under -XdisableClassMetadata, we do not have enough information to tell.");
}
failWithoutActual(
fact("expected instance of", longName(clazz)),
fact("but was instance of", longName(actual.getClass())),
fact("with value", actualCustomStringRepresentation()));
}
}
/** Checks that the value under test is not an instance of the given class. */
public void isNotInstanceOf(@Nullable Class<?> clazz) {
if (clazz == null) {
failWithoutActual(
simpleFact("could not perform instanceof check because expected type was null"),
actualValue("value to check was"));
return;
}
clazz = Primitives.wrap(clazz);
if (classMetadataUnsupported()) {
throw new UnsupportedOperationException(
"isNotInstanceOf is not supported under -XdisableClassMetadata");
}
if (actual == null) {
return; // null is not an instance of clazz.
}
if (isInstanceOfType(actual, clazz)) {
failWithActual("expected not to be an instance of", longName(clazz));
/*
* TODO(cpovirk): Consider including actual.getClass() if it's not clazz itself but only a
* subtype.
*/
}
}
/** Checks that the value under test is equal to any element in the given iterable. */
public void isIn(@Nullable Iterable<?> iterable) {
if (iterable == null) {
failWithoutActual(
simpleFact(
"could not perform equality check because iterable of elements to compare to was"
+ " null"),
valueToCompareWas());
return;
}
if (!contains(iterable, actual)) {
failWithActual("expected any of", iterable);
}
}
private static boolean contains(Iterable<?> haystack, @Nullable Object needle) {
if (isKotlinRange(haystack)) {
return kotlinRangeContains(haystack, needle);
}
return Iterables.contains(haystack, needle);
}
/** Checks that the value under test is equal to any of the given elements. */
public void isAnyOf(
@Nullable Object first, @Nullable Object second, @Nullable Object @Nullable ... rest) {
isIn(accumulate(first, second, rest));
}
/** Checks that the value under test is not equal to any element in the given iterable. */
public void isNotIn(@Nullable Iterable<?> iterable) {
if (iterable == null) {
failWithoutActual(
simpleFact(
"could not perform equality check because iterable of elements to compare to was"
+ " null"),
valueToCompareWas());
return;
}
if (Iterables.contains(iterable, actual)) {
failWithActual("expected not to be any of", iterable);
}
}
/** Checks that the value under test is not equal to any of the given elements. */
public void isNoneOf(
@Nullable Object first, @Nullable Object second, @Nullable Object @Nullable ... rest) {
isNotIn(accumulate(first, second, rest));
}
  /** Returns the actual value under test. */
  final @Nullable Object actualForPackageMembersToCall() {
    // Package-internal accessor, per the method name; keeps the value readable within the package.
    return actual;
  }
  /**
   * Returns a string representation of the actual value for inclusion in failure messages.
   *
   * <p>Subjects should override this with care.
   *
   * <p>By default, this method returns {@code String.valueOf(getActualValue())} for most types. It
   * does have some special logic for a few cases, like arrays.
   */
  /*
   * TODO(cpovirk): Consider potential improvements to formatting APIs. For example, if users want
   * to format the actual value specially, then it seems likely that they'll want to format the
   * expected value specially, too. And that applies just as well to APIs like isIn(). Maybe we'll
   * want an API that supports formatting those values, too (like formatActualOrExpected below)? See
   * also the related b/70930431.
   */
  @ForOverride
  protected String actualCustomStringRepresentation() {
    // Delegates to the shared formatter, which special-cases arrays and floating-point values.
    return formatActualOrExpected(actual);
  }
  /**
   * Access to {@link #actualCustomStringRepresentation()} from within the package. For creating
   * {@link Fact} instances, we should use {@link #butWas} or {@link #actualValue} instead of this.
   * This method is useful primarily for delegating from one subject's {@link
   * #actualCustomStringRepresentation} method to another's.
   */
  final String actualCustomStringRepresentationForPackageMembersToCall() {
    // Thin package-visible forwarder around the protected, overridable method.
    return actualCustomStringRepresentation();
  }
private static String formatActualOrExpected(@Nullable Object o) {
if (o instanceof byte[]) {
return base16((byte[]) o);
} else if (o != null && o.getClass().isArray()) {
return String.valueOf(arrayAsListRecursively(o));
} else if (o instanceof Double) {
return doubleToString((Double) o);
} else if (o instanceof Float) {
return floatToString((Float) o);
} else {
// TODO(cpovirk): Consider renaming the called method to mention "NonArray."
/*
* TODO(cpovirk): Should the called method and arrayAsListRecursively(...) both call back into
* formatActualOrExpected for its handling of byte[] and float/double? Or is there some other
* restructuring of this set of methods that we should undertake?
*/
return stringValueForFailure(o);
}
}
// We could add a dep on com.google.common.io, but that seems overkill for base16 encoding
private static String base16(byte[] bytes) {
StringBuilder sb = new StringBuilder(2 * bytes.length);
for (byte b : bytes) {
sb.append(hexDigitsUpper[(b >> 4) & 0xf]).append(hexDigitsUpper[b & 0xf]);
}
return sb.toString();
}
  /** Uppercase hexadecimal digits, indexed by nibble value; used by {@code base16}. */
  private static final char[] hexDigitsUpper = "0123456789ABCDEF".toCharArray();
private static @Nullable Object arrayAsListRecursively(@Nullable Object input) {
if (input instanceof Object[]) {
return Lists.<@Nullable Object, @Nullable Object>transform(
asList((@Nullable Object[]) input), Subject::arrayAsListRecursively);
} else if (input instanceof boolean[]) {
return asList((boolean[]) input);
} else if (input instanceof int[]) {
return asList((int[]) input);
} else if (input instanceof long[]) {
return asList((long[]) input);
} else if (input instanceof short[]) {
return asList((short[]) input);
} else if (input instanceof byte[]) {
return asList((byte[]) input);
} else if (input instanceof double[]) {
return doubleArrayAsString((double[]) input);
} else if (input instanceof float[]) {
return floatArrayAsString((float[]) input);
} else if (input instanceof char[]) {
return asList((char[]) input);
} else {
return input;
}
}
/**
* The result of comparing two objects for equality. This includes both the "equal"/"not-equal"
* bit and, in the case of "not equal," optional facts describing the difference.
*/
private static final | Factory |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/util/InstanceFilterTests.java | {
"start": 815,
"end": 2137
} | class ____ {
@Test
void emptyFilterApplyMatchIfEmpty() {
InstanceFilter<String> filter = new InstanceFilter<>(null, null);
match(filter, "foo");
match(filter, "bar");
}
@Test
void includesFilter() {
InstanceFilter<String> filter = new InstanceFilter<>(List.of("First", "Second"), null);
match(filter, "Second");
doNotMatch(filter, "foo");
}
@Test
void excludesFilter() {
InstanceFilter<String> filter = new InstanceFilter<>(null, List.of("First", "Second"));
doNotMatch(filter, "Second");
match(filter, "foo");
}
@Test
void includesAndExcludesFilters() {
InstanceFilter<String> filter = new InstanceFilter<>(List.of("foo", "Bar"), List.of("First", "Second"));
doNotMatch(filter, "Second");
match(filter, "foo");
}
@Test
void includesAndExcludesFiltersConflict() {
InstanceFilter<String> filter = new InstanceFilter<>(List.of("First"), List.of("First"));
doNotMatch(filter, "First");
}
private static <T> void match(InstanceFilter<T> filter, T candidate) {
assertThat(filter.match(candidate)).as("filter '" + filter + "' should match " + candidate).isTrue();
}
private static <T> void doNotMatch(InstanceFilter<T> filter, T candidate) {
assertThat(filter.match(candidate)).as("filter '" + filter + "' should not match " + candidate).isFalse();
}
}
| InstanceFilterTests |
java | spring-projects__spring-boot | smoke-test/spring-boot-smoke-test-hibernate/src/main/java/smoketest/jpa/SampleJpaApplication.java | {
"start": 801,
"end": 938
} | class ____ {
public static void main(String[] args) {
SpringApplication.run(SampleJpaApplication.class, args);
}
}
| SampleJpaApplication |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/io/support/PropertySourceProcessorTests.java | {
"start": 4829,
"end": 5101
} | class ____ implements PropertySourceFactory {
@Override
public PropertySource<?> createPropertySource(String name, EncodedResource resource) {
throw mock(PlaceholderResolutionException.class);
}
}
private static | PlaceholderResolutionExceptionPropertySourceFactory |
java | netty__netty | codec-protobuf/src/main/java/io/netty/handler/codec/protobuf/ProtobufEncoderNano.java | {
"start": 2233,
"end": 2918
} | class ____ extends MessageToMessageEncoder<MessageNano> {
public ProtobufEncoderNano() {
super(MessageNano.class);
}
@Override
protected void encode(
ChannelHandlerContext ctx, MessageNano msg, List<Object> out) throws Exception {
final int size = msg.getSerializedSize();
final ByteBuf buffer = ctx.alloc().heapBuffer(size, size);
final byte[] array = buffer.array();
CodedOutputByteBufferNano cobbn = CodedOutputByteBufferNano.newInstance(array,
buffer.arrayOffset(), buffer.capacity());
msg.writeTo(cobbn);
buffer.writerIndex(size);
out.add(buffer);
}
}
| ProtobufEncoderNano |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/security/DelegationTokenRenewer.java | {
"start": 13060,
"end": 21416
} | class ____ {
Token<?> token;
Configuration conf;
TokenWithConf(Token<?> token, Configuration conf) {
this.token = token;
this.conf = conf;
}
}
private LinkedBlockingQueue<TokenWithConf> queue =
new LinkedBlockingQueue<TokenWithConf>();
public DelegationTokenCancelThread() {
super("Delegation Token Canceler");
setDaemon(true);
}
public void cancelToken(Token<?> token,
Configuration conf) {
TokenWithConf tokenWithConf = new TokenWithConf(token, conf);
while (!queue.offer(tokenWithConf)) {
LOG.warn("Unable to add token " + token + " for cancellation. " +
"Will retry..");
try {
Thread.sleep(100);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
}
public void work() {
TokenWithConf tokenWithConf = null;
while (true) {
try {
tokenWithConf = queue.take();
final TokenWithConf current = tokenWithConf;
if (LOG.isDebugEnabled()) {
LOG.debug("Cancelling token " + tokenWithConf.token.getService());
}
// need to use doAs so that http can find the kerberos tgt
UserGroupInformation.getLoginUser()
.doAs(new PrivilegedExceptionAction<Void>(){
@Override
public Void run() throws Exception {
current.token.cancel(current.conf);
return null;
}
});
} catch (IOException e) {
LOG.warn("Failed to cancel token " + tokenWithConf.token + " " +
StringUtils.stringifyException(e));
} catch (RuntimeException e) {
LOG.warn("Failed to cancel token " + tokenWithConf.token + " " +
StringUtils.stringifyException(e));
} catch (InterruptedException ie) {
return;
} catch (Throwable t) {
LOG.warn("Got exception " + StringUtils.stringifyException(t) +
". Exiting..");
System.exit(-1);
}
}
}
}
@VisibleForTesting
public Set<Token<?>> getDelegationTokens() {
Set<Token<?>> tokens = new HashSet<Token<?>>();
for (Set<DelegationTokenToRenew> tokenList : appTokens.values()) {
for (DelegationTokenToRenew token : tokenList) {
tokens.add(token.token);
}
}
return tokens;
}
/**
* Asynchronously add application tokens for renewal.
* @param applicationId added application
* @param ts tokens
* @param shouldCancelAtEnd true if tokens should be canceled when the app is
* done else false.
* @param user user
* @param tokenConf tokenConf sent by the app-submitter
*/
public void addApplicationAsync(ApplicationId applicationId, Credentials ts,
boolean shouldCancelAtEnd, String user, Configuration tokenConf) {
processDelegationTokenRenewerEvent(new DelegationTokenRenewerAppSubmitEvent(
applicationId, ts, shouldCancelAtEnd, user, tokenConf));
}
/**
* Asynchronously add application tokens for renewal.
* @param applicationId
* added application
* @param ts
* tokens
* @param shouldCancelAtEnd
* true if tokens should be canceled when the app is done else false.
* @param user user
* @param tokenConf tokenConf sent by the app-submitter
*/
public void addApplicationAsyncDuringRecovery(ApplicationId applicationId,
Credentials ts, boolean shouldCancelAtEnd, String user,
Configuration tokenConf) {
processDelegationTokenRenewerEvent(
new DelegationTokenRenewerAppRecoverEvent(applicationId, ts,
shouldCancelAtEnd, user, tokenConf));
}
// Only for testing
// Synchronously renew delegation tokens.
public void addApplicationSync(ApplicationId applicationId, Credentials ts,
boolean shouldCancelAtEnd, String user) throws IOException,
InterruptedException {
handleAppSubmitEvent(new DelegationTokenRenewerAppSubmitEvent(
applicationId, ts, shouldCancelAtEnd, user, new Configuration()));
}
private void handleAppSubmitEvent(AbstractDelegationTokenRenewerAppEvent evt)
throws IOException, InterruptedException {
ApplicationId applicationId = evt.getApplicationId();
Credentials ts = evt.getCredentials();
boolean shouldCancelAtEnd = evt.shouldCancelAtEnd();
if (ts == null) {
return; // nothing to add
}
LOG.debug("Registering tokens for renewal for: appId = {}", applicationId);
Collection<Token<?>> tokens = ts.getAllTokens();
long now = System.currentTimeMillis();
// find tokens for renewal, but don't add timers until we know
// all renewable tokens are valid
// At RM restart it is safe to assume that all the previously added tokens
// are valid
appTokens.put(applicationId,
Collections.synchronizedSet(new HashSet<DelegationTokenToRenew>()));
Set<DelegationTokenToRenew> tokenList = new HashSet<DelegationTokenToRenew>();
boolean hasHdfsToken = false;
for (Token<?> token : tokens) {
if (token.isManaged()) {
if (token.getKind().equals(HDFS_DELEGATION_KIND)) {
LOG.info(applicationId + " found existing hdfs token " + token);
hasHdfsToken = true;
}
if (skipTokenRenewal(token)) {
continue;
}
DelegationTokenToRenew dttr = allTokens.get(token);
if (dttr == null) {
Configuration tokenConf;
if (evt.tokenConf != null) {
// Override conf with app provided conf - this is required in cases
// where RM does not have the required conf to communicate with
// remote hdfs cluster. The conf is provided by the application
// itself.
tokenConf = evt.tokenConf;
LOG.info("Using app provided token conf for renewal,"
+ " number of configs = " + tokenConf.size());
if (LOG.isDebugEnabled()) {
for (Iterator<Map.Entry<String, String>> itor =
tokenConf.iterator(); itor.hasNext(); ) {
Map.Entry<String, String> entry = itor.next();
LOG.debug("Token conf key is {} and value is {}",
entry.getKey(), entry.getValue());
}
}
} else {
tokenConf = getConfig();
}
dttr = new DelegationTokenToRenew(Arrays.asList(applicationId), token,
tokenConf, now, shouldCancelAtEnd, evt.getUser());
try {
renewToken(dttr);
} catch (IOException ioe) {
if (ioe instanceof SecretManager.InvalidToken
&& dttr.maxDate < Time.now()
&& evt instanceof DelegationTokenRenewerAppRecoverEvent
&& token.getKind().equals(HDFS_DELEGATION_KIND)) {
LOG.info("Failed to renew hdfs token " + dttr
+ " on recovery as it expired, requesting new hdfs token for "
+ applicationId + ", user=" + evt.getUser(), ioe);
requestNewHdfsDelegationTokenAsProxyUser(
Arrays.asList(applicationId), evt.getUser(),
evt.shouldCancelAtEnd());
continue;
}
throw new IOException("Failed to renew token: " + dttr.token, ioe);
}
}
tokenList.add(dttr);
}
}
if (!tokenList.isEmpty()) {
// Renewing token and adding it to timer calls are separated purposefully
// If user provides incorrect token then it should not be added for
// renewal.
for (DelegationTokenToRenew dtr : tokenList) {
DelegationTokenToRenew currentDtr =
allTokens.putIfAbsent(dtr.token, dtr);
if (currentDtr != null) {
// another job beat us
currentDtr.referringAppIds.add(applicationId);
appTokens.get(applicationId).add(currentDtr);
} else {
appTokens.get(applicationId).add(dtr);
setTimerForTokenRenewal(dtr);
}
}
}
if (!hasHdfsToken) {
requestNewHdfsDelegationTokenAsProxyUser(Arrays.asList(applicationId),
evt.getUser(),
shouldCancelAtEnd);
}
}
/**
* Task - to renew a token
*
*/
private | TokenWithConf |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/repeatable/BothSelectAndSelectProviderMapper.java | {
"start": 805,
"end": 1003
} | interface ____ {
@Select("SELECT * FROM users WHERE id = #{id}")
@SelectProvider(type = SqlProvider.class, method = "getUser")
User getUser(Integer id);
final | BothSelectAndSelectProviderMapper |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/json/JsonArray.java | {
"start": 1156,
"end": 1484
} | class ____ encapsulate the notion of a JSON array.
*
* The implementation adheres to the <a href="http://rfc-editor.org/rfc/rfc7493.txt">RFC-7493</a> to support Temporal
* data types as well as binary data.
*
* Please see the documentation for more information.
*
* @author <a href="http://tfox.org">Tim Fox</a>
*/
public | to |
java | resilience4j__resilience4j | resilience4j-bulkhead/src/main/java/io/github/resilience4j/bulkhead/ThreadPoolBulkheadRegistry.java | {
"start": 13475,
"end": 16952
} | class ____ {
private static final String DEFAULT_CONFIG = "default";
private RegistryStore<ThreadPoolBulkhead> registryStore;
private Map<String, ThreadPoolBulkheadConfig> threadPoolBulkheadConfigsMap;
private List<RegistryEventConsumer<ThreadPoolBulkhead>> registryEventConsumers;
private Map<String, String> tags;
public Builder() {
this.threadPoolBulkheadConfigsMap = new java.util.HashMap<>();
this.registryEventConsumers = new ArrayList<>();
}
public Builder withRegistryStore(RegistryStore<ThreadPoolBulkhead> registryStore) {
this.registryStore = registryStore;
return this;
}
/**
* Configures a ThreadPoolBulkheadRegistry with a custom default ThreadPoolBulkhead configuration.
*
* @param threadPoolBulkheadConfig a custom default ThreadPoolBulkhead configuration
* @return a {@link ThreadPoolBulkheadRegistry.Builder}
*/
public Builder withThreadPoolBulkheadConfig(ThreadPoolBulkheadConfig threadPoolBulkheadConfig) {
threadPoolBulkheadConfigsMap.put(DEFAULT_CONFIG, threadPoolBulkheadConfig);
return this;
}
/**
* Configures a ThreadPoolBulkheadRegistry with a custom ThreadPoolBulkhead configuration.
*
* @param configName configName for a custom shared ThreadPoolBulkhead configuration
* @param configuration a custom shared ThreadPoolBulkhead configuration
* @return a {@link ThreadPoolBulkheadRegistry.Builder}
* @throws IllegalArgumentException if {@code configName.equals("default")}
*/
public Builder addThreadPoolBulkheadConfig(String configName, ThreadPoolBulkheadConfig configuration) {
if (configName.equals(DEFAULT_CONFIG)) {
throw new IllegalArgumentException(
"You cannot add another configuration with name 'default' as it is preserved for default configuration");
}
threadPoolBulkheadConfigsMap.put(configName, configuration);
return this;
}
/**
* Configures a ThreadPoolBulkheadRegistry with a ThreadPoolBulkhead registry event consumer.
*
* @param registryEventConsumer a ThreadPoolBulkhead registry event consumer.
* @return a {@link ThreadPoolBulkheadRegistry.Builder}
*/
public Builder addRegistryEventConsumer(RegistryEventConsumer<ThreadPoolBulkhead> registryEventConsumer) {
this.registryEventConsumers.add(registryEventConsumer);
return this;
}
/**
* Configures a ThreadPoolBulkheadRegistry with Tags.
* <p>
* Tags added to the registry will be added to every instance created by this registry.
*
* @param tags default tags to add to the registry.
* @return a {@link ThreadPoolBulkheadRegistry.Builder}
*/
public Builder withTags(Map<String, String> tags) {
this.tags = tags;
return this;
}
/**
* Builds a ThreadPoolBulkheadRegistry
*
* @return the ThreadPoolBulkheadRegistry
*/
public ThreadPoolBulkheadRegistry build() {
return new InMemoryThreadPoolBulkheadRegistry(threadPoolBulkheadConfigsMap, registryEventConsumers, tags,
registryStore);
}
}
}
| Builder |
java | elastic__elasticsearch | distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCliProvider.java | {
"start": 600,
"end": 810
} | class ____ implements CliToolProvider {
@Override
public String name() {
return "server";
}
@Override
public Command create() {
return new ServerCli();
}
}
| ServerCliProvider |
java | junit-team__junit5 | junit-platform-commons/src/main/java/org/junit/platform/commons/util/ModuleUtils.java | {
"start": 10671,
"end": 10719
} | class ____.
*
* @since 1.11
*/
static | scanner |
java | spring-projects__spring-boot | module/spring-boot-security/src/main/java/org/springframework/boot/security/autoconfigure/MissingAlternativeUserDetailsManagerOrUserPropertiesConfigured.java | {
"start": 1743,
"end": 1842
} | class ____ {
}
@ConditionalOnProperty("spring.security.user.name")
static final | MissingAlternative |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/config/MvcNamespaceTests.java | {
"start": 50512,
"end": 50880
} | class ____ extends MockServletContext {
@Override
public RequestDispatcher getNamedDispatcher(String path) {
if (path.equals("default") || path.equals("custom")) {
return new MockRequestDispatcher("/");
}
else {
return null;
}
}
@Override
public String getVirtualServerName() {
return null;
}
}
public static | TestMockServletContext |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/introspect/POJOPropertiesCollectorTest.java | {
"start": 4166,
"end": 4437
} | class ____ { // important: non-static!
private int i;
// annotation does not matter -- just need one on the last argument
public Issue701Bean(@JsonProperty int i) { this.i = i; }
public int getX() { return i; }
}
static | Issue701Bean |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeFSDataSetSink.java | {
"start": 1539,
"end": 4085
} | class ____ implements MetricsSink {
private Set<String> nameMap;
private int count;
/**
* add a metrics record in the sink
*
* @param record the record to add
*/
@Override
public void putMetrics(MetricsRecord record) {
// let us do this only once, otherwise
// our count could go out of sync.
if (count == 0) {
for (AbstractMetric m : record.metrics()) {
if (nameMap.contains(m.name())) {
count++;
}
}
for (MetricsTag t : record.tags()) {
if (nameMap.contains(t.name())) {
count++;
}
}
}
}
/**
* Flush any buffered metrics
*/
@Override
public void flush() {
}
/**
* Initialize the plugin
*
* @param conf the configuration object for the plugin
*/
@Override
public void init(SubsetConfiguration conf) {
nameMap = new TreeSet<>();
nameMap.add("DfsUsed");
nameMap.add("Capacity");
nameMap.add("Remaining");
nameMap.add("StorageInfo");
nameMap.add("NumFailedVolumes");
nameMap.add("LastVolumeFailureDate");
nameMap.add("EstimatedCapacityLostTotal");
nameMap.add("CacheUsed");
nameMap.add("CacheCapacity");
nameMap.add("NumBlocksCached");
nameMap.add("NumBlocksFailedToCache");
nameMap.add("NumBlocksFailedToUnCache");
nameMap.add("Context");
nameMap.add("Hostname");
}
public int getMapCount() {
return nameMap.size();
}
public int getFoundKeyCount() {
return count;
}
}
@Test
/**
* This test creates a Source and then calls into the Sink that we
* have registered. That is calls into FSDataSetSinkTest
*/
public void testFSDataSetMetrics() throws InterruptedException {
Configuration conf = new HdfsConfiguration();
String bpid = "FSDatSetSink-Test";
SimulatedFSDataset fsdataset = new SimulatedFSDataset(null, conf);
fsdataset.addBlockPool(bpid, conf);
FSDataSetSinkTest sink = new FSDataSetSinkTest();
sink.init(null);
ms.init("Test");
ms.start();
ms.register("FSDataSetSource", "FSDataSetSource", fsdataset);
ms.register("FSDataSetSink", "FSDataSetSink", sink);
ms.startMetricsMBeans();
ms.publishMetricsNow();
Thread.sleep(4000);
ms.stopMetricsMBeans();
ms.shutdown();
// make sure we got all expected metric in the call back
assertEquals(sink.getMapCount(), sink.getFoundKeyCount());
}
}
| FSDataSetSinkTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/mapper/LegacyTypeFieldMapper.java | {
"start": 1664,
"end": 3580
} | class ____ extends TermBasedFieldType {
LegacyTypeFieldType() {
super(NAME, IndexType.docValuesOnly(), true, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap());
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public boolean isSearchable() {
// The _type field is always searchable.
return true;
}
@Override
public Query termQuery(Object value, SearchExecutionContext context) {
return SortedSetDocValuesField.newSlowExactQuery(name(), indexedValueForSearch(value));
}
@Override
public Query termsQuery(Collection<?> values, SearchExecutionContext context) {
var bytesRefs = values.stream().map(this::indexedValueForSearch).toList();
return SortedSetDocValuesField.newSlowSetQuery(name(), bytesRefs);
}
@Override
public Query rangeQuery(
Object lowerTerm,
Object upperTerm,
boolean includeLower,
boolean includeUpper,
SearchExecutionContext context
) {
return SortedSetDocValuesField.newSlowRangeQuery(
name(),
lowerTerm == null ? null : indexedValueForSearch(lowerTerm),
upperTerm == null ? null : indexedValueForSearch(upperTerm),
includeLower,
includeUpper
);
}
@Override
public boolean mayExistInIndex(SearchExecutionContext context) {
return true;
}
@Override
public ValueFetcher valueFetcher(SearchExecutionContext context, String format) {
return new StoredValueFetcher(context.lookup(), NAME);
}
}
@Override
protected String contentType() {
return CONTENT_TYPE;
}
}
| LegacyTypeFieldType |
java | apache__camel | dsl/camel-kamelet-main/src/main/java/org/apache/camel/main/stub/StubLanguage.java | {
"start": 1154,
"end": 1493
} | class ____ extends ServiceSupport implements Language {
@Override
public Predicate createPredicate(String expression) {
return PredicateBuilder.constant(true);
}
@Override
public Expression createExpression(String expression) {
return ExpressionBuilder.constantExpression(expression);
}
}
| StubLanguage |
java | quarkusio__quarkus | extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/AddNodeSelectorDecorator.java | {
"start": 254,
"end": 1012
} | class ____ extends NamedResourceDecorator<PodSpecFluent<?>> {
private final String nodeSelectorKey;
private final String nodeSelectorValue;
public AddNodeSelectorDecorator(String deploymentName, String nodeSelectorKey, String nodeSelectorValue) {
super(deploymentName);
this.nodeSelectorKey = nodeSelectorKey;
this.nodeSelectorValue = nodeSelectorValue;
}
public void andThenVisit(PodSpecFluent<?> podSpec, ObjectMeta resourceMeta) {
if (Strings.isNotNullOrEmpty(nodeSelectorKey) && Strings.isNotNullOrEmpty(nodeSelectorValue)) {
podSpec.removeFromNodeSelector(nodeSelectorKey);
podSpec.addToNodeSelector(nodeSelectorKey, nodeSelectorValue);
}
}
}
| AddNodeSelectorDecorator |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/onetoone/pkjoincolumn/OneToOnePkJoinColumnNoMapsIdTest.java | {
"start": 2418,
"end": 3044
} | class ____ implements Serializable {
@Id
@Column(name = CHILD_ID)
private Long id;
// this is an alternative to @MapsId, and was
// the way to do it in older versions of JPA,
// but has the disadvantages that:
// a) you need to map the column twice, and
// b) you need to manually assign the id
@OneToOne(optional = false)
@JoinColumn(name = CHILD_ID)
private Parent parent;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public Parent getParent() {
return parent;
}
public void setParent(Parent parent) {
this.parent = parent;
}
}
}
| Child |
java | alibaba__nacos | test/core-test/src/test/java/com/alibaba/nacos/test/core/auth/ConfigAuthCoreITCase.java | {
"start": 2022,
"end": 8153
} | class ____ extends AuthBase {
public static final long TIME_OUT = 2000;
public ConfigService iconfig = null;
@LocalServerPort
private int port;
private final String dataId = "yanlin";
private final String group = "yanlin";
@BeforeEach
void init() throws Exception {
super.init(port);
}
/**
* Cleans up resources after each test execution.
*/
@AfterEach
public void destroy() {
super.destroy();
try {
if (iconfig != null) {
iconfig.shutDown();
}
} catch (NacosException ex) {
// Ignored exception during shutdown
}
}
@Test
void writeWithReadPermission() throws Exception {
// Construct configService:
properties.put(PropertyKeyConst.USERNAME, username1);
properties.put(PropertyKeyConst.PASSWORD, password1);
properties.put(PropertyKeyConst.NAMESPACE, namespace1);
iconfig = NacosFactory.createConfigService(properties);
final String content = "test";
assertFalse(iconfig.publishConfig(dataId, group, content));
assertFalse(iconfig.removeConfig(dataId, group));
}
@Test
void readWithReadPermission() throws Exception {
properties.put(PropertyKeyConst.USERNAME, username1);
properties.put(PropertyKeyConst.PASSWORD, password1);
iconfig = NacosFactory.createConfigService(properties);
final String content = "test" + System.currentTimeMillis();
System.out.println(content);
CountDownLatch latch = new CountDownLatch(1);
iconfig.addListener(dataId, group, new AbstractConfigChangeListener() {
@Override
public void receiveConfigChange(ConfigChangeEvent event) {
ConfigChangeItem cci = event.getChangeItem("content");
System.out.println("content:" + cci);
if (!content.equals(cci.getNewValue())) {
return;
}
latch.countDown();
}
});
TimeUnit.SECONDS.sleep(3L);
properties.put(PropertyKeyConst.USERNAME, username2);
properties.put(PropertyKeyConst.PASSWORD, password2);
ConfigService configService = NacosFactory.createConfigService(properties);
boolean result = configService.publishConfig(dataId, group, content);
assertTrue(result);
TimeUnit.SECONDS.sleep(5L);
String res = iconfig.getConfig(dataId, group, TIME_OUT);
assertEquals(content, res);
latch.await(5L, TimeUnit.SECONDS);
assertEquals(0, latch.getCount());
}
@Test
void writeWithWritePermission() throws Exception {
// Construct configService:
properties.put(PropertyKeyConst.USERNAME, username2);
properties.put(PropertyKeyConst.PASSWORD, password2);
iconfig = NacosFactory.createConfigService(properties);
final String content = "test";
boolean res = iconfig.publishConfig(dataId, group, content);
assertTrue(res);
res = iconfig.removeConfig(dataId, group);
assertTrue(res);
}
@Test
void readWithWritePermission() throws Exception {
properties.put(PropertyKeyConst.NAMESPACE, namespace1);
properties.put(PropertyKeyConst.USERNAME, username2);
properties.put(PropertyKeyConst.PASSWORD, password2);
iconfig = NacosFactory.createConfigService(properties);
final String content = "test" + System.currentTimeMillis();
CountDownLatch latch = new CountDownLatch(1);
iconfig.addListener(dataId, group, new AbstractConfigChangeListener() {
@Override
public void receiveConfigChange(ConfigChangeEvent event) {
ConfigChangeItem cci = event.getChangeItem("content");
System.out.println("content:" + cci);
if (!content.equals(cci.getNewValue())) {
return;
}
latch.countDown();
}
});
TimeUnit.SECONDS.sleep(3L);
boolean result = iconfig.publishConfig(dataId, group, content);
assertTrue(result);
TimeUnit.SECONDS.sleep(5L);
try {
iconfig.getConfig(dataId, group, TIME_OUT);
fail();
} catch (NacosException e) {
assertEquals(HttpStatus.SC_FORBIDDEN, e.getErrCode());
}
latch.await(5L, TimeUnit.SECONDS);
assertTrue(latch.getCount() > 0);
}
@Test
void readWriteWithFullPermission() throws Exception {
properties.put(PropertyKeyConst.USERNAME, username3);
properties.put(PropertyKeyConst.PASSWORD, password3);
iconfig = NacosFactory.createConfigService(properties);
final String content = "test" + System.currentTimeMillis();
CountDownLatch latch = new CountDownLatch(1);
iconfig.addListener(dataId, group, new AbstractConfigChangeListener() {
@Override
public void receiveConfigChange(ConfigChangeEvent event) {
ConfigChangeItem cci = event.getChangeItem("content");
System.out.println("content:" + cci);
if (!content.equals(cci.getNewValue())) {
return;
}
latch.countDown();
}
});
TimeUnit.SECONDS.sleep(3L);
boolean result = iconfig.publishConfig(dataId, group, content);
assertTrue(result);
TimeUnit.SECONDS.sleep(5L);
String res = iconfig.getConfig(dataId, group, TIME_OUT);
assertEquals(content, res);
latch.await(5L, TimeUnit.SECONDS);
assertEquals(0, latch.getCount());
result = iconfig.removeConfig(dataId, group);
assertTrue(result);
}
}
| ConfigAuthCoreITCase |
java | apache__camel | core/camel-core-model/src/main/java/org/apache/camel/model/dataformat/JaxbDataFormat.java | {
"start": 10157,
"end": 10956
} | class ____ JAXB's unmarshaler.
*/
public void setFragment(String fragment) {
this.fragment = fragment;
}
public String getFragment() {
return fragment;
}
public String getFilterNonXmlChars() {
return filterNonXmlChars;
}
/**
* To ignore non xml characheters and replace them with an empty space.
*/
public void setFilterNonXmlChars(String filterNonXmlChars) {
this.filterNonXmlChars = filterNonXmlChars;
}
public String getEncoding() {
return encoding;
}
/**
* To overrule and use a specific encoding
*/
public void setEncoding(String encoding) {
this.encoding = encoding;
}
public String getPartClass() {
return partClass;
}
/**
* Name of | to |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/HttpSecurityRequestMatchersTests.java | {
"start": 11246,
"end": 11885
} | class ____ {
@Bean
PathPatternRequestMatcherBuilderFactoryBean requestMatcherBuilder() {
return new PathPatternRequestMatcherBuilderFactoryBean();
}
@Bean
SecurityFilterChain filterChain(HttpSecurity http, PathPatternRequestMatcher.Builder builder) throws Exception {
// @formatter:off
http
.securityMatchers((secure) -> secure
.requestMatchers(builder.matcher("/path"))
)
.httpBasic(withDefaults())
.authorizeHttpRequests((authorize) -> authorize
.anyRequest().denyAll()
);
return http.build();
// @formatter:on
}
@RestController
static | RequestMatchersMvcMatcherInLambdaConfig |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/Operators.java | {
"start": 63660,
"end": 68801
} | class ____<I, O>
implements InnerOperator<I, O>,
Fuseable,
QueueSubscription<O> {
protected final CoreSubscriber<? super O> actual;
/**
* The value stored by this Mono operator. Strongly prefer using {@link #setValue(Object)}
* rather than direct writes to this field, when possible.
*/
protected @Nullable O value;
volatile int state; //see STATE field updater
public MonoSubscriber(CoreSubscriber<? super O> actual) {
this.actual = actual;
}
@Override
public void cancel() {
O v = value;
value = null;
STATE.set(this, CANCELLED);
discard(v);
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.CANCELLED) return isCancelled();
if (key == Attr.TERMINATED) return state == HAS_REQUEST_HAS_VALUE || state == NO_REQUEST_HAS_VALUE;
if (key == Attr.PREFETCH) return Integer.MAX_VALUE;
return InnerOperator.super.scanUnsafe(key);
}
@Override
public final void clear() {
this.value = null;
}
/**
* Tries to emit the value and complete the underlying subscriber or
* stores the value away until there is a request for it.
* <p>
* Make sure this method is called at most once
* @param v the value to emit
*/
public final void complete(@Nullable O v) {
for (; ; ) {
int state = this.state;
// if state is >= HAS_CANCELLED or bit zero is set (*_HAS_VALUE) case, return
if ((state & ~HAS_REQUEST_NO_VALUE) != 0) {
this.value = null;
discard(v);
return;
}
if (state == HAS_REQUEST_NO_VALUE && STATE.compareAndSet(this, HAS_REQUEST_NO_VALUE, HAS_REQUEST_HAS_VALUE)) {
this.value = null;
Subscriber<? super O> a = actual;
a.onNext(v);
a.onComplete();
return;
}
setValue(v);
if (state == NO_REQUEST_NO_VALUE && STATE.compareAndSet(this, NO_REQUEST_NO_VALUE, NO_REQUEST_HAS_VALUE)) {
return;
}
}
}
/**
* Discard the given value, generally this.value field. Lets derived subscriber with further knowledge about
* the possible types of the value discard such values in a specific way. Note that fields should generally be
* nulled out along the discard call.
*
* @param v the value to discard
*/
protected void discard(@Nullable O v) {
Operators.onDiscard(v, actual.currentContext());
}
@Override
public final CoreSubscriber<? super O> actual() {
return actual;
}
/**
* Returns true if this Subscription has been cancelled.
* @return true if this Subscription has been cancelled
*/
public final boolean isCancelled() {
return state == CANCELLED;
}
@Override
public final boolean isEmpty() {
return true;
}
@Override
public void onComplete() {
actual.onComplete();
}
@Override
public void onError(Throwable t) {
actual.onError(t);
}
@Override
@SuppressWarnings("unchecked")
public void onNext(I t) {
setValue((O) t);
}
@Override
public void onSubscribe(Subscription s) {
//if upstream
}
@Override
public final @Nullable O poll() {
return null;
}
@Override
public void request(long n) {
if (validate(n)) {
for (; ; ) {
int s = state;
if (s == CANCELLED) {
return;
}
// if any bits 1-31 are set, request(n) has been called (HAS_REQUEST_*)
if ((s & ~NO_REQUEST_HAS_VALUE) != 0) {
return;
}
if (s == NO_REQUEST_HAS_VALUE && STATE.compareAndSet(this, NO_REQUEST_HAS_VALUE, HAS_REQUEST_HAS_VALUE)) {
O v = value;
if (v != null) {
value = null;
Subscriber<? super O> a = actual;
a.onNext(v);
a.onComplete();
}
return;
}
if (STATE.compareAndSet(this, NO_REQUEST_NO_VALUE, HAS_REQUEST_NO_VALUE)) {
return;
}
}
}
}
@Override
public int requestFusion(int mode) {
return NONE;
}
/**
* Set the value internally, without impacting request tracking state.
* This however discards the provided value when detecting a cancellation.
*
* @param value the new value.
* @see #complete(Object)
*/
public void setValue(@Nullable O value) {
if (STATE.get(this) == CANCELLED) {
discard(value);
return;
}
this.value = value;
}
@Override
public int size() {
return isEmpty() ? 0 : 1;
}
/**
* Indicates this Subscription has no value and not requested yet.
*/
static final int NO_REQUEST_NO_VALUE = 0;
/**
* Indicates this Subscription has a value but not requested yet.
*/
static final int NO_REQUEST_HAS_VALUE = 1;
/**
* Indicates this Subscription has been requested but there is no value yet.
*/
static final int HAS_REQUEST_NO_VALUE = 2;
/**
* Indicates this Subscription has both request and value.
*/
static final int HAS_REQUEST_HAS_VALUE = 3;
/**
* Indicates the Subscription has been cancelled.
*/
static final int CANCELLED = 4;
@SuppressWarnings("rawtypes")
static final AtomicIntegerFieldUpdater<MonoSubscriber> STATE =
AtomicIntegerFieldUpdater.newUpdater(MonoSubscriber.class, "state");
}
static abstract | MonoSubscriber |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-api/src/main/java/org/apache/dubbo/rpc/Result.java | {
"start": 1723,
"end": 1839
} | class ____ and package name.
* @see org.apache.dubbo.rpc.Invoker#invoke(Invocation)
* @see AppResponse
*/
public | name |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/ServletAnnotationControllerHandlerMethodTests.java | {
"start": 130825,
"end": 131575
} | class ____ {
@RequestMapping(value = "/myPath.do", method = RequestMethod.DELETE)
public void delete() {
}
@RequestMapping(value = "/myPath.do", method = RequestMethod.HEAD)
public void head() {
}
@RequestMapping(value = "/myPath.do", method = RequestMethod.OPTIONS)
public void options() {
}
@RequestMapping(value = "/myPath.do", method = RequestMethod.POST)
public void post() {
}
@RequestMapping(value = "/myPath.do", method = RequestMethod.PUT)
public void put() {
}
@RequestMapping(value = "/myPath.do", method = RequestMethod.TRACE)
public void trace() {
}
@RequestMapping(value = "/otherPath.do", method = RequestMethod.GET)
public void get() {
}
}
@Controller
static | MethodNotAllowedController |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/floats/Floats_assertIsNotFinite_Test.java | {
"start": 1260,
"end": 2275
} | class ____ extends FloatsBaseTest {
@ParameterizedTest
@ValueSource(floats = {
Float.POSITIVE_INFINITY,
Float.NEGATIVE_INFINITY,
Float.NaN
})
void should_succeed_when_actual_is_not_finite(float actual) {
// WHEN/THEN
floats.assertIsNotFinite(someInfo(), actual);
}
@ParameterizedTest
@ValueSource(floats = {
Float.MAX_VALUE,
Float.MIN_NORMAL,
Float.MIN_VALUE,
0.0f,
1.0f,
-1.0f,
})
void should_fail_when_actual_is_finite(float actual) {
// WHEN
var assertionError = expectAssertionError(() -> floats.assertIsNotFinite(someInfo(), actual));
// THEN
then(assertionError).hasMessage(shouldNotBeFinite(actual).create());
}
@Test
void should_fail_if_actual_is_null() {
// GIVEN
Float actual = null;
// WHEN
var assertionError = expectAssertionError(() -> floats.assertIsNotFinite(someInfo(), actual));
// THEN
then(assertionError).hasMessage(actualIsNull());
}
}
| Floats_assertIsNotFinite_Test |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/DSConstants.java | {
"start": 1117,
"end": 1937
} | class ____ {
/**
* Environment key name pointing to the shell script's location
*/
public static final String DISTRIBUTEDSHELLSCRIPTLOCATION = "DISTRIBUTEDSHELLSCRIPTLOCATION";
/**
* Environment key name denoting the file timestamp for the shell script.
* Used to validate the local resource.
*/
public static final String DISTRIBUTEDSHELLSCRIPTTIMESTAMP = "DISTRIBUTEDSHELLSCRIPTTIMESTAMP";
/**
* Environment key name denoting the file content length for the shell script.
* Used to validate the local resource.
*/
public static final String DISTRIBUTEDSHELLSCRIPTLEN = "DISTRIBUTEDSHELLSCRIPTLEN";
/**
* Environment key name denoting the timeline domain ID.
*/
public static final String DISTRIBUTEDSHELLTIMELINEDOMAIN = "DISTRIBUTEDSHELLTIMELINEDOMAIN";
}
| DSConstants |
java | square__retrofit | retrofit-mock/src/test/java/retrofit2/mock/CallsTest.java | {
"start": 1175,
"end": 9899
} | class ____ {
@Test
public void bodyExecute() throws IOException {
Call<String> taco = Calls.response("Taco");
assertEquals("Taco", taco.execute().body());
}
@Test
public void bodyEnqueue() throws IOException {
Call<String> taco = Calls.response("Taco");
final AtomicReference<Response<String>> responseRef = new AtomicReference<>();
taco.enqueue(
new Callback<String>() {
@Override
public void onResponse(Call<String> call, Response<String> response) {
responseRef.set(response);
}
@Override
public void onFailure(Call<String> call, Throwable t) {
fail();
}
});
assertThat(responseRef.get().body()).isEqualTo("Taco");
}
@Test
public void responseExecute() throws IOException {
Response<String> response = Response.success("Taco");
Call<String> taco = Calls.response(response);
assertFalse(taco.isExecuted());
assertSame(response, taco.execute());
assertTrue(taco.isExecuted());
try {
taco.execute();
fail();
} catch (IllegalStateException e) {
assertThat(e).hasMessageThat().isEqualTo("Already executed");
}
}
@Test
public void responseEnqueue() {
Response<String> response = Response.success("Taco");
Call<String> taco = Calls.response(response);
assertFalse(taco.isExecuted());
final AtomicReference<Response<String>> responseRef = new AtomicReference<>();
taco.enqueue(
new Callback<String>() {
@Override
public void onResponse(Call<String> call, Response<String> response) {
responseRef.set(response);
}
@Override
public void onFailure(Call<String> call, Throwable t) {
fail();
}
});
assertSame(response, responseRef.get());
assertTrue(taco.isExecuted());
try {
taco.enqueue(
new Callback<String>() {
@Override
public void onResponse(Call<String> call, Response<String> response) {
fail();
}
@Override
public void onFailure(Call<String> call, Throwable t) {
fail();
}
});
fail();
} catch (IllegalStateException e) {
assertThat(e).hasMessageThat().isEqualTo("Already executed");
}
}
@Test
public void enqueueNullThrows() {
Call<String> taco = Calls.response("Taco");
try {
taco.enqueue(null);
fail();
} catch (NullPointerException e) {
assertThat(e).hasMessageThat().isEqualTo("callback == null");
}
}
@Test
public void responseCancelExecute() {
Call<String> taco = Calls.response(Response.success("Taco"));
assertFalse(taco.isCanceled());
taco.cancel();
assertTrue(taco.isCanceled());
try {
taco.execute();
fail();
} catch (IOException e) {
assertThat(e).hasMessageThat().isEqualTo("canceled");
}
}
@Test
public void responseCancelEnqueue() throws IOException {
Call<String> taco = Calls.response(Response.success("Taco"));
assertFalse(taco.isCanceled());
taco.cancel();
assertTrue(taco.isCanceled());
final AtomicReference<Throwable> failureRef = new AtomicReference<>();
taco.enqueue(
new Callback<String>() {
@Override
public void onResponse(Call<String> call, Response<String> response) {
fail();
}
@Override
public void onFailure(Call<String> call, Throwable t) {
failureRef.set(t);
}
});
Throwable failure = failureRef.get();
assertThat(failure).isInstanceOf(IOException.class);
assertThat(failure).hasMessageThat().isEqualTo("canceled");
}
@Test
public void failureExecute() {
IOException failure = new IOException("Hey");
Call<Object> taco = Calls.failure(failure);
assertFalse(taco.isExecuted());
try {
taco.execute();
fail();
} catch (IOException e) {
assertSame(failure, e);
}
assertTrue(taco.isExecuted());
}
@Test
public void failureExecuteCheckedException() {
CertificateException failure = new CertificateException("Hey");
Call<Object> taco = Calls.failure(failure);
assertFalse(taco.isExecuted());
try {
taco.execute();
fail();
} catch (Exception e) {
assertSame(failure, e);
}
assertTrue(taco.isExecuted());
}
@Test
public void failureEnqueue() {
IOException failure = new IOException("Hey");
Call<Object> taco = Calls.failure(failure);
assertFalse(taco.isExecuted());
final AtomicReference<Throwable> failureRef = new AtomicReference<>();
taco.enqueue(
new Callback<Object>() {
@Override
public void onResponse(Call<Object> call, Response<Object> response) {
fail();
}
@Override
public void onFailure(Call<Object> call, Throwable t) {
failureRef.set(t);
}
});
assertSame(failure, failureRef.get());
assertTrue(taco.isExecuted());
}
@Test
public void cloneHasOwnState() throws IOException {
Call<String> taco = Calls.response("Taco");
assertEquals("Taco", taco.execute().body());
Call<String> anotherTaco = taco.clone();
assertFalse(anotherTaco.isExecuted());
assertEquals("Taco", anotherTaco.execute().body());
assertTrue(anotherTaco.isExecuted());
}
@Test
public void deferredReturnExecute() throws IOException {
Call<Integer> counts =
Calls.defer(
new Callable<Call<Integer>>() {
private int count = 0;
@Override
public Call<Integer> call() throws Exception {
return Calls.response(++count);
}
});
Call<Integer> a = counts.clone();
Call<Integer> b = counts.clone();
assertEquals(1, b.execute().body().intValue());
assertEquals(2, a.execute().body().intValue());
}
@Test
public void deferredReturnEnqueue() {
Call<Integer> counts =
Calls.defer(
new Callable<Call<Integer>>() {
private int count = 0;
@Override
public Call<Integer> call() throws Exception {
return Calls.response(++count);
}
});
Call<Integer> a = counts.clone();
Call<Integer> b = counts.clone();
final AtomicReference<Response<Integer>> responseRef = new AtomicReference<>();
Callback<Integer> callback =
new Callback<Integer>() {
@Override
public void onResponse(Call<Integer> call, Response<Integer> response) {
responseRef.set(response);
}
@Override
public void onFailure(Call<Integer> call, Throwable t) {
fail();
}
};
b.enqueue(callback);
assertEquals(1, responseRef.get().body().intValue());
a.enqueue(callback);
assertEquals(2, responseRef.get().body().intValue());
}
@Test
public void deferredThrowExecute() throws IOException {
final IOException failure = new IOException("Hey");
Call<Object> failing =
Calls.defer(
() -> {
throw failure;
});
try {
failing.execute();
fail();
} catch (IOException e) {
assertSame(failure, e);
}
}
@Test
public void deferredThrowEnqueue() {
final IOException failure = new IOException("Hey");
Call<Object> failing =
Calls.defer(
() -> {
throw failure;
});
final AtomicReference<Throwable> failureRef = new AtomicReference<>();
failing.enqueue(
new Callback<Object>() {
@Override
public void onResponse(Call<Object> call, Response<Object> response) {
fail();
}
@Override
public void onFailure(Call<Object> call, Throwable t) {
failureRef.set(t);
}
});
assertSame(failure, failureRef.get());
}
@Test
public void deferredThrowUncheckedExceptionEnqueue() {
final RuntimeException failure = new RuntimeException("Hey");
final AtomicReference<Throwable> failureRef = new AtomicReference<>();
Calls.failure(failure)
.enqueue(
new Callback<Object>() {
@Override
public void onResponse(Call<Object> call, Response<Object> response) {
fail();
}
@Override
public void onFailure(Call<Object> call, Throwable t) {
failureRef.set(t);
}
});
assertSame(failure, failureRef.get());
}
}
| CallsTest |
java | reactor__reactor-core | reactor-core/src/withMicrometerTest/java/reactor/core/publisher/FluxTapTest.java | {
"start": 18579,
"end": 23355
} | class ____ {
@Test
void implementationSmokeTest() {
Flux<Integer> fuseableSource = Flux.just(1);
//the TestSubscriber "requireFusion" configuration below is intentionally inverted
//so that an exception describing the actual Subscription is thrown when calling block()
TestSubscriber<Integer> testSubscriberForFuseable = TestSubscriber.builder().requireNotFuseable().build();
Flux<Integer> fuseable = fuseableSource.tap(TestSignalListener::new);
assertThat(fuseableSource).as("smoke test fuseableSource").isInstanceOf(Fuseable.class);
assertThat(fuseable).as("fuseable").isInstanceOf(FluxTapFuseable.class);
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> fuseable.subscribeWith(testSubscriberForFuseable).block())
.as("TapFuseableSubscriber")
.withMessageContaining("got reactor.core.publisher.FluxTapFuseable$TapFuseableSubscriber");
}
@Test
void throwingCreateListener() {
TestSubscriber<Integer> testSubscriber = TestSubscriber.create();
FluxTapFuseable<Integer, Void> test = new FluxTapFuseable<>(Flux.just(1),
new SignalListenerFactory<Integer, Void>() {
@Override
public Void initializePublisherState(Publisher<? extends Integer> source) {
return null;
}
@Override
public SignalListener<Integer> createListener(Publisher<? extends Integer> source,
ContextView listenerContext, Void publisherContext) {
throw new IllegalStateException("expected");
}
});
assertThatCode(() -> test.subscribeOrReturn(testSubscriber))
.doesNotThrowAnyException();
assertThat(testSubscriber.expectTerminalError())
.as("downstream error")
.isInstanceOf(IllegalStateException.class)
.hasMessage("expected");
}
//doFirst is invoked from each publisher's subscribeOrReturn
@Test
void doFirst() {
TestSubscriber<Integer> testSubscriber = TestSubscriber.create();
TestSignalListener<Integer> listener = new TestSignalListener<>();
FluxTapFuseable<Integer, Void> test = new FluxTapFuseable<>(Flux.just(1), factoryOf(listener));
assertThatCode(() -> test.subscribeOrReturn(testSubscriber))
.doesNotThrowAnyException();
assertThat(listener.listenerErrors).as("listenerErrors").isEmpty();
assertThat(listener.events)
.as("events")
.containsExactly(
"doFirst"
);
}
@Test
void throwingAlterContext() {
TestSubscriber<Integer> testSubscriber = TestSubscriber.create();
TestSignalListener<Integer> testSignalListener =
new TestSignalListener<Integer>() {
@Override
public Context addToContext(Context originalContext) {
throw new IllegalStateException("expected");
}
};
FluxTapFuseable<Integer, Void> test = new FluxTapFuseable<>(
Flux.just(1), factoryOf(testSignalListener));
assertThatCode(() -> test.subscribeOrReturn(testSubscriber))
.doesNotThrowAnyException();
assertThat(testSubscriber.expectTerminalError())
.as("downstream error")
.isInstanceOf(IllegalStateException.class)
.hasMessage("Unable to augment tap Context at subscription via addToContext")
.extracting(Throwable::getCause)
.satisfies(t -> assertThat(t)
.isInstanceOf(IllegalStateException.class)
.hasMessage("expected"));
assertThat(testSignalListener.listenerErrors)
.as("listenerErrors")
.satisfies(errors -> {
assertThat(errors.size()).isEqualTo(1);
assertThat(errors.stream().findFirst().get())
.isInstanceOf(IllegalStateException.class)
.hasMessage("Unable to augment tap Context at subscription via addToContext")
.extracting(Throwable::getCause)
.satisfies(t -> assertThat(t)
.isInstanceOf(IllegalStateException.class)
.hasMessage("expected"));
});
assertThat(testSignalListener.events)
.containsExactly("doFirst");
}
@Test
void doFirstListenerError() {
Throwable listenerError = new IllegalStateException("expected from doFirst");
TestSubscriber<Integer> testSubscriber = TestSubscriber.create();
TestSignalListener<Integer> listener = new TestSignalListener<Integer>() {
@Override
public void doFirst() throws Throwable {
throw listenerError;
}
};
FluxTapFuseable<Integer, Void> test = new FluxTapFuseable<>(Flux.just(1), factoryOf(listener));
assertThatCode(() -> test.subscribeOrReturn(testSubscriber))
.doesNotThrowAnyException();
assertThat(listener.listenerErrors)
.as("listenerErrors")
.containsExactly(listenerError);
assertThat(listener.events)
.as("events")
.isEmpty();
}
//TODO test clear/size/isEmpty
//TODO test poll
//TODO test ASYNC fusion onNext
//TODO test SYNC fusion onNext
}
@Nested
| FluxTapFuseableTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/sqm/tree/domain/SqmEmbeddableDomainType.java | {
"start": 319,
"end": 582
} | interface ____<E> extends EmbeddableDomainType<E>, SqmTreatableDomainType<E> {
@Override
default @Nullable SqmDomainType<E> getSqmType() {
return this;
}
@Override
default SqmEmbeddableDomainType<E> getPathType() {
return this;
}
}
| SqmEmbeddableDomainType |
java | elastic__elasticsearch | x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/InMemoryNoOpCommitDirectoryTests.java | {
"start": 1629,
"end": 11262
} | class ____ extends ESTestCase {
private ByteBuffersDirectory readOnlyDirectory;
private InMemoryNoOpCommitDirectory inMemoryNoOpCommitDirectory;
@Before
public void createDirectories() {
readOnlyDirectory = new ByteBuffersDirectory(NoLockFactory.INSTANCE);
inMemoryNoOpCommitDirectory = new InMemoryNoOpCommitDirectory(new FilterDirectory(readOnlyDirectory) {
// wrapper around readOnlyDirectory to assert that we make no attempt to write to it
@Override
public void deleteFile(String name) {
throw new AssertionError("not supported");
}
@Override
public IndexOutput createOutput(String name, IOContext context) {
throw new AssertionError("not supported");
}
@Override
public IndexOutput createTempOutput(String prefix, String suffix, IOContext context) {
throw new AssertionError("not supported");
}
@Override
public void rename(String source, String dest) {
throw new AssertionError("not supported");
}
@Override
public Lock obtainLock(String name) {
throw new AssertionError("not supported");
}
@Override
public Set<String> getPendingDeletions() {
throw new AssertionError("not supported");
}
@Override
public void copyFrom(Directory from, String src, String dest, IOContext context) {
throw new AssertionError("not supported");
}
});
}
@After
public void closeDirectories() throws IOException {
inMemoryNoOpCommitDirectory.close();
expectThrows(AlreadyClosedException.class, () -> readOnlyDirectory.listAll());
}
public void testAllowsWritingSegmentsFiles() throws IOException {
assertCanWrite("segments_" + randomAlphaOfLength(10));
assertCanWrite("pending_segments_" + randomAlphaOfLength(10));
assertCanWrite("recovery." + randomAlphaOfLength(10) + ".segments_" + randomAlphaOfLength(10));
}
public void testForbidsWritingOtherFiles() {
expectThrows(IllegalArgumentException.class, () -> assertCanWrite("not_a_segments_file"));
}
private void assertCanWrite(String name) throws IOException {
final String s = randomAlphaOfLength(10);
try (IndexOutput output = inMemoryNoOpCommitDirectory.createOutput(name, IOContext.DEFAULT)) {
output.writeString(s);
}
try (IndexInput input = inMemoryNoOpCommitDirectory.openInput(name, IOContext.DEFAULT)) {
assertThat(input.readString(), equalTo(s));
}
if (randomBoolean()) {
inMemoryNoOpCommitDirectory.sync(singletonList(name));
}
if (randomBoolean()) {
inMemoryNoOpCommitDirectory.syncMetaData();
}
assertThat(inMemoryNoOpCommitDirectory.fileLength(name), equalTo((long) StandardCharsets.UTF_8.encode(s).array().length));
assertThat(Arrays.asList(inMemoryNoOpCommitDirectory.listAll()), hasItem(name));
inMemoryNoOpCommitDirectory.deleteFile(name);
assertThat(Arrays.asList(inMemoryNoOpCommitDirectory.listAll()), not(hasItem(name)));
}
public void testExposesFileFromRealDirectory() throws IOException {
final String name = randomAlphaOfLength(10);
assertExposesRealFiles(name);
expectThrows(IllegalArgumentException.class, () -> inMemoryNoOpCommitDirectory.deleteFile(name));
assertThat(Arrays.asList(inMemoryNoOpCommitDirectory.listAll()), hasItem(name));
}
public void testEmulatesAttemptsToDeleteInnerSegmentsFiles() throws IOException {
final String name = "segments_" + randomAlphaOfLength(10);
assertExposesRealFiles(name);
inMemoryNoOpCommitDirectory.deleteFile(name); // no-op
assertThat(Arrays.asList(readOnlyDirectory.listAll()), hasItem(name));
assertThat(Arrays.asList(inMemoryNoOpCommitDirectory.listAll()), not(hasItem(name)));
readOnlyDirectory.deleteFile(name);
assertThat(Arrays.asList(readOnlyDirectory.listAll()), not(hasItem(name)));
assertThat(Arrays.asList(inMemoryNoOpCommitDirectory.listAll()), not(hasItem(name)));
}
private void assertExposesRealFiles(String name) throws IOException {
final String s = randomAlphaOfLength(10);
try (IndexOutput output = readOnlyDirectory.createOutput(name, IOContext.DEFAULT)) {
output.writeString(s);
}
try (IndexInput input = inMemoryNoOpCommitDirectory.openInput(name, IOContext.DEFAULT)) {
assertThat(input.readString(), equalTo(s));
}
assertThat(inMemoryNoOpCommitDirectory.fileLength(name), equalTo((long) StandardCharsets.UTF_8.encode(s).array().length));
assertThat(Arrays.asList(inMemoryNoOpCommitDirectory.listAll()), hasItem(name));
}
public void testSupportsNoOpCommits() throws IOException {
try (IndexWriter indexWriter = new IndexWriter(readOnlyDirectory, new IndexWriterConfig())) {
final Document document = new Document();
document.add(new TextField("foo", "bar", Field.Store.YES));
indexWriter.addDocument(document);
indexWriter.setLiveCommitData(singletonMap("user_data", "original").entrySet());
indexWriter.commit();
}
try (DirectoryReader directoryReader = DirectoryReader.open(inMemoryNoOpCommitDirectory)) {
assertThat(directoryReader.getIndexCommit().getUserData().get("user_data"), equalTo("original"));
final TopDocs topDocs = newSearcher(directoryReader).search(new MatchAllDocsQuery(), 1);
assertThat(topDocs.totalHits, equalTo(new TotalHits(1L, TotalHits.Relation.EQUAL_TO)));
assertThat(topDocs.scoreDocs.length, equalTo(1));
assertThat(directoryReader.storedFields().document(topDocs.scoreDocs[0].doc).getField("foo").stringValue(), equalTo("bar"));
}
try (IndexWriter indexWriter = new IndexWriter(inMemoryNoOpCommitDirectory, new IndexWriterConfig())) {
indexWriter.setLiveCommitData(singletonMap("user_data", "updated").entrySet());
indexWriter.commit();
}
try (DirectoryReader directoryReader = DirectoryReader.open(inMemoryNoOpCommitDirectory)) {
assertThat(directoryReader.getIndexCommit().getUserData().get("user_data"), equalTo("updated"));
}
}
public void testRejectsDocumentChanges() throws IOException {
if (randomBoolean()) {
try (IndexWriter indexWriter = new IndexWriter(readOnlyDirectory, new IndexWriterConfig())) {
final Document document = new Document();
document.add(new TextField("foo", "bar", Field.Store.YES));
indexWriter.addDocument(document);
indexWriter.commit();
}
}
try (IndexWriter indexWriter = new IndexWriter(inMemoryNoOpCommitDirectory, new IndexWriterConfig())) {
final Document document = new Document();
document.add(new TextField("foo", "baz", Field.Store.YES));
expectThrows(IllegalArgumentException.class, () -> {
indexWriter.addDocument(document);
indexWriter.commit();
});
}
}
public void testSupportsDeletes() throws IOException {
try (IndexWriter indexWriter = new IndexWriter(readOnlyDirectory, new IndexWriterConfig())) {
final Document document = new Document();
document.add(new TextField("foo", "bar", Field.Store.YES));
indexWriter.addDocument(document);
indexWriter.setLiveCommitData(singletonMap("user_data", "original").entrySet());
indexWriter.commit();
}
try (DirectoryReader directoryReader = DirectoryReader.open(inMemoryNoOpCommitDirectory)) {
assertThat(directoryReader.getIndexCommit().getUserData().get("user_data"), equalTo("original"));
final TopDocs topDocs = newSearcher(directoryReader).search(new MatchAllDocsQuery(), 1);
assertThat(topDocs.totalHits, equalTo(new TotalHits(1L, TotalHits.Relation.EQUAL_TO)));
assertThat(topDocs.scoreDocs.length, equalTo(1));
assertThat(directoryReader.storedFields().document(topDocs.scoreDocs[0].doc).getField("foo").stringValue(), equalTo("bar"));
}
assertEquals(1, DirectoryReader.listCommits(inMemoryNoOpCommitDirectory).size());
try (
IndexWriter indexWriter = new IndexWriter(
inMemoryNoOpCommitDirectory,
new IndexWriterConfig().setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE)
)
) {
indexWriter.setLiveCommitData(singletonMap("user_data", "updated").entrySet());
indexWriter.commit();
}
assertEquals(2, DirectoryReader.listCommits(inMemoryNoOpCommitDirectory).size());
try (IndexWriter indexWriter = new IndexWriter(inMemoryNoOpCommitDirectory, new IndexWriterConfig())) {
indexWriter.commit();
}
assertEquals(1, DirectoryReader.listCommits(inMemoryNoOpCommitDirectory).size());
try (DirectoryReader directoryReader = DirectoryReader.open(inMemoryNoOpCommitDirectory)) {
assertThat(directoryReader.getIndexCommit().getUserData().get("user_data"), equalTo("updated"));
}
}
}
| InMemoryNoOpCommitDirectoryTests |
java | apache__camel | core/camel-core-model/src/main/java/org/apache/camel/model/RollbackDefinition.java | {
"start": 1252,
"end": 3341
} | class ____ extends NoOutputDefinition<RollbackDefinition> {
@XmlAttribute
private String message;
@XmlAttribute
@Metadata(javaType = "java.lang.Boolean")
private String markRollbackOnly;
@XmlAttribute
@Metadata(label = "advanced", javaType = "java.lang.Boolean")
private String markRollbackOnlyLast;
public RollbackDefinition() {
}
protected RollbackDefinition(RollbackDefinition source) {
super(source);
this.message = source.message;
this.markRollbackOnly = source.markRollbackOnly;
this.markRollbackOnlyLast = source.markRollbackOnlyLast;
}
public RollbackDefinition(String message) {
this.message = message;
}
@Override
public RollbackDefinition copyDefinition() {
return new RollbackDefinition(this);
}
@Override
public String toString() {
if (message != null) {
return "Rollback[" + message + "]";
} else {
return "Rollback";
}
}
@Override
public String getShortName() {
return "rollback";
}
@Override
public String getLabel() {
return "rollback";
}
public String getMessage() {
return message;
}
/**
* Message to use in rollback exception
*/
public void setMessage(String message) {
this.message = message;
}
public String getMarkRollbackOnly() {
return markRollbackOnly;
}
/**
* Mark the transaction for rollback only (cannot be overruled to commit)
*/
public void setMarkRollbackOnly(String markRollbackOnly) {
this.markRollbackOnly = markRollbackOnly;
}
public String getMarkRollbackOnlyLast() {
return markRollbackOnlyLast;
}
/**
* Mark only last sub transaction for rollback only.
* <p/>
* When using sub transactions (if the transaction manager support this)
*/
public void setMarkRollbackOnlyLast(String markRollbackOnlyLast) {
this.markRollbackOnlyLast = markRollbackOnlyLast;
}
}
| RollbackDefinition |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/chararrays/CharArrays_assertHasSameSizeAs_with_Array_Test.java | {
"start": 1432,
"end": 2439
} | class ____ extends CharArraysBaseTest {
@Test
void should_fail_if_actual_is_null() {
// GIVEN
actual = null;
// WHEN
ThrowingCallable code = () -> arrays.assertHasSameSizeAs(someInfo(), actual, array("Solo", "Leia"));
// THEN
assertThatAssertionErrorIsThrownBy(code).withMessage(actualIsNull());
}
@Test
void should_fail_if_size_of_actual_is_not_equal_to_expected_size() {
// GIVEN
AssertionInfo info = someInfo();
String[] other = array("Solo", "Leia");
// WHEN
ThrowingCallable code = () -> arrays.assertHasSameSizeAs(info, actual, other);
// THEN
String error = shouldHaveSameSizeAs(actual, other, actual.length, other.length).create(null, info.representation());
assertThatAssertionErrorIsThrownBy(code).withMessage(error);
}
@Test
void should_pass_if_size_of_actual_is_equal_to_expected_size() {
arrays.assertHasSameSizeAs(someInfo(), actual, array("Solo", "Leia", "Luke"));
}
}
| CharArrays_assertHasSameSizeAs_with_Array_Test |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/adaptive/MockStateWithoutExecutionGraphContext.java | {
"start": 1415,
"end": 2829
} | class ____
implements StateWithoutExecutionGraph.Context, AfterEachCallback {
private final StateValidator<ArchivedExecutionGraph> finishedStateValidator =
new StateValidator<>("Finished");
private boolean hasStateTransition = false;
private final JobID jobId = new JobID();
public void setExpectFinished(Consumer<ArchivedExecutionGraph> asserter) {
finishedStateValidator.expectInput(asserter);
}
@Override
public void goToFinished(ArchivedExecutionGraph archivedExecutionGraph) {
finishedStateValidator.validateInput(archivedExecutionGraph);
registerStateTransition();
}
@Override
public JobID getJobId() {
return jobId;
}
@Override
public ArchivedExecutionGraph getArchivedExecutionGraph(
JobStatus jobStatus, @Nullable Throwable cause) {
return new ArchivedExecutionGraphBuilder()
.setState(jobStatus)
.setFailureCause(cause == null ? null : new ErrorInfo(cause, 1337))
.build();
}
@Override
public void afterEach(ExtensionContext extensionContext) throws Exception {
finishedStateValidator.close();
}
public boolean hasStateTransition() {
return hasStateTransition;
}
public void registerStateTransition() {
hasStateTransition = true;
}
}
| MockStateWithoutExecutionGraphContext |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/postgresql/datatypes/BooleanTest.java | {
"start": 368,
"end": 1699
} | class ____ extends PGTest {
public void test_0() throws Exception {
String sql = "INSERT INTO test1 VALUES (TRUE, 'sic est');";
PGSQLStatementParser parser = new PGSQLStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement statemen = statementList.get(0);
print(statementList);
assertEquals(1, statementList.size());
PGSchemaStatVisitor visitor = new PGSchemaStatVisitor();
statemen.accept(visitor);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
assertTrue(visitor.getTables().containsKey(new TableStat.Name("test1")));
assertEquals(0, visitor.getColumns().size());
// assertTrue(visitor.getColumns().contains(new TableStat.Column("films", "kind")));
// assertTrue(visitor.getColumns().contains(new TableStat.Column("films", "code")));
// assertTrue(visitor.getColumns().contains(new TableStat.Column("films", "date_prod")));
// assertTrue(visitor.getColumns().contains(new TableStat.Column("films", "title")));
// assertTrue(visitor.getColumns().contains(new TableStat.Column("films", "did")));
}
}
| BooleanTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/lazy/ConstructorInitializationTest.java | {
"start": 7554,
"end": 7692
} | class ____ {
@Id
@GeneratedValue
private long id;
@Column(name = "is_open")
private boolean open = false;
}
}
| AccountPreferences |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/ast/SQLPartitionByHash.java | {
"start": 712,
"end": 1884
} | class ____ extends SQLPartitionBy {
// for aliyun ads
protected boolean key;
protected boolean unique;
public boolean isKey() {
return key;
}
public void setKey(boolean key) {
this.key = key;
}
public boolean isUnique() {
return unique;
}
public void setUnique(boolean unique) {
this.unique = unique;
}
@Override
protected void accept0(SQLASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, columns);
acceptChild(visitor, partitionsCount);
acceptChild(visitor, getPartitions());
acceptChild(visitor, subPartitionBy);
}
visitor.endVisit(this);
}
public SQLPartitionByHash clone() {
SQLPartitionByHash x = new SQLPartitionByHash();
cloneTo(x);
x.key = key;
x.unique = unique;
for (SQLExpr column : columns) {
SQLExpr c2 = column.clone();
c2.setParent(x);
x.columns.add(c2);
}
return x;
}
public void cloneTo(SQLPartitionByHash x) {
super.cloneTo(x);
}
}
| SQLPartitionByHash |
java | elastic__elasticsearch | test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java | {
"start": 1526,
"end": 1651
} | class ____ build MockEngines like {@link MockInternalEngine}
* since they need to subclass the actual engine
*/
public final | to |
java | elastic__elasticsearch | x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/BaseSearchableSnapshotsIntegTestCase.java | {
"start": 4011,
"end": 17882
} | class ____ extends AbstractSnapshotIntegTestCase {
@Override
protected boolean addMockInternalEngine() {
return false;
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
List<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins());
plugins.add(BlobCachePlugin.class);
plugins.add(LocalStateSearchableSnapshots.class);
plugins.add(LicensedSnapshotBasedRecoveriesPlugin.class);
plugins.add(ForbiddenActionsPlugin.class);
return Collections.unmodifiableList(plugins);
}
@Override
protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
final Settings settings;
{
final Settings initialSettings = super.nodeSettings(nodeOrdinal, otherSettings);
if (DiscoveryNode.canContainData(otherSettings)) {
settings = addRoles(initialSettings, Set.of(DiscoveryNodeRole.DATA_FROZEN_NODE_ROLE));
} else {
settings = initialSettings;
}
}
final Settings.Builder builder = Settings.builder().put(settings).put(SELF_GENERATED_LICENSE_TYPE.getKey(), "trial");
if (randomBoolean()) {
builder.put(
CacheService.SNAPSHOT_CACHE_RANGE_SIZE_SETTING.getKey(),
rarely()
? ByteSizeValue.of(randomIntBetween(4, 1024), ByteSizeUnit.KB)
: ByteSizeValue.of(randomIntBetween(1, 10), ByteSizeUnit.MB)
);
}
if (DiscoveryNode.canContainData(otherSettings) && randomBoolean()) {
builder.put(SharedBlobCacheService.SHARED_CACHE_SIZE_SETTING.getKey(), ByteSizeValue.ZERO.getStringRep());
}
builder.put(
SharedBlobCacheService.SHARED_CACHE_REGION_SIZE_SETTING.getKey(),
rarely()
? pageAligned(ByteSizeValue.of(randomIntBetween(4, 1024), ByteSizeUnit.KB))
: pageAligned(ByteSizeValue.of(randomIntBetween(1, 10), ByteSizeUnit.MB))
);
if (randomBoolean()) {
builder.put(
SharedBlobCacheService.SHARED_CACHE_RANGE_SIZE_SETTING.getKey(),
rarely()
? pageAligned(ByteSizeValue.of(randomIntBetween(4, 1024), ByteSizeUnit.KB))
: pageAligned(ByteSizeValue.of(randomIntBetween(1, 10), ByteSizeUnit.MB))
);
}
if (randomBoolean()) {
builder.put(
SharedBlobCacheService.SHARED_CACHE_RECOVERY_RANGE_SIZE_SETTING.getKey(),
rarely()
? pageAligned(ByteSizeValue.of(randomIntBetween(4, 1024), ByteSizeUnit.KB))
: pageAligned(ByteSizeValue.of(randomIntBetween(1, 10), ByteSizeUnit.MB))
);
}
return builder.build();
}
@After
public void waitForBlobCacheFillsToComplete() {
for (BlobStoreCacheService blobStoreCacheService : internalCluster().getDataNodeInstances(BlobStoreCacheService.class)) {
assertTrue(blobStoreCacheService.waitForInFlightCacheFillsToComplete(30L, TimeUnit.SECONDS));
}
}
@Override
protected void createRepository(String repoName, String type, Settings.Builder settings, boolean verify) {
// add use for peer recovery setting randomly to verify that these features work together.
Settings.Builder newSettings = randomBoolean()
? settings
: Settings.builder().put(BlobStoreRepository.USE_FOR_PEER_RECOVERY_SETTING.getKey(), true).put(settings.build());
super.createRepository(repoName, type, newSettings, verify);
}
protected String mountSnapshot(String repositoryName, String snapshotName, String indexName, Settings restoredIndexSettings)
throws Exception {
final String restoredIndexName = randomBoolean() ? indexName : randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
mountSnapshot(repositoryName, snapshotName, indexName, restoredIndexName, restoredIndexSettings);
return restoredIndexName;
}
protected void mountSnapshot(
String repositoryName,
String snapshotName,
String indexName,
String restoredIndexName,
Settings restoredIndexSettings
) throws Exception {
mountSnapshot(repositoryName, snapshotName, indexName, restoredIndexName, restoredIndexSettings, Storage.FULL_COPY);
}
protected void mountSnapshot(
String repositoryName,
String snapshotName,
String indexName,
String restoredIndexName,
Settings restoredIndexSettings,
final Storage storage
) throws Exception {
final MountSearchableSnapshotRequest mountRequest = new MountSearchableSnapshotRequest(
TEST_REQUEST_TIMEOUT,
restoredIndexName,
repositoryName,
snapshotName,
indexName,
restoredIndexSettings,
Strings.EMPTY_ARRAY,
true,
storage
);
final RestoreSnapshotResponse restoreResponse = client().execute(MountSearchableSnapshotAction.INSTANCE, mountRequest).actionGet();
assertThat(restoreResponse.getRestoreInfo().successfulShards(), equalTo(getNumShards(restoredIndexName).numPrimaries));
assertThat(restoreResponse.getRestoreInfo().failedShards(), equalTo(0));
}
protected void createAndPopulateIndex(String indexName, Settings.Builder settings) throws InterruptedException {
assertAcked(prepareCreate(indexName, settings));
ensureGreen(indexName);
populateIndex(indexName, 100);
}
protected void populateIndex(String indexName, int maxIndexRequests) throws InterruptedException {
final List<IndexRequestBuilder> indexRequestBuilders = new ArrayList<>();
// This index does not permit dynamic fields, so we can only use defined field names
final String key = indexName.equals(SearchableSnapshots.SNAPSHOT_BLOB_CACHE_INDEX) ? "type" : "foo";
for (int i = between(10, maxIndexRequests); i >= 0; i--) {
indexRequestBuilders.add(prepareIndex(indexName).setSource(key, randomBoolean() ? "bar" : "baz"));
}
indexRandom(true, true, indexRequestBuilders);
refresh(indexName);
if (randomBoolean()) {
assertThat(
indicesAdmin().prepareForceMerge(indexName).setOnlyExpungeDeletes(true).setFlush(true).get().getFailedShards(),
equalTo(0)
);
}
}
protected void checkSoftDeletesNotEagerlyLoaded(String restoredIndexName) {
for (IndicesService indicesService : internalCluster().getDataNodeInstances(IndicesService.class)) {
for (IndexService indexService : indicesService) {
if (indexService.index().getName().equals(restoredIndexName)) {
for (IndexShard indexShard : indexService) {
try {
Engine engine = IndexShardTestCase.getEngine(indexShard);
assertThat(engine, instanceOf(ReadOnlyEngine.class));
EngineTestCase.checkNoSoftDeletesLoaded((ReadOnlyEngine) engine);
} catch (AlreadyClosedException ace) {
// ok to ignore these
}
}
}
}
}
}
protected void assertShardFolders(String indexName, boolean isSearchableSnapshot) throws IOException {
final Index restoredIndex = resolveIndex(indexName);
final String customDataPath = resolveCustomDataPath(indexName);
final ShardId shardId = new ShardId(restoredIndex, 0);
boolean shardFolderFound = false;
for (String node : internalCluster().getNodeNames()) {
final NodeEnvironment service = internalCluster().getInstance(NodeEnvironment.class, node);
final ShardPath shardPath = ShardPath.loadShardPath(logger, service, shardId, customDataPath);
if (shardPath != null && Files.exists(shardPath.getDataPath())) {
shardFolderFound = true;
final boolean indexExists = Files.exists(shardPath.resolveIndex());
final boolean translogExists = Files.exists(shardPath.resolveTranslog());
logger.info(
"--> [{}] verifying shard data path [{}] (index exists: {}, translog exists: {})",
node,
shardPath.getDataPath(),
indexExists,
translogExists
);
assertThat(
isSearchableSnapshot ? "Index file should not exist" : "Index file should exist",
indexExists,
not(isSearchableSnapshot)
);
if (isSearchableSnapshot) {
assertThat("Translog should not exist", translogExists, equalTo(false));
} else {
assertThat("Translog should exist", translogExists, equalTo(true));
try (Stream<Path> dir = Files.list(shardPath.resolveTranslog())) {
final long translogFiles = dir.filter(path -> path.getFileName().toString().contains("translog")).count();
assertThat(
"There should be 2+ translog files for a non-snapshot directory",
translogFiles,
greaterThanOrEqualTo(2L)
);
}
}
}
}
assertTrue("no shard folder found for index " + indexName, shardFolderFound);
}
protected void assertTotalHits(String indexName, TotalHits originalAllHits, TotalHits originalBarHits) throws Exception {
final Thread[] threads = new Thread[between(1, 5)];
final AtomicArray<TotalHits> allHits = new AtomicArray<>(threads.length);
final AtomicArray<TotalHits> barHits = new AtomicArray<>(threads.length);
final CountDownLatch latch = new CountDownLatch(1);
for (int i = 0; i < threads.length; i++) {
int t = i;
threads[i] = new Thread(() -> {
try {
latch.await();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
assertResponse(prepareSearch(indexName).setTrackTotalHits(true), resp -> allHits.set(t, resp.getHits().getTotalHits()));
assertResponse(
prepareSearch(indexName).setTrackTotalHits(true).setQuery(matchQuery("foo", "bar")),
resp -> barHits.set(t, resp.getHits().getTotalHits())
);
});
threads[i].start();
}
ensureGreen(indexName);
latch.countDown();
for (int i = 0; i < threads.length; i++) {
threads[i].join();
final TotalHits allTotalHits = allHits.get(i);
final TotalHits barTotalHits = barHits.get(i);
logger.info("--> thread #{} has [{}] hits in total, of which [{}] match the query", i, allTotalHits, barTotalHits);
assertThat(allTotalHits, equalTo(originalAllHits));
assertThat(barTotalHits, equalTo(originalBarHits));
}
}
protected void assertRecoveryStats(String indexName, boolean preWarmEnabled) throws Exception {
int shardCount = getNumShards(indexName).totalNumShards;
assertBusy(() -> {
final RecoveryResponse recoveryResponse = indicesAdmin().prepareRecoveries(indexName).get();
assertThat(recoveryResponse.toString(), recoveryResponse.shardRecoveryStates().get(indexName).size(), equalTo(shardCount));
for (List<RecoveryState> recoveryStates : recoveryResponse.shardRecoveryStates().values()) {
for (RecoveryState recoveryState : recoveryStates) {
RecoveryState.Index index = recoveryState.getIndex();
assertThat(
Strings.toString(recoveryState, true, true),
index.recoveredFileCount(),
preWarmEnabled ? equalTo(index.totalRecoverFiles()) : greaterThanOrEqualTo(0)
);
assertThat(recoveryState.getStage(), equalTo(RecoveryState.Stage.DONE));
}
}
}, 30L, TimeUnit.SECONDS);
}
protected DiscoveryNodes getDiscoveryNodes() {
return clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).clear().setNodes(true).get().getState().nodes();
}
protected void assertExecutorIsIdle(String executorName) throws Exception {
assertBusy(() -> {
for (ThreadPool threadPool : internalCluster().getInstances(ThreadPool.class)) {
ThreadPoolExecutor threadPoolExecutor = (ThreadPoolExecutor) threadPool.executor(executorName);
assertThat(threadPoolExecutor.getQueue().size(), equalTo(0));
assertThat(threadPoolExecutor.getActiveCount(), equalTo(0));
}
});
}
protected static void waitUntilRecoveryIsDone(String index) throws Exception {
assertBusy(() -> {
RecoveryResponse recoveryResponse = indicesAdmin().prepareRecoveries(index).get();
assertThat(recoveryResponse.hasRecoveries(), equalTo(true));
for (List<RecoveryState> value : recoveryResponse.shardRecoveryStates().values()) {
for (RecoveryState recoveryState : value) {
assertThat(recoveryState.getStage(), equalTo(RecoveryState.Stage.DONE));
}
}
});
}
public static | BaseSearchableSnapshotsIntegTestCase |
java | apache__camel | components/camel-fhir/camel-fhir-component/src/main/java/org/apache/camel/component/fhir/internal/FhirHelper.java | {
"start": 1824,
"end": 1916
} | class ____ creating FHIR {@link ca.uhn.fhir.rest.client.api.IGenericClient}
*/
public final | for |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/QuarkusClassWriter.java | {
"start": 276,
"end": 729
} | class ____ extends ClassWriter {
public QuarkusClassWriter(final ClassReader classReader, final int flags) {
super(classReader, flags);
}
public QuarkusClassWriter(final int flags) {
super(flags);
}
@Override
protected ClassLoader getClassLoader() {
// the TCCL is safe for transformations when this ClassWriter runs
return Thread.currentThread().getContextClassLoader();
}
}
| QuarkusClassWriter |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/builditem/PreloadClassesEnabledBuildItem.java | {
"start": 188,
"end": 470
} | class ____ extends SimpleBuildItem {
private final boolean initialize;
public PreloadClassesEnabledBuildItem(boolean initialize) {
this.initialize = initialize;
}
public boolean doInitialize() {
return initialize;
}
}
| PreloadClassesEnabledBuildItem |
java | micronaut-projects__micronaut-core | http/src/main/java/io/micronaut/http/bind/binders/BodyArgumentBinder.java | {
"start": 815,
"end": 1053
} | interface ____<T> extends AnnotatedRequestArgumentBinder<Body, T> {
/**
* @return The required annotation type
*/
@Override
default Class<Body> getAnnotationType() {
return Body.class;
}
}
| BodyArgumentBinder |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/time/FastDatePrinter.java | {
"start": 22606,
"end": 23484
} | class ____ implements NumberRule {
static final TwoDigitYearField INSTANCE = new TwoDigitYearField();
/**
* Constructs an instance of {@link TwoDigitYearField}.
*/
TwoDigitYearField() {
}
/**
* {@inheritDoc}
*/
@Override
public void appendTo(final Appendable buffer, final Calendar calendar) throws IOException {
appendTo(buffer, calendar.get(Calendar.YEAR) % 100);
}
/**
* {@inheritDoc}
*/
@Override
public void appendTo(final Appendable buffer, final int value) throws IOException {
appendDigits(buffer, value % 100);
}
/**
* {@inheritDoc}
*/
@Override
public int estimateLength() {
return 2;
}
}
/**
* Inner | TwoDigitYearField |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.