language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/params/ParameterizedClassIntegrationTests.java | {
"start": 57287,
"end": 57841
} | class ____ {
@Nullable
static Wrapper instance;
@BeforeAll
@AfterAll
static void clearWrapper() {
instance = null;
}
@Parameter
@ConvertWith(Wrapper.Converter.class)
Wrapper wrapper;
@Test
void test1() {
setOrCheckWrapper();
}
@Test
void test2() {
setOrCheckWrapper();
}
private void setOrCheckWrapper() {
if (instance == null) {
instance = wrapper;
}
else {
assertSame(instance, wrapper);
}
}
}
record Wrapper(int value) {
static | ArgumentConversionPerInvocationFieldInjectionTestCase |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/management/SpringManagedSanitizeTest.java | {
"start": 1160,
"end": 1428
} | class ____ extends ManagedSanitizeTest {
@Override
protected CamelContext createCamelContext() throws Exception {
return createSpringCamelContext(this, "org/apache/camel/spring/management/SpringManagedSanitizeTest.xml");
}
}
| SpringManagedSanitizeTest |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/Aws2StepFunctionsComponentBuilderFactory.java | {
"start": 16181,
"end": 20618
} | class ____
extends AbstractComponentBuilder<StepFunctions2Component>
implements Aws2StepFunctionsComponentBuilder {
@Override
protected StepFunctions2Component buildConcreteComponent() {
return new StepFunctions2Component();
}
private org.apache.camel.component.aws2.stepfunctions.StepFunctions2Configuration getOrCreateConfiguration(StepFunctions2Component component) {
if (component.getConfiguration() == null) {
component.setConfiguration(new org.apache.camel.component.aws2.stepfunctions.StepFunctions2Configuration());
}
return component.getConfiguration();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "configuration": ((StepFunctions2Component) component).setConfiguration((org.apache.camel.component.aws2.stepfunctions.StepFunctions2Configuration) value); return true;
case "lazyStartProducer": ((StepFunctions2Component) component).setLazyStartProducer((boolean) value); return true;
case "operation": getOrCreateConfiguration((StepFunctions2Component) component).setOperation((org.apache.camel.component.aws2.stepfunctions.StepFunctions2Operations) value); return true;
case "overrideEndpoint": getOrCreateConfiguration((StepFunctions2Component) component).setOverrideEndpoint((boolean) value); return true;
case "pojoRequest": getOrCreateConfiguration((StepFunctions2Component) component).setPojoRequest((boolean) value); return true;
case "profileCredentialsName": getOrCreateConfiguration((StepFunctions2Component) component).setProfileCredentialsName((java.lang.String) value); return true;
case "region": getOrCreateConfiguration((StepFunctions2Component) component).setRegion((java.lang.String) value); return true;
case "trustAllCertificates": getOrCreateConfiguration((StepFunctions2Component) component).setTrustAllCertificates((boolean) value); return true;
case "uriEndpointOverride": getOrCreateConfiguration((StepFunctions2Component) component).setUriEndpointOverride((java.lang.String) value); return true;
case "useDefaultCredentialsProvider": getOrCreateConfiguration((StepFunctions2Component) component).setUseDefaultCredentialsProvider((boolean) value); return true;
case "useProfileCredentialsProvider": getOrCreateConfiguration((StepFunctions2Component) component).setUseProfileCredentialsProvider((boolean) value); return true;
case "autowiredEnabled": ((StepFunctions2Component) component).setAutowiredEnabled((boolean) value); return true;
case "awsSfnClient": getOrCreateConfiguration((StepFunctions2Component) component).setAwsSfnClient((software.amazon.awssdk.services.sfn.SfnClient) value); return true;
case "healthCheckConsumerEnabled": ((StepFunctions2Component) component).setHealthCheckConsumerEnabled((boolean) value); return true;
case "healthCheckProducerEnabled": ((StepFunctions2Component) component).setHealthCheckProducerEnabled((boolean) value); return true;
case "proxyHost": getOrCreateConfiguration((StepFunctions2Component) component).setProxyHost((java.lang.String) value); return true;
case "proxyPort": getOrCreateConfiguration((StepFunctions2Component) component).setProxyPort((java.lang.Integer) value); return true;
case "proxyProtocol": getOrCreateConfiguration((StepFunctions2Component) component).setProxyProtocol((software.amazon.awssdk.core.Protocol) value); return true;
case "accessKey": getOrCreateConfiguration((StepFunctions2Component) component).setAccessKey((java.lang.String) value); return true;
case "secretKey": getOrCreateConfiguration((StepFunctions2Component) component).setSecretKey((java.lang.String) value); return true;
case "sessionToken": getOrCreateConfiguration((StepFunctions2Component) component).setSessionToken((java.lang.String) value); return true;
case "useSessionCredentials": getOrCreateConfiguration((StepFunctions2Component) component).setUseSessionCredentials((boolean) value); return true;
default: return false;
}
}
}
} | Aws2StepFunctionsComponentBuilderImpl |
java | micronaut-projects__micronaut-core | core/src/main/java/io/micronaut/core/annotation/Introspected.java | {
"start": 8483,
"end": 9674
} | class ____ is the builder for this type.
*
* <p>Should be accessible. Mutually exclusive with {@link #builderMethod()}</p>
* @return The builder class
*/
Class<?> builderClass() default void.class;
/**
* The name of the method that is the builder for this type.
* <p>Should be accessible. Mutually exclusive with {@link #builderClass()} ()}</p>
* @return The builder method.
*/
String builderMethod() default "";
/**
* The name of the method that builds the instance.
*
* @return The method name.
*/
String creatorMethod() default "build";
/**
* @return The accessor style for the write methods of the builder.
*/
AccessorsStyle accessorStyle() default @AccessorsStyle(writePrefixes = "");
}
/**
* Allow pre-computed indexes for property lookups based on an annotation and a member.
*
* @see io.micronaut.core.beans.BeanIntrospection#getIndexedProperty(Class, String)
*/
@Documented
@Retention(RUNTIME)
@Target({ElementType.TYPE, ElementType.ANNOTATION_TYPE})
@ | that |
java | apache__camel | components/camel-salesforce/camel-salesforce-maven-plugin/src/test/resources/generated/Case_PickListAccentMarkEnum.java | {
"start": 368,
"end": 1020
} | enum ____ {
// Audiencia de Conciliación
AUDIENCIA_DE_CONCILIACIÓN("Audiencia de Conciliaci\u00F3n");
final String value;
private Case_PickListAccentMarkEnum(String value) {
this.value = value;
}
@JsonValue
public String value() {
return this.value;
}
@JsonCreator
public static Case_PickListAccentMarkEnum fromValue(String value) {
for (Case_PickListAccentMarkEnum e : Case_PickListAccentMarkEnum.values()) {
if (e.value.equals(value)) {
return e;
}
}
throw new IllegalArgumentException(value);
}
}
| Case_PickListAccentMarkEnum |
java | quarkusio__quarkus | integration-tests/hibernate-search-standalone-elasticsearch/src/main/java/io/quarkus/it/hibernate/search/standalone/elasticsearch/search/Person.java | {
"start": 664,
"end": 1412
} | class ____ {
@DocumentId
private final long id;
@FullTextField(analyzer = "standard")
@KeywordField(name = "name_sort", normalizer = "lowercase", sortable = Sortable.YES)
private String name;
@IndexedEmbedded
private Address address;
public Person(long id, String name, Address address) {
this.id = id;
this.name = name;
this.address = address;
}
public Long getId() {
return id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Address getAddress() {
return address;
}
public void setAddress(Address address) {
this.address = address;
}
}
| Person |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/jmx/export/assembler/AutodetectCapableMBeanInfoAssembler.java | {
"start": 779,
"end": 1108
} | interface ____ given the opportunity by the
* {@code MBeanExporter} to include additional beans in the registration process.
*
* <p>The exact mechanism for deciding which beans to include is left to
* implementing classes.
*
* @author Rob Harrop
* @since 1.2
* @see org.springframework.jmx.export.MBeanExporter
*/
public | are |
java | apache__camel | components/camel-as2/camel-as2-api/src/main/java/org/apache/camel/component/as2/api/AS2ServerConnection.java | {
"start": 5456,
"end": 12211
} | class ____ {
private final Certificate[] signingCertificateChain;
private final PrivateKey signingPrivateKey;
private final PrivateKey decryptingPrivateKey;
private final Certificate[] validateSigningCertificateChain;
private final AS2SignatureAlgorithm signingAlgorithm;
public AS2ConsumerConfiguration(
AS2SignatureAlgorithm signingAlgorithm,
Certificate[] signingCertificateChain,
PrivateKey signingPrivateKey,
PrivateKey decryptingPrivateKey,
Certificate[] validateSigningCertificateChain) {
this.signingAlgorithm = signingAlgorithm;
this.signingCertificateChain = signingCertificateChain;
this.signingPrivateKey = signingPrivateKey;
this.decryptingPrivateKey = decryptingPrivateKey;
this.validateSigningCertificateChain = validateSigningCertificateChain;
}
// Getters
public Certificate[] getValidateSigningCertificateChain() {
return validateSigningCertificateChain;
}
public Certificate[] getSigningCertificateChain() {
return signingCertificateChain;
}
public AS2SignatureAlgorithm getSigningAlgorithm() {
return signingAlgorithm;
}
public PrivateKey getSigningPrivateKey() {
return signingPrivateKey;
}
public PrivateKey getDecryptingPrivateKey() {
return decryptingPrivateKey;
}
}
/**
* Retrieves the specific AS2 consumer configuration associated with the given request path.
*
* @param path The canonical request URI path (e.g., "/consumerA").
* @return An Optional containing the configuration if a match is found, otherwise empty.
*/
public Optional<AS2ConsumerConfiguration> getConfigurationForPath(String path) {
return Optional.ofNullable(consumerConfigurations.get(path));
}
/**
* Dynamically determines and injects the AS2 security configuration (keys, certificates, and algorithm) for the
* incoming HTTP request.
*
* This method performs three main tasks: 1. Looks up the correct AS2ConsumerConfiguration based on the request URI
* path. 2. Injects the decryption and signing security material into the HttpContext for use by downstream
* processors (like the AS2Consumer and ResponseMDN). 3. Stores the configuration in a ThreadLocal for use by
* asynchronous MDN logic.
*
* @param request The incoming HTTP request.
* @param context The shared execution context for the request lifecycle.
* @return The AS2ConsumerConfiguration object found, or null if none was matched.
*/
private AS2ConsumerConfiguration setupConfigurationForRequest(ClassicHttpRequest request, HttpContext context) {
String requestUri = request.getRequestUri();
String requestUriPath = cleanUpPath(requestUri);
// 1. LOOKUP: Find the specific consumer configuration
AS2ConsumerConfiguration config = AS2ServerConnection.this
.getConfigurationForPath(requestUriPath).orElse(null);
// 2. Logging BEFORE injection (CRITICAL for debugging path issues)
LOG.debug("Processing request. Incoming URI: {}, Canonical Path: {}. Config Found: {}",
requestUri, requestUriPath, (config != null));
// 3. Handle missing config
if (config == null) {
LOG.warn("No AS2 consumer configuration found for canonical path: {}. Encrypted messages will likely fail.",
requestUriPath);
return null;
}
// 4. INJECTION: Inject dynamic security keys into the HttpContext
context.setAttribute(AS2_DECRYPTING_PRIVATE_KEY, config.getDecryptingPrivateKey());
context.setAttribute(AS2_VALIDATE_SIGNING_CERTIFICATE_CHAIN, config.getValidateSigningCertificateChain());
context.setAttribute(AS2_SIGNING_PRIVATE_KEY, config.getSigningPrivateKey());
context.setAttribute(AS2_SIGNING_CERTIFICATE_CHAIN, config.getSigningCertificateChain());
context.setAttribute(AS2_SIGNING_ALGORITHM, config.getSigningAlgorithm());
// 5. CRITICAL READ-BACK CHECK: Immediately check if the key is retrievable from the context
Object checkKey = context.getAttribute(AS2_DECRYPTING_PRIVATE_KEY);
if (checkKey == null) {
LOG.error(
"FATAL: Decrypting Private Key failed to be read back from HttpContext immediately after injection for path: {}",
requestUriPath);
} else if (!(checkKey instanceof PrivateKey)) {
LOG.error("FATAL: Key in HttpContext is not a PrivateKey object! Found type: {}", checkKey.getClass().getName());
} else {
LOG.debug("Context injection confirmed: Decrypting Key set successfully into HttpContext. Key type: {}",
checkKey.getClass().getName());
}
// 6. Set ThreadLocal for later MDN processing
ThreadLocalConfigWrapper wrapper = new ThreadLocalConfigWrapper(config, requestUriPath);
CURRENT_CONSUMER_CONFIG.set(wrapper);
return config;
}
/**
* Extracts and normalizes the path component from the request URI.
*
* This ensures consistency by stripping query parameters and scheme/authority, and defaults to "/" if the path is
* empty or parsing fails.
*
* @param requestUri The full request URI string from the HTTP request line.
* @return The canonical path, starting with a "/", without query parameters.
*/
private String cleanUpPath(String requestUri) {
try {
URI uri = new URI(requestUri);
String path = uri.getPath();
// Ensure path is not null and normalize to "/" if it is empty/null after parsing
if (path == null || path.isEmpty()) {
return "/";
}
return path;
} catch (Exception e) {
// Should not happen for a valid HTTP request line
LOG.warn("Error parsing request URI: {}", requestUri, e);
return "/"; // Default to root path in case of error
}
}
/**
* Interceptor that executes early in the request processing chain to find the correct
* {@link AS2ConsumerConfiguration} for the incoming request URI and injects its security material
* (keys/certs/algorithm) into the {@link HttpContext} and {@link ThreadLocal} storage.
*/
private | AS2ConsumerConfiguration |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/rest/handler/legacy/checkpoints/CheckpointStatsCacheTest.java | {
"start": 1310,
"end": 2780
} | class ____ {
@Test
void testZeroSizeCache() throws Exception {
AbstractCheckpointStats checkpoint = createCheckpoint(0, CheckpointStatsStatus.COMPLETED);
CheckpointStatsCache cache = new CheckpointStatsCache(0);
cache.tryAdd(checkpoint);
assertThat(cache.tryGet(0L)).isNull();
}
@Test
void testCacheAddAndGet() throws Exception {
AbstractCheckpointStats chk0 = createCheckpoint(0, CheckpointStatsStatus.COMPLETED);
AbstractCheckpointStats chk1 = createCheckpoint(1, CheckpointStatsStatus.COMPLETED);
AbstractCheckpointStats chk2 = createCheckpoint(2, CheckpointStatsStatus.IN_PROGRESS);
CheckpointStatsCache cache = new CheckpointStatsCache(1);
cache.tryAdd(chk0);
assertThat(cache.tryGet(0)).isEqualTo(chk0);
cache.tryAdd(chk1);
assertThat(cache.tryGet(0)).isNull();
assertThat(cache.tryGet(1)).isEqualTo(chk1);
cache.tryAdd(chk2);
assertThat(cache.tryGet(2)).isNull();
assertThat(cache.tryGet(0)).isNull();
assertThat(cache.tryGet(1)).isEqualTo(chk1);
}
private AbstractCheckpointStats createCheckpoint(long id, CheckpointStatsStatus status) {
AbstractCheckpointStats checkpoint = mock(AbstractCheckpointStats.class);
when(checkpoint.getCheckpointId()).thenReturn(id);
when(checkpoint.getStatus()).thenReturn(status);
return checkpoint;
}
}
| CheckpointStatsCacheTest |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/tests/http/Http2ServerTest.java | {
"start": 32076,
"end": 122701
} | class ____ {
void send() {
boolean writable = request.encoder.flowController().isWritable(stream);
if (writable) {
Buffer buf = Buffer.buffer(chunk);
expected.appendBuffer(buf);
request.encoder.writeData(request.context, id, ((BufferInternal)buf).getByteBuf(), 0, false, request.context.newPromise());
request.context.flush();
request.context.executor().execute(this::send);
} else {
request.encoder.writeData(request.context, id, Unpooled.EMPTY_BUFFER, 0, true, request.context.newPromise());
request.context.flush();
paused.set(true);
}
}
}
new Anonymous().send();
});
await();
}
@Test
public void testServerResponseWritability() throws Exception {
testStreamWritability(req -> {
HttpServerResponse resp = req.response();
resp.putHeader("content-type", "text/plain");
resp.setChunked(true);
return Future.succeededFuture(resp);
});
}
private void testStreamWritability(Function<HttpServerRequest, Future<WriteStream<Buffer>>> streamProvider) throws Exception {
Context ctx = vertx.getOrCreateContext();
String content = TestUtils.randomAlphaString(1024);
StringBuilder expected = new StringBuilder();
Promise<Void> whenFull = Promise.promise();
AtomicBoolean drain = new AtomicBoolean();
server.requestHandler(req -> {
Future<WriteStream<Buffer>> fut = streamProvider.apply(req);
fut.onComplete(onSuccess(stream -> {
vertx.setPeriodic(1, timerID -> {
if (stream.writeQueueFull()) {
stream.drainHandler(v -> {
assertOnIOContext(ctx);
expected.append("last");
stream.end(Buffer.buffer("last"));
});
vertx.cancelTimer(timerID);
drain.set(true);
whenFull.complete();
} else {
expected.append(content);
Buffer buf = Buffer.buffer(content);
stream.write(buf);
}
});
}));
});
startServer(ctx);
TestClient client = new TestClient();
client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
AtomicInteger toAck = new AtomicInteger();
int id = request.nextStreamId();
Http2ConnectionEncoder encoder = request.encoder;
encoder.writeHeaders(request.context, id, GET("/"), 0, true, request.context.newPromise());
request.decoder.frameListener(new Http2FrameAdapter() {
StringBuilder received = new StringBuilder();
@Override
public int onDataRead(ChannelHandlerContext ctx, int streamId, ByteBuf data, int padding, boolean endOfStream) throws Http2Exception {
received.append(data.toString(StandardCharsets.UTF_8));
int delta = super.onDataRead(ctx, streamId, data, padding, endOfStream);
if (endOfStream) {
vertx.runOnContext(v -> {
assertEquals(expected.toString(), received.toString());
testComplete();
});
return delta;
} else {
if (drain.get()) {
return delta;
} else {
toAck.getAndAdd(delta);
return 0;
}
}
}
});
whenFull.future().onComplete(ar -> {
request.context.executor().execute(() -> {
try {
request.decoder.flowController().consumeBytes(request.connection.stream(id), toAck.intValue());
request.context.flush();
} catch (Http2Exception e) {
e.printStackTrace();
fail(e);
}
});
});
});
await();
}
@Test
public void testTrailers() throws Exception {
server.requestHandler(req -> {
HttpServerResponse resp = req.response();
resp.setChunked(true);
resp.write("some-content");
resp.putTrailer("Foo", "foo_value");
resp.putTrailer("bar", "bar_value");
resp.putTrailer("juu", (List<String>)Arrays.asList("juu_value_1", "juu_value_2"));
resp.end();
});
startServer();
TestClient client = new TestClient();
client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
request.decoder.frameListener(new Http2EventAdapter() {
int count;
@Override
public void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers, int streamDependency, short weight, boolean exclusive, int padding, boolean endStream) throws Http2Exception {
switch (count++) {
case 0:
vertx.runOnContext(v -> {
assertFalse(endStream);
});
break;
case 1:
vertx.runOnContext(v -> {
assertEquals("foo_value", headers.get("foo").toString());
assertEquals(1, headers.getAll("foo").size());
assertEquals("foo_value", headers.getAll("foo").get(0).toString());
assertEquals("bar_value", headers.getAll("bar").get(0).toString());
assertEquals(2, headers.getAll("juu").size());
assertEquals("juu_value_1", headers.getAll("juu").get(0).toString());
assertEquals("juu_value_2", headers.getAll("juu").get(1).toString());
assertTrue(endStream);
testComplete();
});
break;
default:
vertx.runOnContext(v -> {
fail();
});
break;
}
}
});
int id = request.nextStreamId();
request.encoder.writeHeaders(request.context, id, GET("/"), 0, true, request.context.newPromise());
request.context.flush();
});
await();
}
@Test
public void testServerResetClientStream1() throws Exception {
server.requestHandler(req -> {
req.handler(buf -> {
req.response().reset(8);
});
});
testServerResetClientStream(code -> {
assertEquals(8, code);
testComplete();
}, false);
}
@Test
public void testServerResetClientStream2() throws Exception {
server.requestHandler(req -> {
req.handler(buf -> {
req.response().end();
req.response().reset(8);
});
});
testServerResetClientStream(code -> {
assertEquals(8, code);
testComplete();
}, false);
}
@Test
public void testServerResetClientStream3() throws Exception {
server.requestHandler(req -> {
req.endHandler(buf -> {
req.response().reset(8);
});
});
testServerResetClientStream(code -> {
assertEquals(8, code);
testComplete();
}, true);
}
private void testServerResetClientStream(LongConsumer resetHandler, boolean end) throws Exception {
startServer();
TestClient client = new TestClient();
client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
int id = request.nextStreamId();
request.decoder.frameListener(new Http2EventAdapter() {
@Override
public void onRstStreamRead(ChannelHandlerContext ctx, int streamId, long errorCode) throws Http2Exception {
vertx.runOnContext(v -> {
resetHandler.accept(errorCode);
});
}
});
Http2ConnectionEncoder encoder = request.encoder;
encoder.writeHeaders(request.context, id, GET("/"), 0, false, request.context.newPromise());
encoder.writeData(request.context, id, BufferInternal.buffer("hello").getByteBuf(), 0, end, request.context.newPromise());
});
await();
}
@Test
public void testClientResetServerStream() throws Exception {
Context ctx = vertx.getOrCreateContext();
Promise<Void> bufReceived = Promise.promise();
AtomicInteger resetCount = new AtomicInteger();
server.requestHandler(req -> {
req.handler(buf -> {
bufReceived.complete();
});
req.exceptionHandler(err -> {
assertOnIOContext(ctx);
if (err instanceof StreamResetException) {
assertEquals(10L, ((StreamResetException) err).getCode());
assertEquals(0, resetCount.getAndIncrement());
}
});
req.response().exceptionHandler(err -> {
assertOnIOContext(ctx);
if (err instanceof StreamResetException) {
assertEquals(10L, ((StreamResetException) err).getCode());
assertEquals(1, resetCount.getAndIncrement());
testComplete();
}
});
});
startServer(ctx);
TestClient client = new TestClient();
client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
int id = request.nextStreamId();
Http2ConnectionEncoder encoder = request.encoder;
encoder.writeHeaders(request.context, id, GET("/"), 0, false, request.context.newPromise());
encoder.writeData(request.context, id, BufferInternal.buffer("hello").getByteBuf(), 0, false, request.context.newPromise());
bufReceived.future().onComplete(ar -> {
encoder.writeRstStream(request.context, id, 10, request.context.newPromise());
request.context.flush();
});
});
await();
}
@Test
public void testConnectionClose() throws Exception {
Context ctx = vertx.getOrCreateContext();
server.requestHandler(req -> {
HttpConnection conn = req.connection();
conn.closeHandler(v -> {
assertSame(ctx, Vertx.currentContext());
testComplete();
});
req.response().putHeader("Content-Type", "text/plain").end();
});
startServer(ctx);
TestClient client = new TestClient();
client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
int id = request.nextStreamId();
Http2ConnectionEncoder encoder = request.encoder;
encoder.writeHeaders(request.context, id, GET("/"), 0, true, request.context.newPromise());
request.decoder.frameListener(new Http2FrameAdapter() {
@Override
public void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers, int streamDependency, short weight, boolean exclusive, int padding, boolean endStream) throws Http2Exception {
request.context.close();
}
});
});
await();
}
@Test
public void testPushPromise() throws Exception {
testPushPromise(GET("/").authority("whatever.com"), (resp, handler ) -> {
resp.push(HttpMethod.GET, "/wibble").onComplete(handler);
}, headers -> {
assertEquals("GET", headers.method().toString());
assertEquals("https", headers.scheme().toString());
assertEquals("/wibble", headers.path().toString());
assertEquals("whatever.com", headers.authority().toString());
});
}
@Test
public void testPushPromiseHeaders() throws Exception {
testPushPromise(GET("/").authority("whatever.com"), (resp, handler ) -> {
resp.push(HttpMethod.GET, "/wibble", HttpHeaders.
set("foo", "foo_value").
set("bar", Arrays.<CharSequence>asList("bar_value_1", "bar_value_2"))).onComplete(handler);
}, headers -> {
assertEquals("GET", headers.method().toString());
assertEquals("https", headers.scheme().toString());
assertEquals("/wibble", headers.path().toString());
assertEquals("whatever.com", headers.authority().toString());
assertEquals("foo_value", headers.get("foo").toString());
assertEquals(Arrays.asList("bar_value_1", "bar_value_2"), headers.getAll("bar").stream().map(CharSequence::toString).collect(Collectors.toList()));
});
}
@Test
public void testPushPromiseNoAuthority() throws Exception {
Http2Headers get = GET("/");
get.remove("authority");
testPushPromise(get, (resp, handler ) -> {
resp.push(HttpMethod.GET, "/wibble").onComplete(handler);
}, headers -> {
assertEquals("GET", headers.method().toString());
assertEquals("https", headers.scheme().toString());
assertEquals("/wibble", headers.path().toString());
assertNull(headers.authority());
});
}
@Test
public void testPushPromiseOverrideAuthority() throws Exception {
testPushPromise(GET("/").authority("whatever.com"), (resp, handler ) -> {
resp.push(HttpMethod.GET, HostAndPort.authority("override.com"), "/wibble").onComplete(handler);
}, headers -> {
assertEquals("GET", headers.method().toString());
assertEquals("https", headers.scheme().toString());
assertEquals("/wibble", headers.path().toString());
assertEquals("override.com", headers.authority().toString());
});
}
private void testPushPromise(Http2Headers requestHeaders,
BiConsumer<HttpServerResponse, Handler<AsyncResult<HttpServerResponse>>> pusher,
Consumer<Http2Headers> headerChecker) throws Exception {
Context ctx = vertx.getOrCreateContext();
server.requestHandler(req -> {
Handler<AsyncResult<HttpServerResponse>> handler = ar -> {
assertSameEventLoop(ctx, Vertx.currentContext());
assertTrue(ar.succeeded());
HttpServerResponse response = ar.result();
response./*putHeader("content-type", "application/plain").*/end("the_content");
assertIllegalStateException(() -> response.push(HttpMethod.GET, "/wibble2"));
};
pusher.accept(req.response(), handler);
});
startServer(ctx);
TestClient client = new TestClient();
client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
int id = request.nextStreamId();
Http2ConnectionEncoder encoder = request.encoder;
encoder.writeHeaders(request.context, id, requestHeaders, 0, true, request.context.newPromise());
Map<Integer, Http2Headers> pushed = new HashMap<>();
request.decoder.frameListener(new Http2FrameAdapter() {
@Override
public void onPushPromiseRead(ChannelHandlerContext ctx, int streamId, int promisedStreamId, Http2Headers headers, int padding) throws Http2Exception {
pushed.put(promisedStreamId, headers);
}
@Override
public int onDataRead(ChannelHandlerContext ctx, int streamId, ByteBuf data, int padding, boolean endOfStream) throws Http2Exception {
int delta = super.onDataRead(ctx, streamId, data, padding, endOfStream);
String content = data.toString(StandardCharsets.UTF_8);
vertx.runOnContext(v -> {
assertEquals(Collections.singleton(streamId), pushed.keySet());
assertEquals("the_content", content);
Http2Headers pushedHeaders = pushed.get(streamId);
headerChecker.accept(pushedHeaders);
testComplete();
});
return delta;
}
});
});
await();
}
@Test
public void testResetActivePushPromise() throws Exception {
Context ctx = vertx.getOrCreateContext();
server.requestHandler(req -> {
req.response().push(HttpMethod.GET, "/wibble").onComplete(onSuccess(response -> {
assertOnIOContext(ctx);
AtomicInteger resets = new AtomicInteger();
response.exceptionHandler(err -> {
if (err instanceof StreamResetException) {
assertEquals(8, ((StreamResetException)err).getCode());
resets.incrementAndGet();
}
});
response.closeHandler(v -> {
testComplete();
assertEquals(1, resets.get());
});
response.setChunked(true).write("some_content");
}));
});
startServer(ctx);
TestClient client = new TestClient();
client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
int id = request.nextStreamId();
Http2ConnectionEncoder encoder = request.encoder;
encoder.writeHeaders(request.context, id, GET("/"), 0, true, request.context.newPromise());
request.decoder.frameListener(new Http2FrameAdapter() {
@Override
public int onDataRead(ChannelHandlerContext ctx, int streamId, ByteBuf data, int padding, boolean endOfStream) throws Http2Exception {
request.encoder.writeRstStream(ctx, streamId, Http2Error.CANCEL.code(), ctx.newPromise());
request.context.flush();
return super.onDataRead(ctx, streamId, data, padding, endOfStream);
}
});
});
await();
}
@Test
public void testQueuePushPromise() throws Exception {
Context ctx = vertx.getOrCreateContext();
int numPushes = 10;
Set<String> pushSent = new HashSet<>();
server.requestHandler(req -> {
req.response().setChunked(true).write("abc");
for (int i = 0; i < numPushes; i++) {
int val = i;
String path = "/wibble" + val;
req.response().push(HttpMethod.GET, path).onComplete(onSuccess(response -> {
assertSameEventLoop(ctx, Vertx.currentContext());
pushSent.add(path);
vertx.setTimer(10, id -> {
response.end("wibble-" + val);
});
}));
}
});
startServer(ctx);
TestClient client = new TestClient();
client.settings.maxConcurrentStreams(3);
client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
int id = request.nextStreamId();
Http2ConnectionEncoder encoder = request.encoder;
encoder.writeHeaders(request.context, id, GET("/"), 0, true, request.context.newPromise());
request.decoder.frameListener(new Http2FrameAdapter() {
int count = numPushes;
Set<String> pushReceived = new HashSet<>();
@Override
public void onPushPromiseRead(ChannelHandlerContext ctx, int streamId, int promisedStreamId, Http2Headers headers, int padding) throws Http2Exception {
pushReceived.add(headers.path().toString());
}
@Override
public int onDataRead(ChannelHandlerContext ctx, int streamId, ByteBuf data, int padding, boolean endOfStream) throws Http2Exception {
if (count-- == 0) {
vertx.runOnContext(v -> {
assertEquals(numPushes, pushSent.size());
assertEquals(pushReceived, pushSent);
testComplete();
});
}
return super.onDataRead(ctx, streamId, data, padding, endOfStream);
}
});
});
await();
}
@Test
public void testResetPendingPushPromise() throws Exception {
Context ctx = vertx.getOrCreateContext();
server.requestHandler(req -> {
req.response().push(HttpMethod.GET, "/wibble").onComplete(onFailure(r -> {
assertOnIOContext(ctx);
testComplete();
}));
});
startServer(ctx);
TestClient client = new TestClient();
client.settings.maxConcurrentStreams(0);
client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
int id = request.nextStreamId();
Http2ConnectionEncoder encoder = request.encoder;
encoder.writeHeaders(request.context, id, GET("/"), 0, true, request.context.newPromise());
request.decoder.frameListener(new Http2FrameAdapter() {
@Override
public void onPushPromiseRead(ChannelHandlerContext ctx, int streamId, int promisedStreamId, Http2Headers headers, int padding) throws Http2Exception {
request.encoder.writeRstStream(request.context, promisedStreamId, Http2Error.CANCEL.code(), request.context.newPromise());
request.context.flush();
}
});
});
await();
}
  // A request without the ":authority" pseudo-header must fall back to the "host"
  // header to populate the request authority.
  @Test
  public void testHostHeaderInsteadOfAuthorityPseudoHeader() throws Exception {
    // build the HTTP/2 headers, omit the ":authority" pseudo-header and include the "host" header instead
    Http2Headers headers = new DefaultHttp2Headers().method("GET").scheme("https").path("/").set("host", DEFAULT_HTTPS_HOST_AND_PORT);
    server.requestHandler(req -> {
      // validate that the authority is properly populated
      assertEquals(DEFAULT_HTTPS_HOST, req.authority().host());
      assertEquals(DEFAULT_HTTPS_PORT, req.authority().port());
      testComplete();
    });
    startServer();
    TestClient client = new TestClient();
    client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
      int id = request.nextStreamId();
      Http2ConnectionEncoder encoder = request.encoder;
      encoder.writeHeaders(request.context, id, headers, 0, true, request.context.newPromise());
    });
    await();
  }
  // The requests below carry headers the server treats as malformed (missing or
  // invalid pseudo-headers, inconsistent host); each must be rejected with a
  // stream reset and the request handler must never be invoked.
  @Test
  public void testMissingMethodPseudoHeader() throws Exception {
    testMalformedRequestHeaders(new DefaultHttp2Headers().scheme("http").path("/"));
  }
  @Test
  public void testMissingSchemePseudoHeader() throws Exception {
    testMalformedRequestHeaders(new DefaultHttp2Headers().method("GET").path("/"));
  }
  @Test
  public void testMissingPathPseudoHeader() throws Exception {
    testMalformedRequestHeaders(new DefaultHttp2Headers().method("GET").scheme("http"));
  }
  @Test
  public void testInvalidAuthority() throws Exception {
    // userinfo ("foo@") in the :authority pseudo-header
    testMalformedRequestHeaders(new DefaultHttp2Headers().method("GET").scheme("http").authority("foo@" + DEFAULT_HTTPS_HOST_AND_PORT).path("/"));
  }
  @Test
  public void testInvalidHost1() throws Exception {
    // userinfo in the host header
    testMalformedRequestHeaders(new DefaultHttp2Headers().method("GET").scheme("http").authority(DEFAULT_HTTPS_HOST_AND_PORT).path("/").set("host", "foo@" + DEFAULT_HTTPS_HOST_AND_PORT));
  }
  @Test
  public void testInvalidHost2() throws Exception {
    // host header naming a different host than :authority
    testMalformedRequestHeaders(new DefaultHttp2Headers().method("GET").scheme("http").authority(DEFAULT_HTTPS_HOST_AND_PORT).path("/").set("host", "another-host:" + DEFAULT_HTTPS_PORT));
  }
  @Test
  public void testInvalidHost3() throws Exception {
    // host header that does not carry the same host:port as :authority
    testMalformedRequestHeaders(new DefaultHttp2Headers().method("GET").scheme("http").authority(DEFAULT_HTTPS_HOST_AND_PORT).path("/").set("host", DEFAULT_HTTP_HOST));
  }
  @Test
  public void testConnectInvalidPath() throws Exception {
    // CONNECT with a :path pseudo-header
    testMalformedRequestHeaders(new DefaultHttp2Headers().method("CONNECT").path("/").authority(DEFAULT_HTTPS_HOST_AND_PORT));
  }
  @Test
  public void testConnectInvalidScheme() throws Exception {
    // CONNECT with a :scheme pseudo-header
    testMalformedRequestHeaders(new DefaultHttp2Headers().method("CONNECT").scheme("http").authority(DEFAULT_HTTPS_HOST_AND_PORT));
  }
  @Test
  public void testConnectInvalidAuthority() throws Exception {
    // CONNECT with userinfo in :authority
    testMalformedRequestHeaders(new DefaultHttp2Headers().method("CONNECT").authority("foo@" + DEFAULT_HTTPS_HOST_AND_PORT));
  }
  // Sends the given (malformed) headers and completes the test when the server
  // answers with RST_STREAM; fails if the request handler is ever called.
  private void testMalformedRequestHeaders(Http2Headers headers) throws Exception {
    server.requestHandler(req -> fail());
    startServer();
    TestClient client = new TestClient();
    client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
      int id = request.nextStreamId();
      Http2ConnectionEncoder encoder = request.encoder;
      encoder.writeHeaders(request.context, id, headers, 0, true, request.context.newPromise());
      request.decoder.frameListener(new Http2FrameAdapter() {
        @Override
        public void onRstStreamRead(ChannelHandlerContext ctx, int streamId, long errorCode) throws Http2Exception {
          vertx.runOnContext(v -> {
            testComplete();
          });
        }
      });
    });
    await();
  }
  // The four tests below verify that an exception thrown from a request handler,
  // a data handler or an end handler is reported to the server context's
  // exception handler (as the exact same instance).
  @Test
  public void testRequestHandlerFailure() throws Exception {
    testHandlerFailure(false, (err, server) -> {
      server.requestHandler(req -> {
        throw err;
      });
    });
  }
  @Test
  public void testRequestEndHandlerFailure() throws Exception {
    testHandlerFailure(false, (err, server) -> {
      server.requestHandler(req -> {
        req.endHandler(v -> {
          throw err;
        });
      });
    });
  }
  @Test
  public void testRequestEndHandlerFailureWithData() throws Exception {
    testHandlerFailure(true, (err, server) -> {
      server.requestHandler(req -> {
        req.endHandler(v -> {
          throw err;
        });
      });
    });
  }
  @Test
  public void testRequestDataHandlerFailure() throws Exception {
    testHandlerFailure(true, (err, server) -> {
      server.requestHandler(req -> {
        req.handler(buf -> {
          throw err;
        });
      });
    });
  }
  // Deploys a server configured by {@code configurator} to throw {@code failure},
  // sends a request (followed by a DATA frame when {@code data} is true) and
  // expects the same failure instance to reach the context exception handler.
  private void testHandlerFailure(boolean data, BiConsumer<RuntimeException, HttpServer> configurator) throws Exception {
    RuntimeException failure = new RuntimeException();
    io.vertx.core.http.Http2Settings settings = TestUtils.randomHttp2Settings();
    server.close();
    server = vertx.createHttpServer(new HttpServerOptions(serverOptions).setInitialSettings(settings));
    configurator.accept(failure, server);
    Context ctx = vertx.getOrCreateContext();
    ctx.exceptionHandler(err -> {
      // the very exception thrown by the handler must propagate unchanged
      assertSame(err, failure);
      testComplete();
    });
    startServer(ctx);
    TestClient client = new TestClient();
    client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
      int id = request.nextStreamId();
      request.encoder.writeHeaders(request.context, id, GET("/"), 0, !data, request.context.newPromise());
      if (data) {
        request.encoder.writeData(request.context, id, BufferInternal.buffer("hello").getByteBuf(), 0, true, request.context.newPromise());
      }
    });
    await();
  }
private static File createTempFile(Buffer buffer) throws Exception {
File f = File.createTempFile("vertx", ".bin");
f.deleteOnExit();
try(FileOutputStream out = new FileOutputStream(f)) {
out.write(buffer.getBytes());
}
return f;
}
@Test
public void testSendFile() throws Exception {
Buffer expected = Buffer.buffer(TestUtils.randomAlphaString(1000 * 1000));
File tmp = createTempFile(expected);
testSendFile(expected, tmp.getAbsolutePath(), 0, expected.length());
}
@Test
public void testSendFileRange() throws Exception {
Buffer expected = Buffer.buffer(TestUtils.randomAlphaString(1000 * 1000));
File tmp = createTempFile(expected);
int from = 200 * 1000;
int to = 700 * 1000;
testSendFile(expected.slice(from, to), tmp.getAbsolutePath(), from, to - from);
}
@Test
public void testSendEmptyFile() throws Exception {
Buffer expected = Buffer.buffer();
File tmp = createTempFile(expected);
testSendFile(expected, tmp.getAbsolutePath(), 0, expected.length());
}
  // Serves {@code length} bytes of the file at {@code path} starting at
  // {@code offset} via sendFile, asserting on the server side that the written
  // byte count matches and on the client side that both the content-length
  // header and the accumulated DATA frames match {@code expected}.
  private void testSendFile(Buffer expected, String path, long offset, long length) throws Exception {
    waitFor(2); // server-side completion + client-side verification
    server.requestHandler(req -> {
      HttpServerResponse resp = req.response();
      resp.sendFile(path, offset, length).onComplete(onSuccess(v -> {
        assertEquals(resp.bytesWritten(), length);
        complete();
      }));
    });
    startServer();
    TestClient client = new TestClient();
    client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
      request.decoder.frameListener(new Http2EventAdapter() {
        Buffer buffer = Buffer.buffer(); // accumulates the received DATA frames
        Http2Headers responseHeaders;
        private void endStream() {
          vertx.runOnContext(v -> {
            assertEquals("" + length, responseHeaders.get("content-length").toString());
            assertEquals(expected, buffer);
            complete();
          });
        }
        @Override
        public void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers, int streamDependency, short weight, boolean exclusive, int padding, boolean endStream) throws Http2Exception {
          responseHeaders = headers;
          if (endStream) {
            // empty file: the response ends with the HEADERS frame itself
            endStream();
          }
        }
        @Override
        public int onDataRead(ChannelHandlerContext ctx, int streamId, ByteBuf data, int padding, boolean endOfStream) throws Http2Exception {
          buffer.appendBuffer(BufferInternal.buffer(data.duplicate()));
          if (endOfStream) {
            endStream();
          }
          // bytes returned to the flow controller
          return data.readableBytes() + padding;
        }
      });
      int id = request.nextStreamId();
      request.encoder.writeHeaders(request.context, id, GET("/"), 0, true, request.context.newPromise());
      request.context.flush();
    });
    await();
  }
  // Sends a deliberately corrupted DATA frame (padding length larger than the
  // payload) and verifies that the request and response exception handlers fire
  // on the IO context before the response close handler runs.
  @Test
  public void testStreamError() throws Exception {
    Promise<Void> when = Promise.promise();
    Context ctx = vertx.getOrCreateContext();
    server.requestHandler(req -> {
      AtomicInteger reqErrors = new AtomicInteger();
      req.exceptionHandler(err -> {
        // Called twice : reset + close
        assertOnIOContext(ctx);
        reqErrors.incrementAndGet();
      });
      AtomicInteger respErrors = new AtomicInteger();
      req.response().exceptionHandler(err -> {
        assertOnIOContext(ctx);
        respErrors.incrementAndGet();
      });
      req.response().closeHandler(v -> {
        assertOnIOContext(ctx);
        assertTrue("Was expecting reqErrors to be > 0", reqErrors.get() > 0);
        assertTrue("Was expecting respErrors to be > 0", respErrors.get() > 0);
        testComplete();
      });
      req.response().endHandler(v -> {
        fail();
      });
      // signal the client that all handlers are installed
      when.complete();
    });
    startServer(ctx);
    TestClient client = new TestClient();
    client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
      int id = request.nextStreamId();
      Http2ConnectionEncoder encoder = request.encoder;
      encoder.writeHeaders(request.context, id, GET("/"), 0, false, request.context.newPromise());
      request.context.flush();
      when.future().onComplete(ar -> {
        // Send a corrupted frame on purpose to check we get the corresponding error in the request exception handler
        // the error is : greater padding value 0c -> 1F
        // ChannelFuture a = encoder.frameWriter().writeData(request.context, id, Buffer.buffer("hello").getByteBuf(), 12, false, request.context.newPromise());
        // normal frame : 00 00 12 00 08 00 00 00 03 0c 68 65 6c 6c 6f 00 00 00 00 00 00 00 00 00 00 00 00
        // corrupted frame : 00 00 12 00 08 00 00 00 03 1F 68 65 6c 6c 6f 00 00 00 00 00 00 00 00 00 00 00 00
        request.channel.write(BufferInternal.buffer(new byte[]{
          0x00, 0x00, 0x12, 0x00, 0x08, 0x00, 0x00, 0x00, (byte)(id & 0xFF), 0x1F, 0x68, 0x65, 0x6c, 0x6c,
          0x6f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
        }).getByteBuf());
        request.context.flush();
      });
    });
    await();
  }
@Test
public void testPromiseStreamError() throws Exception {
Context ctx = vertx.getOrCreateContext();
Promise<Void> when = Promise.promise();
server.requestHandler(req -> {
req.response().push(HttpMethod.GET, "/wibble").onComplete(onSuccess(resp -> {
assertOnIOContext(ctx);
when.complete();
AtomicInteger erros = new AtomicInteger();
resp.exceptionHandler(err -> {
assertOnIOContext(ctx);
erros.incrementAndGet();
});
resp.closeHandler(v -> {
assertOnIOContext(ctx);
assertTrue("Was expecting errors to be > 0", erros.get() > 0);
testComplete();
});
resp.endHandler(v -> {
fail();
});
resp.setChunked(true).write("whatever"); // Transition to half-closed remote
}));
});
startServer(ctx);
TestClient client = new TestClient();
client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
request.decoder.frameListener(new Http2EventAdapter() {
@Override
public void onPushPromiseRead(ChannelHandlerContext ctx, int streamId, int promisedStreamId, Http2Headers headers, int padding) throws Http2Exception {
when.future().onComplete(ar -> {
Http2ConnectionEncoder encoder = request.encoder;
encoder.frameWriter().writeHeaders(request.context, promisedStreamId, GET("/"), 0, false, request.context.newPromise());
request.context.flush();
});
}
});
int id = request.nextStreamId();
Http2ConnectionEncoder encoder = request.encoder;
encoder.writeHeaders(request.context, id, GET("/"), 0, false, request.context.newPromise());
request.context.flush();
});
await();
}
  // Triggers a connection-level decode error (RST_STREAM for a stream that was
  // never opened) and verifies the error surfaces on the request, the response
  // and the connection, followed by the response and connection close handlers.
  @Test
  public void testConnectionDecodeError() throws Exception {
    Context ctx = vertx.getOrCreateContext();
    waitFor(2); // response close + connection close
    Promise<Void> when = Promise.promise();
    server.requestHandler(req -> {
      AtomicInteger reqFailures = new AtomicInteger();
      AtomicInteger respFailures = new AtomicInteger();
      req.exceptionHandler(err -> {
        assertOnIOContext(ctx);
        reqFailures.incrementAndGet();
      });
      req.response().exceptionHandler(err -> {
        assertOnIOContext(ctx);
        respFailures.incrementAndGet();
      });
      req.response().closeHandler(v -> {
        assertOnIOContext(ctx);
        complete();
      });
      req.response().endHandler(v -> {
        fail();
      });
      HttpConnection conn = req.connection();
      AtomicInteger connFailures = new AtomicInteger();
      conn.exceptionHandler(err -> {
        assertOnIOContext(ctx);
        connFailures.incrementAndGet();
      });
      conn.closeHandler(v -> {
        assertTrue(connFailures.get() > 0);
        assertOnIOContext(ctx);
        complete();
      });
      // handlers are installed, the client may now misbehave
      when.complete();
    });
    startServer(ctx);
    TestClient client = new TestClient();
    client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
      int id = request.nextStreamId();
      Http2ConnectionEncoder encoder = request.encoder;
      when.future().onComplete(ar -> {
        // Send a stream ID that does not exists
        encoder.frameWriter().writeRstStream(request.context, 10, 0, request.context.newPromise());
        request.context.flush();
      });
      encoder.writeHeaders(request.context, id, GET("/"), 0, false, request.context.newPromise());
      request.context.flush();
    });
    await();
  }
  // Server-initiated GOAWAY with NO_ERROR referencing the first stream: the
  // second stream observes HttpClosedException with error code 0, the shutdown
  // handler fires before the test is done, and the connection is not closed
  // until afterwards.
  @Test
  public void testServerSendGoAwayNoError() throws Exception {
    waitFor(2);
    AtomicReference<HttpServerRequest> first = new AtomicReference<>();
    AtomicInteger status = new AtomicInteger();
    AtomicInteger closed = new AtomicInteger();
    AtomicBoolean done = new AtomicBoolean();
    Context ctx = vertx.getOrCreateContext();
    Handler<HttpServerRequest> requestHandler = req -> {
      if (first.compareAndSet(null, req)) {
        // first stream: failures may only happen after the test has completed
        req.exceptionHandler(err -> {
          assertTrue(done.get());
        });
        req.response().exceptionHandler(err -> {
          assertTrue(done.get());
        });
      } else {
        assertEquals(0, status.getAndIncrement());
        req.exceptionHandler(err -> {
          closed.incrementAndGet();
        });
        req.response().exceptionHandler(err -> {
          assertEquals(HttpClosedException.class, err.getClass());
          assertEquals(0, ((HttpClosedException)err).goAway().getErrorCode());
          closed.incrementAndGet();
        });
        HttpConnection conn = req.connection();
        conn.shutdownHandler(v -> {
          assertFalse(done.get());
        });
        conn.closeHandler(v -> {
          assertTrue(done.get());
        });
        ctx.runOnContext(v1 -> {
          // GOAWAY pointing at the first stream, error code 0 (NO_ERROR)
          conn.goAway(0, first.get().response().streamId());
          vertx.setTimer(300, timerID -> {
            assertEquals(1, status.getAndIncrement());
            done.set(true);
            complete();
          });
        });
      }
    };
    testServerSendGoAway(requestHandler, 0);
  }
  // Server-initiated GOAWAY with error code 2: both streams are torn down (the
  // close/end handlers fire five times in total) and the connection is shut
  // down then closed.
  @Ignore
  @Test
  public void testServerSendGoAwayInternalError() throws Exception {
    waitFor(3);
    AtomicReference<HttpServerRequest> first = new AtomicReference<>();
    AtomicInteger status = new AtomicInteger();
    AtomicInteger closed = new AtomicInteger();
    Handler<HttpServerRequest> requestHandler = req -> {
      if (first.compareAndSet(null, req)) {
        req.exceptionHandler(err -> {
          fail();
        });
        req.response().closeHandler(err -> {
          closed.incrementAndGet();
        });
        req.response().endHandler(err -> {
          closed.incrementAndGet();
        });
      } else {
        assertEquals(0, status.getAndIncrement());
        req.exceptionHandler(err -> {
          closed.incrementAndGet();
        });
        req.response().closeHandler(err -> {
          closed.incrementAndGet();
        });
        req.response().endHandler(err -> {
          closed.incrementAndGet();
        });
        HttpConnection conn = req.connection();
        conn.closeHandler(v -> {
          assertEquals(5, closed.get());
          assertEquals(1, status.get());
          complete();
        });
        conn.shutdownHandler(v -> {
          assertEquals(1, status.get());
          complete();
        });
        conn.goAway(2, first.get().response().streamId());
      }
    };
    testServerSendGoAway(requestHandler, 2);
  }
  // shutdown(timeout): with both responses still open, the connection is
  // force-closed when the 300 ms timeout expires; close handlers must fire but
  // no response may end normally.
  @Test
  public void testShutdownWithTimeout() throws Exception {
    waitFor(4);
    AtomicReference<HttpServerRequest> first = new AtomicReference<>();
    AtomicInteger status = new AtomicInteger();
    Handler<HttpServerRequest> requestHandler = req -> {
      if (first.compareAndSet(null, req)) {
        req.exceptionHandler(err -> {
          fail();
        });
        req.response().closeHandler(err -> {
          complete();
        });
        req.response().endHandler(err -> {
          fail();
        });
      } else {
        assertEquals(0, status.getAndIncrement());
        req.exceptionHandler(err -> {
          fail();
        });
        req.response().closeHandler(err -> {
          complete();
        });
        req.response().endHandler(err -> {
          fail();
        });
        HttpConnection conn = req.connection();
        conn.closeHandler(v -> {
          assertEquals(1, status.getAndIncrement());
          complete();
        });
        conn.shutdown(300, TimeUnit.MILLISECONDS);
      }
    };
    testServerSendGoAway(requestHandler, 0);
  }
  // shutdown() without an explicit timeout: the connection stays open until both
  // pending responses have ended (after the 300 ms timer), then closes.
  @Test
  public void testShutdown() throws Exception {
    waitFor(2);
    AtomicReference<HttpServerRequest> first = new AtomicReference<>();
    AtomicInteger status = new AtomicInteger();
    Handler<HttpServerRequest> requestHandler = req -> {
      if (first.compareAndSet(null, req)) {
        req.exceptionHandler(err -> {
          fail();
        });
        req.response().exceptionHandler(err -> {
          fail();
        });
      } else {
        assertEquals(0, status.getAndIncrement());
        req.exceptionHandler(err -> {
          fail();
        });
        req.response().exceptionHandler(err -> {
          fail();
        });
        HttpConnection conn = req.connection();
        conn.closeHandler(v -> {
          assertEquals(2, status.getAndIncrement());
          complete();
        });
        conn.shutdown();
        vertx.setTimer(300, timerID -> {
          assertEquals(1, status.getAndIncrement());
          first.get().response().end();
          req.response().end();
        });
      }
    };
    testServerSendGoAway(requestHandler, 0);
  }
  // Opens two streams against {@code requestHandler} and completes once the
  // client receives a GOAWAY frame carrying {@code expectedError}.
  private void testServerSendGoAway(Handler<HttpServerRequest> requestHandler, int expectedError) throws Exception {
    server.requestHandler(requestHandler);
    startServer();
    TestClient client = new TestClient();
    client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
      request.decoder.frameListener(new Http2EventAdapter() {
        @Override
        public void onGoAwayRead(ChannelHandlerContext ctx, int lastStreamId, long errorCode, ByteBuf debugData) throws Http2Exception {
          vertx.runOnContext(v -> {
            assertEquals(expectedError, errorCode);
            complete();
          });
        }
      });
      Http2ConnectionEncoder encoder = request.encoder;
      int id1 = request.nextStreamId();
      encoder.writeHeaders(request.context, id1, GET("/"), 0, true, request.context.newPromise());
      int id2 = request.nextStreamId();
      encoder.writeHeaders(request.context, id2, GET("/"), 0, true, request.context.newPromise());
      request.context.flush();
    });
    await();
  }
  // Hard close(): the shutdown handler runs first, then the close handler, and
  // the client observes a NO_ERROR GOAWAY followed by the TCP channel close.
  @Test
  public void testServerClose() throws Exception {
    waitFor(2);
    AtomicInteger status = new AtomicInteger();
    Handler<HttpServerRequest> requestHandler = req -> {
      HttpConnection conn = req.connection();
      conn.shutdownHandler(v -> {
        assertEquals(0, status.getAndIncrement());
      });
      conn.closeHandler(v -> {
        assertEquals(1, status.getAndIncrement());
        complete();
      });
      conn.close();
    };
    server.requestHandler(requestHandler);
    startServer();
    TestClient client = new TestClient();
    client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
      request.channel.closeFuture().addListener(v1 -> {
        vertx.runOnContext(v2 -> {
          complete();
        });
      });
      request.decoder.frameListener(new Http2EventAdapter() {
        @Override
        public void onGoAwayRead(ChannelHandlerContext ctx, int lastStreamId, long errorCode, ByteBuf debugData) throws Http2Exception {
          vertx.runOnContext(v -> {
            assertEquals(0, errorCode);
          });
        }
      });
      Http2ConnectionEncoder encoder = request.encoder;
      int id = request.nextStreamId();
      encoder.writeHeaders(request.context, id, GET("/"), 0, true, request.context.newPromise());
      request.context.flush();
    });
    await();
  }
  // Client sends GOAWAY with NO_ERROR: the server's goAway handler fires before
  // its shutdown handler, and the connection must remain open (the close handler
  // may only run once the test has completed).
  @Test
  public void testClientSendGoAwayNoError() throws Exception {
    Promise<Void> abc = Promise.promise();
    Context ctx = vertx.getOrCreateContext();
    Handler<HttpServerRequest> requestHandler = req -> {
      HttpConnection conn = req.connection();
      AtomicInteger numShutdown = new AtomicInteger();
      AtomicBoolean completed = new AtomicBoolean();
      conn.shutdownHandler(v -> {
        assertOnIOContext(ctx);
        numShutdown.getAndIncrement();
        vertx.setTimer(100, timerID -> {
          // Delay so we can check the connection is not closed
          completed.set(true);
          testComplete();
        });
      });
      conn.goAwayHandler(ga -> {
        assertOnIOContext(ctx);
        // the goAway handler must run before the shutdown handler
        assertEquals(0, numShutdown.get());
        req.response().end();
      });
      conn.closeHandler(v -> {
        assertTrue(completed.get());
      });
      abc.complete();
    };
    server.requestHandler(requestHandler);
    startServer(ctx);
    TestClient client = new TestClient();
    client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
      Http2ConnectionEncoder encoder = request.encoder;
      int id = request.nextStreamId();
      encoder.writeHeaders(request.context, id, GET("/"), 0, true, request.context.newPromise());
      request.context.flush();
      abc.future().onComplete(ar -> {
        encoder.writeGoAway(request.context, id, 0, Unpooled.EMPTY_BUFFER, request.context.newPromise());
        request.context.flush();
      });
    });
    await();
  }
  // Client sends GOAWAY with an error code (3): goAway handler, shutdown handler
  // and close handler must fire on the server, in that order.
  @Test
  public void testClientSendGoAwayInternalError() throws Exception {
    waitFor(3);
    // On windows the client will close the channel immediately (since it's an error)
    // and the server might see the channel inactive without receiving the close frame before
    Assume.assumeFalse(Utils.isWindows());
    Promise<Void> continuation = Promise.promise();
    Context ctx = vertx.getOrCreateContext();
    Handler<HttpServerRequest> requestHandler = req -> {
      HttpConnection conn = req.connection();
      AtomicInteger status = new AtomicInteger();
      conn.goAwayHandler(ga -> {
        assertOnIOContext(ctx);
        assertEquals(0, status.getAndIncrement());
        req.response().end();
        complete();
      });
      conn.shutdownHandler(v -> {
        assertOnIOContext(ctx);
        assertEquals(1, status.getAndIncrement());
        complete();
      });
      conn.closeHandler(v -> {
        assertEquals(2, status.getAndIncrement());
        complete();
      });
      continuation.complete();
    };
    server.requestHandler(requestHandler);
    startServer(ctx);
    TestClient client = new TestClient();
    client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
      Http2ConnectionEncoder encoder = request.encoder;
      int id = request.nextStreamId();
      encoder.writeHeaders(request.context, id, GET("/"), 0, true, request.context.newPromise());
      request.context.flush();
      continuation.future().onComplete(ar -> {
        encoder.writeGoAway(request.context, id, 3, Unpooled.EMPTY_BUFFER, request.context.newPromise());
        request.context.flush();
      });
    });
    await();
  }
@Test
public void testShutdownOverride() throws Exception {
AtomicLong shutdown = new AtomicLong();
Handler<HttpServerRequest> requestHandler = req -> {
HttpConnection conn = req.connection();
shutdown.set(System.currentTimeMillis());
conn.shutdown(10, TimeUnit.SECONDS);
vertx.setTimer(300, v -> {
conn.shutdown(300, TimeUnit.MILLISECONDS);
});
};
server.requestHandler(requestHandler);
startServer();
TestClient client = new TestClient();
client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
request.channel.closeFuture().addListener(v1 -> {
vertx.runOnContext(v2 -> {
assertTrue(shutdown.get() - System.currentTimeMillis() < 1200);
testComplete();
});
});
Http2ConnectionEncoder encoder = request.encoder;
int id = request.nextStreamId();
encoder.writeHeaders(request.context, id, GET("/"), 0, true, request.context.newPromise());
request.context.flush();
});
await();
}
  // State-machine checks: once the request has ended, its mutators must throw
  // IllegalStateException; once the response head is written, header/status
  // mutators must throw; once the response has ended, every write/handler/trailer
  // mutator and push must throw.
  @Test
  public void testRequestResponseLifecycle() throws Exception {
    waitFor(2);
    server.requestHandler(req -> {
      req.endHandler(v -> {
        // request is fully read: no more handlers or expectations may be set
        assertIllegalStateException(() -> req.setExpectMultipart(false));
        assertIllegalStateException(() -> req.handler(buf -> {}));
        assertIllegalStateException(() -> req.uploadHandler(upload -> {}));
        assertIllegalStateException(() -> req.endHandler(v2 -> {}));
        complete();
      });
      HttpServerResponse resp = req.response();
      resp.setChunked(true).write(Buffer.buffer("whatever"));
      assertTrue(resp.headWritten());
      // head written: status, headers and chunking are frozen
      assertIllegalStateException(() -> resp.setChunked(false));
      assertIllegalStateException(() -> resp.setStatusCode(100));
      assertIllegalStateException(() -> resp.setStatusMessage("whatever"));
      assertIllegalStateException(() -> resp.putHeader("a", "b"));
      assertIllegalStateException(() -> resp.putHeader("a", (CharSequence) "b"));
      assertIllegalStateException(() -> resp.putHeader("a", (Iterable<String>)Arrays.asList("a", "b")));
      assertIllegalStateException(() -> resp.putHeader("a", (Arrays.<CharSequence>asList("a", "b"))));
      assertIllegalStateException(resp::writeContinue);
      resp.end();
      // response ended: any further interaction must fail
      assertIllegalStateException(() -> resp.write("a"));
      assertIllegalStateException(() -> resp.write("a", "UTF-8"));
      assertIllegalStateException(() -> resp.write(Buffer.buffer("a")));
      assertIllegalStateException(resp::end);
      assertIllegalStateException(() -> resp.end("a"));
      assertIllegalStateException(() -> resp.end("a", "UTF-8"));
      assertIllegalStateException(() -> resp.end(Buffer.buffer("a")));
      assertIllegalStateException(() -> resp.sendFile("the-file.txt"));
      assertIllegalStateException(() -> resp.closeHandler(v -> {}));
      assertIllegalStateException(() -> resp.endHandler(v -> {}));
      assertIllegalStateException(() -> resp.drainHandler(v -> {}));
      assertIllegalStateException(() -> resp.exceptionHandler(err -> {}));
      assertIllegalStateException(resp::writeQueueFull);
      assertIllegalStateException(() -> resp.setWriteQueueMaxSize(100));
      assertIllegalStateException(() -> resp.putTrailer("a", "b"));
      assertIllegalStateException(() -> resp.putTrailer("a", (CharSequence) "b"));
      assertIllegalStateException(() -> resp.putTrailer("a", (Iterable<String>)Arrays.asList("a", "b")));
      assertIllegalStateException(() -> resp.putTrailer("a", (Arrays.<CharSequence>asList("a", "b"))));
      assertIllegalStateException(() -> resp.push(HttpMethod.GET, "/whatever"));
      complete();
    });
    startServer();
    TestClient client = new TestClient();
    client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
      int id = request.nextStreamId();
      request.encoder.writeHeaders(request.context, id, GET("/"), 0, true, request.context.newPromise());
      request.context.flush();
    });
    await();
  }
  // Compression disabled (server default): even though the client advertises
  // gzip, the response body is sent as-is with no content-encoding header.
  @Test
  public void testResponseCompressionDisabled() throws Exception {
    waitFor(2); // headers check + body check
    String expected = TestUtils.randomAlphaString(1000);
    server.requestHandler(req -> {
      req.response().end(expected);
    });
    startServer();
    TestClient client = new TestClient();
    client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
      request.decoder.frameListener(new Http2EventAdapter() {
        @Override
        public void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers, int streamDependency, short weight, boolean exclusive, int padding, boolean endStream) throws Http2Exception {
          vertx.runOnContext(v -> {
            assertEquals(null, headers.get(HttpHeaderNames.CONTENT_ENCODING));
            complete();
          });
        }
        @Override
        public int onDataRead(ChannelHandlerContext ctx, int streamId, ByteBuf data, int padding, boolean endOfStream) throws Http2Exception {
          String s = data.toString(StandardCharsets.UTF_8);
          vertx.runOnContext(v -> {
            assertEquals(expected, s);
            complete();
          });
          return super.onDataRead(ctx, streamId, data, padding, endOfStream);
        }
      });
      int id = request.nextStreamId();
      request.encoder.writeHeaders(request.context, id, GET("/").add("accept-encoding", "gzip"), 0, true, request.context.newPromise());
      request.context.flush();
    });
    await();
  }
  // Compression enabled: a client advertising gzip receives a gzip-encoded body
  // (content-encoding: gzip) that inflates back to the original payload.
  @Test
  public void testResponseCompressionEnabled() throws Exception {
    waitFor(2); // headers check + body check
    String expected = TestUtils.randomAlphaString(1000);
    server.close();
    server = vertx.createHttpServer(new HttpServerOptions(serverOptions).setCompressionSupported(true));
    server.requestHandler(req -> {
      req.response().end(expected);
    });
    startServer();
    TestClient client = new TestClient();
    client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
      request.decoder.frameListener(new Http2EventAdapter() {
        @Override
        public void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers, int streamDependency, short weight, boolean exclusive, int padding, boolean endStream) throws Http2Exception {
          vertx.runOnContext(v -> {
            assertEquals("gzip", headers.get(HttpHeaderNames.CONTENT_ENCODING).toString());
            complete();
          });
        }
        @Override
        public int onDataRead(ChannelHandlerContext ctx, int streamId, ByteBuf data, int padding, boolean endOfStream) throws Http2Exception {
          byte[] bytes = new byte[data.readableBytes()];
          data.readBytes(bytes);
          vertx.runOnContext(v -> {
            // gunzip the received bytes and compare with the original payload
            String decoded;
            try {
              GZIPInputStream in = new GZIPInputStream(new ByteArrayInputStream(bytes));
              ByteArrayOutputStream baos = new ByteArrayOutputStream();
              while (true) {
                int i = in.read();
                if (i == -1) {
                  break;
                }
                baos.write(i);
              }
              decoded = baos.toString();
            } catch (IOException e) {
              fail(e);
              return;
            }
            assertEquals(expected, decoded);
            complete();
          });
          return super.onDataRead(ctx, streamId, data, padding, endOfStream);
        }
      });
      int id = request.nextStreamId();
      request.encoder.writeHeaders(request.context, id, GET("/").add("accept-encoding", "gzip"), 0, true, request.context.newPromise());
      request.context.flush();
    });
    await();
  }
@Test
public void testResponseCompressionEnabledButResponseAlreadyCompressed() throws Exception {
waitFor(2);
String expected = TestUtils.randomAlphaString(1000);
server.close();
server = vertx.createHttpServer(serverOptions.setCompressionSupported(true));
server.requestHandler(req -> {
req.response().headers().set(HttpHeaderNames.CONTENT_ENCODING, "gzip");
try {
req.response().end(Buffer.buffer(TestUtils.compressGzip(expected)));
} catch (Exception e) {
fail(e);
}
});
startServer();
TestClient client = new TestClient();
client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
request.decoder.frameListener(new Http2EventAdapter() {
@Override
public void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers, int streamDependency, short weight, boolean exclusive, int padding, boolean endStream) throws Http2Exception {
vertx.runOnContext(v -> {
assertEquals("gzip", headers.get(HttpHeaderNames.CONTENT_ENCODING).toString());
complete();
});
}
@Override
public int onDataRead(ChannelHandlerContext ctx, int streamId, ByteBuf data, int padding, boolean endOfStream) throws Http2Exception {
byte[] bytes = new byte[data.readableBytes()];
data.readBytes(bytes);
vertx.runOnContext(v -> {
String decoded;
try {
GZIPInputStream in = new GZIPInputStream(new ByteArrayInputStream(bytes));
ByteArrayOutputStream baos = new ByteArrayOutputStream();
while (true) {
int i = in.read();
if (i == -1) {
break;
}
baos.write(i);
}
decoded = baos.toString();
} catch (IOException e) {
fail(e);
return;
}
assertEquals(expected, decoded);
complete();
});
return super.onDataRead(ctx, streamId, data, padding, endOfStream);
}
});
int id = request.nextStreamId();
request.encoder.writeHeaders(request.context, id, GET("/").add("accept-encoding", "gzip"), 0, true, request.context.newPromise());
request.context.flush();
});
await();
}
  // Compression enabled, but the handler opts out with content-encoding:
  // identity — the body is sent uncompressed and the response carries no
  // content-encoding header.
  @Test
  public void testResponseCompressionEnabledButExplicitlyDisabled() throws Exception {
    waitFor(2); // headers check + body check
    String expected = TestUtils.randomAlphaString(1000);
    server.close();
    server = vertx.createHttpServer(new HttpServerOptions(serverOptions).setCompressionSupported(true));
    server.requestHandler(req -> {
      req.response().headers().set(HttpHeaderNames.CONTENT_ENCODING, "identity");
      try {
        req.response().end(expected);
      } catch (Exception e) {
        fail(e);
      }
    });
    startServer();
    TestClient client = new TestClient();
    client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
      request.decoder.frameListener(new Http2EventAdapter() {
        @Override
        public void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers, int streamDependency, short weight, boolean exclusive, int padding, boolean endStream) throws Http2Exception {
          vertx.runOnContext(v -> {
            assertFalse(headers.contains(HttpHeaderNames.CONTENT_ENCODING));
            complete();
          });
        }
        @Override
        public int onDataRead(ChannelHandlerContext ctx, int streamId, ByteBuf data, int padding, boolean endOfStream) throws Http2Exception {
          byte[] bytes = new byte[data.readableBytes()];
          data.readBytes(bytes);
          vertx.runOnContext(v -> {
            String decoded = new String(bytes, StandardCharsets.UTF_8);
            assertEquals(expected, decoded);
            complete();
          });
          return super.onDataRead(ctx, streamId, data, padding, endOfStream);
        }
      });
      int id = request.nextStreamId();
      request.encoder.writeHeaders(request.context, id, GET("/").add("accept-encoding", "gzip"), 0, true, request.context.newPromise());
      request.context.flush();
    });
    await();
  }
  // Decompression enabled: a gzip-encoded request body (content-encoding: gzip)
  // is inflated before reaching the request handler.
  @Test
  public void testRequestCompressionEnabled() throws Exception {
    String expected = TestUtils.randomAlphaString(1000);
    byte[] expectedGzipped = TestUtils.compressGzip(expected);
    server.close();
    server = vertx.createHttpServer(new HttpServerOptions(serverOptions).setDecompressionSupported(true));
    server.requestHandler(req -> {
      StringBuilder postContent = new StringBuilder();
      req.handler(buff -> {
        postContent.append(buff.toString());
      });
      req.endHandler(v -> {
        req.response().putHeader("content-type", "text/plain").end("");
        // the accumulated body must equal the pre-compression payload
        assertEquals(expected, postContent.toString());
        testComplete();
      });
    });
    startServer();
    TestClient client = new TestClient();
    client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
      int id = request.nextStreamId();
      request.encoder.writeHeaders(request.context, id, POST("/").add("content-encoding", "gzip"), 0, false, request.context.newPromise());
      request.encoder.writeData(request.context, id, BufferInternal.buffer(expectedGzipped).getByteBuf(), 0, true, request.context.newPromise());
      request.context.flush();
    });
    await();
  }
  @Test
  // The request handler inspects the "expect" header itself and emits the
  // interim 100-continue response explicitly via writeContinue().
  public void test100ContinueHandledManually() throws Exception {
    server.requestHandler(req -> {
      assertEquals("100-continue", req.getHeader("expect"));
      HttpServerResponse resp = req.response();
      resp.writeContinue();
      req.bodyHandler(body -> {
        assertEquals("the-body", body.toString());
        resp.putHeader("wibble", "wibble-value").end();
      });
    });
    test100Continue();
  }
  @Test
  // Same scenario as the manual variant, but the server option
  // setHandle100ContinueAutomatically(true) sends the interim response for us;
  // the handler only deals with the body.
  public void test100ContinueHandledAutomatically() throws Exception {
    server.close();
    server = vertx.createHttpServer(new HttpServerOptions(serverOptions).setHandle100ContinueAutomatically(true));
    server.requestHandler(req -> {
      HttpServerResponse resp = req.response();
      req.bodyHandler(body -> {
        assertEquals("the-body", body.toString());
        resp.putHeader("wibble", "wibble-value").end();
      });
    });
    test100Continue();
  }
  // Shared client side for the 100-continue tests: sends GET with
  // "expect: 100-continue", waits for the interim 100 HEADERS frame, then sends
  // the body and expects the final 200 HEADERS carrying the "wibble" header.
  private void test100Continue() throws Exception {
    startServer();
    TestClient client = new TestClient();
    client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
      int id = request.nextStreamId();
      request.decoder.frameListener(new Http2EventAdapter() {
        int count = 0;
        @Override
        public void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers, int streamDependency, short weight, boolean exclusive, int padding, boolean endStream) throws Http2Exception {
          switch (count++) {
            case 0:
              // First HEADERS frame must be the interim 100 response.
              vertx.runOnContext(v -> {
                assertEquals("100", headers.status().toString());
              });
              request.encoder.writeData(request.context, id, BufferInternal.buffer("the-body").getByteBuf(), 0, true, request.context.newPromise());
              request.context.flush();
              break;
            case 1:
              // Second HEADERS frame is the final response.
              vertx.runOnContext(v -> {
                assertEquals("200", headers.status().toString());
                assertEquals("wibble-value", headers.get("wibble").toString());
                testComplete();
              });
              break;
            default:
              vertx.runOnContext(v -> {
                fail();
              });
          }
        }
      });
      request.encoder.writeHeaders(request.context, id, GET("/").add("expect", "100-continue"), 0, false, request.context.newPromise());
      request.context.flush();
    });
    await();
  }
  @Test
  // The server rejects the expectation with a 405 instead of sending
  // 100-continue; the client must receive only that final response, and the
  // server body handler must never fire. The 100ms timer gives any spurious
  // extra frame a chance to arrive before the test completes.
  public void test100ContinueRejectedManually() throws Exception {
    server.requestHandler(req -> {
      req.response().setStatusCode(405).end();
      req.handler(buf -> {
        fail();
      });
    });
    startServer();
    TestClient client = new TestClient();
    client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
      int id = request.nextStreamId();
      request.decoder.frameListener(new Http2EventAdapter() {
        int count = 0;
        @Override
        public void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers, int streamDependency, short weight, boolean exclusive, int padding, boolean endStream) throws Http2Exception {
          switch (count++) {
            case 0:
              vertx.runOnContext(v -> {
                assertEquals("405", headers.status().toString());
                vertx.setTimer(100, v2 -> {
                  testComplete();
                });
              });
              break;
            default:
              vertx.runOnContext(v -> {
                fail();
              });
          }
        }
      });
      request.encoder.writeHeaders(request.context, id, GET("/").add("expect", "100-continue"), 0, false, request.context.newPromise());
      request.context.flush();
    });
    await();
  }
  @Test
  // Full-duplex exchange over a CONNECT stream converted to a NetSocket:
  // client sends "some-data", server echoes it, client replies "last-data"
  // with end-of-stream, server ends with "last-data". Four completions:
  // server echo write ack, server end write ack, server close handler, and the
  // client observing the echoed/final data.
  public void testNetSocketConnect() throws Exception {
    waitFor(4);
    server.requestHandler(req -> {
      req.toNetSocket().onComplete(onSuccess(socket -> {
        AtomicInteger status = new AtomicInteger();
        socket.handler(buff -> {
          switch (status.getAndIncrement()) {
            case 0:
              assertEquals(Buffer.buffer("some-data"), buff);
              socket.write(buff).onComplete(onSuccess(v2 -> complete()));
              break;
            case 1:
              assertEquals(Buffer.buffer("last-data"), buff);
              break;
            default:
              fail();
              break;
          }
        });
        socket.endHandler(v1 -> {
          // End must arrive after both data chunks.
          assertEquals(2, status.getAndIncrement());
          socket.end(Buffer.buffer("last-data")).onComplete(onSuccess(v2 -> complete()));
        });
        socket.closeHandler(v -> complete());
      }));
    });
    startServer();
    TestClient client = new TestClient();
    client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
      int id = request.nextStreamId();
      request.decoder.frameListener(new Http2EventAdapter() {
        @Override
        public void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers, int streamDependency, short weight, boolean exclusive, int padding, boolean endStream) throws Http2Exception {
          vertx.runOnContext(v -> {
            assertEquals("200", headers.status().toString());
            assertFalse(endStream);
          });
          request.encoder.writeData(request.context, id, BufferInternal.buffer("some-data").getByteBuf(), 0, false, request.context.newPromise());
          request.context.flush();
        }
        StringBuilder received = new StringBuilder();
        @Override
        public int onDataRead(ChannelHandlerContext ctx, int streamId, ByteBuf data, int padding, boolean endOfStream) throws Http2Exception {
          String s = data.toString(StandardCharsets.UTF_8);
          received.append(s);
          if (received.toString().equals("some-data")) {
            // Echo received; answer with the closing chunk.
            received.setLength(0);
            vertx.runOnContext(v -> {
              assertFalse(endOfStream);
            });
            request.encoder.writeData(request.context, id, BufferInternal.buffer("last-data").getByteBuf(), 0, true, request.context.newPromise());
          } else if (endOfStream) {
            vertx.runOnContext(v -> {
              assertEquals("last-data", received.toString());
              complete();
            });
          }
          return data.readableBytes() + padding;
        }
      });
      request.encoder.writeHeaders(request.context, id, new DefaultHttp2Headers().method("CONNECT").authority("example.com:80"), 0, false, request.context.newPromise());
      request.context.flush();
    });
    await();
  }
@Test
@DetectFileDescriptorLeaks
public void testNetSocketSendFile() throws Exception {
Buffer expected = Buffer.buffer(TestUtils.randomAlphaString(1000 * 1000));
File tmp = createTempFile(expected);
testNetSocketSendFile(expected, tmp.getAbsolutePath(), 0, expected.length());
}
@Test
public void testNetSocketSendFileRange() throws Exception {
Buffer expected = Buffer.buffer(TestUtils.randomAlphaString(1000 * 1000));
File tmp = createTempFile(expected);
int from = 200 * 1000;
int to = 700 * 1000;
testNetSocketSendFile(expected.slice(from, to), tmp.getAbsolutePath(), from, to - from);
}
  // Shared body for the sendFile tests: the server streams a byte range of a
  // file over the net socket and ends; the client accumulates DATA frames and
  // compares the total against the expected buffer once end-of-stream arrives.
  private void testNetSocketSendFile(Buffer expected, String path, long offset, long length) throws Exception {
    server.requestHandler(req -> {
      req.toNetSocket().onComplete(onSuccess(socket -> {
        socket.sendFile(path, offset, length).onComplete(onSuccess(v -> {
          socket.end();
        }));
      }));
    });
    startServer();
    TestClient client = new TestClient();
    client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
      int id = request.nextStreamId();
      request.decoder.frameListener(new Http2EventAdapter() {
        @Override
        public void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers, int streamDependency, short weight, boolean exclusive, int padding, boolean endStream) throws Http2Exception {
          vertx.runOnContext(v -> {
            assertEquals("200", headers.status().toString());
            assertFalse(endStream);
          });
        }
        Buffer received = Buffer.buffer();
        @Override
        public int onDataRead(ChannelHandlerContext ctx, int streamId, ByteBuf data, int padding, boolean endOfStream) throws Http2Exception {
          byte[] tmp = new byte[data.readableBytes()];
          data.getBytes(data.readerIndex(), tmp);
          received.appendBytes(tmp);
          if (endOfStream) {
            vertx.runOnContext(v -> {
              assertEquals(received, expected);
              testComplete();
            });
          }
          return data.readableBytes() + padding;
        }
      });
      request.encoder.writeHeaders(request.context, id, GET("/"), 0, true, request.context.newPromise());
      request.context.flush();
    });
    await();
  }
  @Test
  // The server closes the net socket right after echoing the first chunk; the
  // pending write must still be acknowledged exactly once before the close
  // handler runs. The client replies with a final chunk once it has observed
  // the echoed data and end-of-stream.
  public void testServerCloseNetSocket() throws Exception {
    waitFor(2);
    final AtomicInteger writeAcks = new AtomicInteger(0);
    AtomicInteger status = new AtomicInteger();
    server.requestHandler(req -> {
      req.toNetSocket().onComplete(onSuccess(socket -> {
        socket.handler(buff -> {
          switch (status.getAndIncrement()) {
            case 0:
              assertEquals(Buffer.buffer("some-data"), buff);
              socket.write(buff).onComplete(onSuccess(v -> writeAcks.incrementAndGet()));
              socket.close();
              break;
            case 1:
              assertEquals(Buffer.buffer("last-data"), buff);
              break;
            default:
              fail();
              break;
          }
        });
        socket.endHandler(v -> {
          assertEquals(2, status.getAndIncrement());
        });
        socket.closeHandler(v -> {
          // Close is the last event and the echo write must have been acked.
          assertEquals(3, status.getAndIncrement());
          complete();
          assertEquals(1, writeAcks.get());
        });
      }));
    });
    startServer();
    TestClient client = new TestClient();
    client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
      int id = request.nextStreamId();
      request.decoder.frameListener(new Http2EventAdapter() {
        int count = 0;
        @Override
        public void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers, int streamDependency, short weight, boolean exclusive, int padding, boolean endStream) throws Http2Exception {
          int c = count++;
          vertx.runOnContext(v -> {
            assertEquals(0, c);
          });
          request.encoder.writeData(request.context, id, BufferInternal.buffer("some-data").getByteBuf(), 0, false, request.context.newPromise());
          request.context.flush();
        }
        StringBuilder received = new StringBuilder();
        @Override
        public int onDataRead(ChannelHandlerContext ctx, int streamId, ByteBuf data, int padding, boolean endOfStream) throws Http2Exception {
          String s = data.toString(StandardCharsets.UTF_8);
          received.append(s);
          if (endOfStream) {
            request.encoder.writeData(request.context, id, BufferInternal.buffer("last-data").getByteBuf(), 0, true, request.context.newPromise());
            vertx.runOnContext(v -> {
              assertEquals("some-data", received.toString());
              complete();
            });
          }
          return data.readableBytes() + padding;
        }
      });
      request.encoder.writeHeaders(request.context, id, GET("/"), 0, false, request.context.newPromise());
      request.context.flush();
    });
    await();
  }
  @Test
  // The client resets the stream (RST_STREAM code 0) immediately after the
  // response headers; the server-side net socket must surface a
  // StreamResetException via its exception handler and then close.
  public void testNetSocketHandleReset() throws Exception {
    server.requestHandler(req -> {
      req.toNetSocket().onComplete(onSuccess(socket -> {
        AtomicInteger status = new AtomicInteger();
        socket.exceptionHandler(err -> {
          if (err instanceof StreamResetException) {
            assertEquals(0, status.getAndIncrement());
            StreamResetException ex = (StreamResetException) err;
            assertEquals(0, ex.getCode());
          }
        });
        socket.endHandler(v -> {
          // fail();
        });
        socket.closeHandler(v -> {
          // Close must follow the reset notification.
          assertEquals(1, status.getAndIncrement());
          testComplete();
        });
      }));
    });
    startServer();
    TestClient client = new TestClient();
    client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
      int id = request.nextStreamId();
      request.decoder.frameListener(new Http2EventAdapter() {
        int count = 0;
        @Override
        public void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers, int streamDependency, short weight, boolean exclusive, int padding, boolean endStream) throws Http2Exception {
          int c = count++;
          vertx.runOnContext(v -> {
            assertEquals(0, c);
          });
          request.encoder.writeRstStream(ctx, streamId, 0, ctx.newPromise());
          request.context.flush();
        }
      });
      request.encoder.writeHeaders(request.context, id, GET("/"), 0, false, request.context.newPromise());
      request.context.flush();
    });
    await();
  }
@Test
public void testNetSocketPauseResume() throws Exception {
testStreamPauseResume(req -> req.toNetSocket().map(so -> so));
}
@Test
public void testNetSocketWritability() throws Exception {
testStreamWritability(req -> req.toNetSocket().map(so -> so));
}
  @Test
  // Custom (unknown) HTTP/2 frames round trip: the client sends frame type 10
  // with flags 253, the server's customFrameHandler observes it on the server
  // context and answers with its own custom frame (type 12, flags 134) before
  // ending the response. The client asserts the frame ordering: response
  // headers, custom frame, then the empty end-of-stream DATA frame.
  public void testUnknownFrame() throws Exception {
    Buffer expectedSend = TestUtils.randomBuffer(500);
    Buffer expectedRecv = TestUtils.randomBuffer(500);
    Context ctx = vertx.getOrCreateContext();
    server.requestHandler(req -> {
      req.customFrameHandler(frame -> {
        assertOnIOContext(ctx);
        assertEquals(10, frame.type());
        assertEquals(253, frame.flags());
        assertEquals(expectedSend, frame.payload());
        HttpServerResponse resp = req.response();
        resp.writeCustomFrame(12, 134, expectedRecv);
        resp.end();
      });
    });
    startServer(ctx);
    TestClient client = new TestClient();
    client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
      int id = request.nextStreamId();
      request.decoder.frameListener(new Http2EventAdapter() {
        int status = 0;
        @Override
        public void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers, int streamDependency, short weight, boolean exclusive, int padding, boolean endStream) throws Http2Exception {
          int s = status++;
          vertx.runOnContext(v -> {
            assertEquals(0, s);
          });
        }
        @Override
        public void onUnknownFrame(ChannelHandlerContext ctx, byte frameType, int streamId, Http2Flags flags, ByteBuf payload) {
          int s = status++;
          byte[] tmp = new byte[payload.readableBytes()];
          payload.getBytes(payload.readerIndex(), tmp);
          Buffer recv = Buffer.buffer().appendBytes(tmp);
          vertx.runOnContext(v -> {
            assertEquals(1, s);
            assertEquals(12, frameType);
            assertEquals(134, flags.value());
            assertEquals(expectedRecv, recv);
          });
        }
        @Override
        public int onDataRead(ChannelHandlerContext ctx, int streamId, ByteBuf data, int padding, boolean endOfStream) throws Http2Exception {
          int len = data.readableBytes();
          int s = status++;
          vertx.runOnContext(v -> {
            assertEquals(2, s);
            assertEquals(0, len);
            assertTrue(endOfStream);
            testComplete();
          });
          return data.readableBytes() + padding;
        }
      });
      request.encoder.writeHeaders(request.context, id, GET("/"), 0, false, request.context.newPromise());
      request.encoder.writeFrame(request.context, (byte)10, id, new Http2Flags((short) 253), ((BufferInternal)expectedSend).getByteBuf(), request.context.newPromise());
      request.context.flush();
    });
    await();
  }
@Test
public void testUpgradeToClearTextGet() throws Exception {
testUpgradeToClearText(HttpMethod.GET, Buffer.buffer(), options -> {});
}
@Test
public void testUpgradeToClearTextPut() throws Exception {
Buffer expected = Buffer.buffer(TestUtils.randomAlphaString(20));
testUpgradeToClearText(HttpMethod.PUT, expected, options -> {});
}
@Test
public void testUpgradeToClearTextWithCompression() throws Exception {
Buffer expected = Buffer.buffer(TestUtils.randomAlphaString(8192));
testUpgradeToClearText(HttpMethod.PUT, expected, options -> options.setCompressionSupported(true));
}
  @Test
  // An h2c upgrade request carrying an unparseable Host header ("localhost:not")
  // must fail the request with a stream reset (code 1) rather than another
  // failure type — guards a past regression.
  public void testUpgradeToClearTextInvalidHost() throws Exception {
    testUpgradeToClearText(new RequestOptions(requestOptions).putHeader("Host", "localhost:not"), options -> {})
      .compose(req -> req.send()).onComplete(onFailure(failure -> {
        // Regression
        assertEquals(StreamResetException.class, failure.getClass());
        assertEquals(1L, ((StreamResetException)failure).getCode());
        testComplete();
      }));
    await();
  }
  // Happy-path h2c upgrade: sends the body with the given method, expects a 200
  // HTTP/2 response and the body echoed back unchanged.
  private void testUpgradeToClearText(HttpMethod method, Buffer expected, Handler<HttpServerOptions> optionsConfig) throws Exception {
    Future<HttpClientRequest> fut = testUpgradeToClearText(new RequestOptions(requestOptions).setMethod(method), optionsConfig);
    fut.compose(req -> req.send(expected)
      .andThen(onSuccess(resp -> {
        assertEquals(200, resp.statusCode());
        assertEquals(HttpVersion.HTTP_2, resp.version());
      }))
      .compose(resp -> resp.body())).onComplete(onSuccess(body -> {
      assertEquals(expected, body);
      testComplete();
    }));
    await();
  }
  // Sets up a clear-text (no TLS/ALPN) server and client for the h2c upgrade
  // tests. The server echoes the request body after a short delay; the handler
  // also checks that the client's initial SETTINGS (max 10000 concurrent
  // streams) were applied during the upgrade. Returns the not-yet-sent request.
  private Future<HttpClientRequest> testUpgradeToClearText(RequestOptions request,
                                                           Handler<HttpServerOptions> optionsConfig) throws Exception {
    server.close();
    optionsConfig.handle(serverOptions);
    server = vertx.createHttpServer(new HttpServerOptions(serverOptions)
      .setHost(DEFAULT_HTTP_HOST)
      .setPort(DEFAULT_HTTP_PORT)
      .setUseAlpn(false)
      .setSsl(false)
      .setInitialSettings(new io.vertx.core.http.Http2Settings().setMaxConcurrentStreams(20000)));
    server.requestHandler(req -> {
      assertEquals("http", req.scheme());
      assertEquals(request.getMethod(), req.method());
      assertEquals(HttpVersion.HTTP_2, req.version());
      io.vertx.core.http.Http2Settings remoteSettings = req.connection().remoteSettings();
      assertEquals(10000, remoteSettings.getMaxConcurrentStreams());
      assertFalse(req.isSSL());
      req.bodyHandler(body -> {
        vertx.setTimer(10, id -> {
          req.response().end(body);
        });
      });
    }).connectionHandler(conn -> {
      assertNotNull(conn);
    });
    startServer(testAddress);
    client = vertx.createHttpClient(clientOptions.
      setUseAlpn(false).
      setSsl(false).
      setInitialSettings(new io.vertx.core.http.Http2Settings().setMaxConcurrentStreams(10000)));
    return client.request(request);
  }
  @Test
  // After a successful h2c upgrade, the server's 250ms idle timeout must kick
  // in and close the connection; the test completes from the server-side
  // connection close handler.
  public void testUpgradeToClearTextIdleTimeout() throws Exception {
    server.close();
    server = vertx.createHttpServer(new HttpServerOptions(serverOptions)
      .setHost(DEFAULT_HTTP_HOST)
      .setPort(DEFAULT_HTTP_PORT)
      .setUseAlpn(false)
      .setSsl(false)
      .setIdleTimeout(250)
      .setIdleTimeoutUnit(TimeUnit.MILLISECONDS));
    server.requestHandler(req -> {
      req.connection().closeHandler(v -> {
        testComplete();
      });
    });
    startServer(testAddress);
    client = vertx.createHttpClient(clientOptions.
      setUseAlpn(false).
      setSsl(false));
    client.request(requestOptions).compose(request -> request.send());
    await();
  }
  @Test
  // Server push over a clear-text (h2c) connection: the server pushes
  // GET /resource alongside the main response; the client must receive both the
  // pushed body and the HTTP/2 main response.
  public void testPushPromiseClearText() throws Exception {
    waitFor(2);
    server.close();
    server = vertx.createHttpServer(new HttpServerOptions(serverOptions).
      setHost(DEFAULT_HTTP_HOST).
      setPort(DEFAULT_HTTP_PORT).
      setUseAlpn(false).
      setSsl(false));
    server.requestHandler(req -> {
      req.response().push(HttpMethod.GET, "/resource").onComplete(onSuccess(resp -> {
        resp.end("the-pushed-response");
      }));
      req.response().end();
    });
    startServer(testAddress);
    client.close();
    client = vertx.createHttpClient(clientOptions.setUseAlpn(false).setSsl(false));
    client.request(requestOptions).onComplete(onSuccess(req -> {
      req.exceptionHandler(this::fail).pushHandler(pushedReq -> {
        pushedReq.response().onComplete(onSuccess(pushResp -> {
          pushResp.bodyHandler(buff -> {
            assertEquals("the-pushed-response", buff.toString());
            complete();
          });
        }));
      }).send().onComplete(onSuccess(resp -> {
        assertEquals(HttpVersion.HTTP_2, resp.version());
        complete();
      }));
    }));
    await();
  }
  @Test
  // An upgrade request whose Connection header does not list "HTTP2-Settings"
  // must be rejected with a 400. Skipped on the multiplex HTTP/2 implementation.
  public void testUpgradeToClearTextInvalidConnectionHeader() throws Exception {
    Assume.assumeFalse(serverOptions.getHttp2MultiplexImplementation());
    testUpgradeFailure(vertx.getOrCreateContext(), (client, handler) -> {
      client.request(new RequestOptions()
        .setPort(DEFAULT_HTTP_PORT)
        .setHost(DEFAULT_HTTP_HOST)
        .setURI("/somepath")).onComplete(onSuccess(req -> {
        req
          .putHeader("Upgrade", "h2c")
          .putHeader("Connection", "Upgrade")
          .putHeader("HTTP2-Settings", HttpUtils.encodeSettings(new io.vertx.core.http.Http2Settings()))
          .send()
          .onComplete(handler);
      }));
    });
  }
  @Test
  // An upgrade request whose HTTP2-Settings header is not valid base64url
  // settings must be rejected with a 400. Skipped on the multiplex
  // HTTP/2 implementation.
  public void testUpgradeToClearTextMalformedSettings() throws Exception {
    Assume.assumeFalse(serverOptions.getHttp2MultiplexImplementation());
    testUpgradeFailure(vertx.getOrCreateContext(), (client, handler) -> {
      client.request(new RequestOptions()
        .setPort(DEFAULT_HTTP_PORT)
        .setHost(DEFAULT_HTTP_HOST)
        .setURI("/somepath")).onComplete(onSuccess(req -> {
        req
          .putHeader("Upgrade", "h2c")
          .putHeader("Connection", "Upgrade, HTTP2-Settings")
          .putHeader("HTTP2-Settings", "incorrect-settings")
          .send()
          .onComplete(handler);
      }));
    });
  }
  @Test
  // Syntactically valid base64url settings carrying a semantically invalid
  // value (SETTINGS_MAX_FRAME_SIZE = 0xFFFFFF + 1, above the protocol maximum)
  // must be rejected with a 400. Skipped on the multiplex HTTP/2 implementation.
  public void testUpgradeToClearTextInvalidSettings() throws Exception {
    Assume.assumeFalse(serverOptions.getHttp2MultiplexImplementation());
    Buffer buffer = Buffer.buffer();
    // Setting identifier 5 (max frame size) with an out-of-range value.
    buffer.appendUnsignedShort(5).appendUnsignedInt((0xFFFFFF + 1));
    String s = new String(Base64.getUrlEncoder().encode(buffer.getBytes()), StandardCharsets.UTF_8);
    testUpgradeFailure(vertx.getOrCreateContext(), (client, handler) -> {
      client.request(new RequestOptions()
        .setPort(DEFAULT_HTTP_PORT)
        .setHost(DEFAULT_HTTP_HOST)
        .setURI("/somepath")).onComplete(onSuccess(req -> {
        req
          .putHeader("Upgrade", "h2c")
          .putHeader("Connection", "Upgrade, HTTP2-Settings")
          .putHeader("HTTP2-Settings", s)
          .send()
          .onComplete(handler);
      }));
    });
  }
@Test
public void testUpgradeToClearTextMissingSettings() throws Exception {
Assume.assumeFalse(serverOptions.getPerMessageWebSocketCompressionSupported());
testUpgradeFailure(vertx.getOrCreateContext(), (client, handler) -> {
client.request(new RequestOptions()
.setPort(DEFAULT_HTTP_PORT)
.setHost(DEFAULT_HTTP_HOST)
.setURI("/somepath")).onComplete(onSuccess(req -> {
req
.putHeader("Upgrade", "h2c")
.putHeader("Connection", "Upgrade, HTTP2-Settings")
.send()
.onComplete(handler);
}));
});
}
  @Test
  // NOTE(review): despite the name, this runs on an ordinary event-loop context
  // obtained via vertx.getOrCreateContext(), and the request it sends looks
  // well-formed apart from the Connection header omitting "HTTP2-Settings" —
  // confirm the intended worker-context scenario is actually exercised.
  public void testUpgradeToClearTextWorkerContext() throws Exception {
    Assume.assumeFalse(serverOptions.getHttp2MultiplexImplementation());
    testUpgradeFailure(vertx.getOrCreateContext(), (client, handler) -> {
      client.request(new RequestOptions()
        .setPort(DEFAULT_HTTP_PORT)
        .setHost(DEFAULT_HTTP_HOST)
        .setURI("/somepath")).onComplete(onSuccess(req -> {
        req
          .putHeader("Upgrade", "h2c")
          .putHeader("Connection", "Upgrade")
          .putHeader("HTTP2-Settings", HttpUtils.encodeSettings(new io.vertx.core.http.Http2Settings()))
          .send().onComplete(handler);
      }));
    });
  }
  // Shared driver for the upgrade-rejection tests: serves clear text, lets the
  // caller issue a (deliberately broken) upgrade request via an HTTP/1.1
  // client, and asserts the server answers 400 over HTTP/1.1 without ever
  // invoking the request handler.
  private void testUpgradeFailure(Context context, BiConsumer<HttpClient, Handler<AsyncResult<HttpClientResponse>>> doRequest) throws Exception {
    server.close();
    server = vertx.createHttpServer(new HttpServerOptions(serverOptions).setHost(DEFAULT_HTTP_HOST).setPort(DEFAULT_HTTP_PORT).setUseAlpn(false).setSsl(false));
    server.requestHandler(req -> {
      fail();
    });
    startServer(context);
    client.close();
    client = vertx.createHttpClient(clientOptions.setProtocolVersion(HttpVersion.HTTP_1_1).setUseAlpn(false).setSsl(false));
    doRequest.accept(client, onSuccess(resp -> {
      assertEquals(400, resp.statusCode());
      assertEquals(HttpVersion.HTTP_1_1, resp.version());
      testComplete();
    }));
    await();
  }
  @Test
  // The client starts a chunked PUT that performs the h2c upgrade, writes part
  // of the body, then abruptly closes the connection once the server has seen
  // the request; the server-side request must report the failure through its
  // exception handler exactly once.
  public void testUpgradeToClearTextPartialFailure() throws Exception {
    Assume.assumeFalse(serverOptions.getHttp2MultiplexImplementation());
    server.close();
    server = vertx.createHttpServer(new HttpServerOptions(serverOptions).setHost(DEFAULT_HTTP_HOST).setPort(DEFAULT_HTTP_PORT).setUseAlpn(false).setSsl(false));
    CompletableFuture<Void> closeRequest = new CompletableFuture<>();
    server.requestHandler(req -> {
      closeRequest.complete(null);
      AtomicBoolean processed = new AtomicBoolean();
      req.exceptionHandler(err -> {
        // Guard against the handler firing more than once.
        if (processed.compareAndSet(false, true)) {
          testComplete();
        }
      });
    });
    startServer(testAddress);
    client.close();
    client = vertx.createHttpClient(clientOptions.setProtocolVersion(HttpVersion.HTTP_1_1).setUseAlpn(false).setSsl(false));
    client.request(new RequestOptions(requestOptions).setMethod(HttpMethod.PUT)).onComplete(onSuccess(req -> {
      req
        .putHeader("Upgrade", "h2c")
        .putHeader("Connection", "Upgrade,HTTP2-Settings")
        .putHeader("HTTP2-Settings", HttpUtils.encodeSettings(new io.vertx.core.http.Http2Settings()))
        .setChunked(true);
      req.write("some-data");
      closeRequest.thenAccept(v -> {
        req.connection().close();
      });
    }));
    await();
  }
  @Test
  // A client that opens a stream and then goes silent must trip the server's
  // 2s idle timeout: the request sees an HttpClosedException, the response
  // close handler and connection close handler fire, the response end handler
  // does not, and the underlying channel is closed.
  public void testIdleTimeout() throws Exception {
    waitFor(4);
    server.close();
    server = vertx.createHttpServer(new HttpServerOptions(serverOptions).setIdleTimeoutUnit(TimeUnit.MILLISECONDS).setIdleTimeout(2000));
    server.requestHandler(req -> {
      req.exceptionHandler(err -> {
        assertTrue(err instanceof HttpClosedException);
        complete();
      });
      req.response().closeHandler(v -> {
        complete();
      });
      req.response().endHandler(v -> {
        fail();
      });
      req.connection().closeHandler(v -> {
        complete();
      });
    });
    startServer();
    TestClient client = new TestClient();
    Channel channel = client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
      int id = request.nextStreamId();
      request.decoder.frameListener(new Http2EventAdapter() {
      });
      // Headers without end-of-stream, then silence — the timeout must close us.
      request.encoder.writeHeaders(request.context, id, GET("/"), 0, false, request.context.newPromise());
      request.context.flush();
    });
    channel.closeFuture().addListener(v1 -> {
      vertx.runOnContext(v2 -> {
        complete();
      });
    });
    await();
  }
  @Test
  // Server-initiated PING: the connection handler pings with 8 random bytes;
  // the client must receive exactly that payload, and the server's ping future
  // must complete on the server context with the echoed data.
  public void testSendPing() throws Exception {
    waitFor(2);
    Buffer expected = TestUtils.randomBuffer(8);
    Context ctx = vertx.getOrCreateContext();
    server.connectionHandler(conn -> {
      conn.ping(expected).onComplete(onSuccess(res -> {
        assertSame(ctx, Vertx.currentContext());
        assertEquals(expected, res);
        complete();
      }));
    });
    server.requestHandler(req -> fail());
    startServer(ctx);
    TestClient client = new TestClient();
    client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
      request.decoder.frameListener(new Http2EventAdapter() {
        @Override
        public void onPingRead(ChannelHandlerContext ctx, long data) throws Http2Exception {
          Buffer buffer = Buffer.buffer().appendLong(data);
          vertx.runOnContext(v -> {
            assertEquals(expected, buffer);
            complete();
          });
        }
      });
    });
    await();
  }
  @Test
  // Client-initiated PING: the client writes a PING frame with 8 random bytes;
  // the server's pingHandler must receive that payload on the server context.
  public void testReceivePing() throws Exception {
    Buffer expected = TestUtils.randomBuffer(8);
    Context ctx = vertx.getOrCreateContext();
    server.connectionHandler(conn -> {
      conn.pingHandler(buff -> {
        assertOnIOContext(ctx);
        assertEquals(expected, buff);
        testComplete();
      });
    });
    server.requestHandler(req -> fail());
    startServer(ctx);
    TestClient client = new TestClient();
    client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
      request.encoder.writePing(request.context, false, expected.getLong(0), request.context.newPromise());
    });
    await();
  }
  @Test
  // HTTP/2 with prior knowledge (no TLS, no upgrade): the test client's channel
  // initializer installs the HTTP/2 handler directly and short-circuits the TLS
  // handshake latch, so frames flow immediately on the clear-text connection.
  public void testPriorKnowledge() throws Exception {
    server.close();
    server = vertx.createHttpServer(new HttpServerOptions(serverOptions)
      .setSsl(false)
      .setPort(DEFAULT_HTTP_PORT)
      .setHost(DEFAULT_HTTP_HOST)
    );
    server.requestHandler(req -> {
      req.response().end("Hello World");
    });
    startServer();
    TestClient client = new TestClient() {
      @Override
      protected ChannelInitializer channelInitializer(int port, String host, Promise<SslHandshakeCompletionEvent> latch, Consumer<Connection> handler) {
        return new ChannelInitializer() {
          @Override
          protected void initChannel(Channel ch) throws Exception {
            ChannelPipeline p = ch.pipeline();
            Http2Connection connection = new DefaultHttp2Connection(false);
            TestClientHandlerBuilder clientHandlerBuilder = new TestClientHandlerBuilder(handler);
            TestClientHandler clientHandler = clientHandlerBuilder.build(connection);
            p.addLast(clientHandler);
            // No TLS on this connection: release the handshake latch right away.
            latch.complete(SslHandshakeCompletionEvent.SUCCESS);
          }
        };
      }
    };
    client.connect(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, request -> {
      request.decoder.frameListener(new Http2EventAdapter() {
        @Override
        public void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers, int streamDependency, short weight, boolean exclusive, int padding, boolean endStream) throws Http2Exception {
          vertx.runOnContext(v -> {
            testComplete();
          });
        }
      });
      int id = request.nextStreamId();
      request.encoder.writeHeaders(request.context, id, GET("/"), 0, true, request.context.newPromise());
      request.context.flush();
    });
    await();
  }
  @Test
  // Configuring the connection window to twice the 65535 default must make the
  // server emit a WINDOW_UPDATE with an increment of exactly 65535 (the delta
  // over the protocol's initial window).
  public void testConnectionWindowSize() throws Exception {
    server.close();
    server = vertx.createHttpServer(new HttpServerOptions(serverOptions).setHttp2ConnectionWindowSize(65535 + 65535));
    server.requestHandler(req -> {
      req.response().end();
    });
    startServer();
    TestClient client = new TestClient();
    client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
      request.decoder.frameListener(new Http2EventAdapter() {
        @Override
        public void onWindowUpdateRead(ChannelHandlerContext ctx, int streamId, int windowSizeIncrement) throws Http2Exception {
          vertx.runOnContext(v -> {
            assertEquals(65535, windowSizeIncrement);
            testComplete();
          });
        }
      });
    });
    await();
  }
  @Test
  // Growing the window at runtime via HttpConnection.setWindowSize: the getter
  // must reflect each update, and the net effect on the wire is a single
  // WINDOW_UPDATE whose increment equals the total growth (65535).
  public void testUpdateConnectionWindowSize() throws Exception {
    server.connectionHandler(conn -> {
      assertEquals(65535, conn.getWindowSize());
      conn.setWindowSize(65535 + 10000);
      assertEquals(65535 + 10000, conn.getWindowSize());
      conn.setWindowSize(65535 + 65535);
      assertEquals(65535 + 65535, conn.getWindowSize());
    }).requestHandler(req -> {
      req.response().end();
    });
    startServer();
    TestClient client = new TestClient();
    client.connect(DEFAULT_HTTPS_PORT, DEFAULT_HTTPS_HOST, request -> {
      request.decoder.frameListener(new Http2EventAdapter() {
        @Override
        public void onWindowUpdateRead(ChannelHandlerContext ctx, int streamId, int windowSizeIncrement) throws Http2Exception {
          vertx.runOnContext(v -> {
            assertEquals(65535, windowSizeIncrement);
            testComplete();
          });
        }
      });
    });
    await();
  }
| Anonymous |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java | {
"start": 85608,
"end": 89008
} | class ____ extends TestRecoveryTargetHandler {
@Override
public void receiveFileInfo(
List<String> phase1FileNames,
List<Long> phase1FileSizes,
List<String> phase1ExistingFileNames,
List<Long> phase1ExistingFileSizes,
int totalTranslogOps,
ActionListener<Void> listener
) {
listener.onResponse(null);
}
@Override
public void writeFileChunk(
StoreFileMetadata fileMetadata,
long position,
ReleasableBytesReference content,
boolean lastChunk,
int totalTranslogOps,
ActionListener<Void> listener
) {
listener.onResponse(null);
}
@Override
public void cleanFiles(
int totalTranslogOps,
long globalCheckpoint,
Store.MetadataSnapshot sourceMetadata,
ActionListener<Void> listener
) {
listener.onResponse(null);
}
}
private Translog.Snapshot newTranslogSnapshot(List<Translog.Operation> operations, List<Translog.Operation> operationsToSkip) {
Iterator<Translog.Operation> iterator = operations.iterator();
return new Translog.Snapshot() {
int skippedCount = 0;
@Override
public int totalOperations() {
return operations.size();
}
@Override
public int skippedOperations() {
return skippedCount;
}
@Override
public Translog.Operation next() {
while (iterator.hasNext()) {
Translog.Operation op = iterator.next();
if (operationsToSkip.contains(op)) {
skippedCount++;
} else {
return op;
}
}
return null;
}
@Override
public void close() {
}
};
}
public static Translog.Operation generateOperation(long seqNo) {
final Translog.Operation op;
if (randomBoolean()) {
op = new Translog.Index(
"id",
seqNo,
randomNonNegativeLong(),
randomNonNegativeLong(),
TRANSLOG_OPERATION_SOURCE,
randomBoolean() ? randomAlphaOfLengthBetween(1, 5) : null,
randomNonNegativeLong()
);
} else if (randomBoolean()) {
op = new Translog.Delete("id", seqNo, randomNonNegativeLong(), randomNonNegativeLong());
} else {
op = new Translog.NoOp(seqNo, randomNonNegativeLong(), "test");
}
return op;
}
private static List<Translog.Operation> generateOperations(int numOps) {
final List<Translog.Operation> operations = new ArrayList<>(numOps);
final BytesArray source = new BytesArray("{}".getBytes(StandardCharsets.UTF_8));
final Set<Long> seqNos = new HashSet<>();
for (int i = 0; i < numOps; i++) {
final long seqNo = randomValueOtherThanMany(n -> seqNos.add(n) == false, ESTestCase::randomNonNegativeLong);
operations.add(generateOperation(seqNo));
}
return operations;
}
static | Phase1RecoveryTargetHandler |
java | spring-projects__spring-security | web/src/test/java/org/springframework/security/web/authentication/www/BasicAuthenticationConverterTests.java | {
"start": 1682,
"end": 4928
} | class ____ {
@Mock
private AuthenticationDetailsSource<HttpServletRequest, ?> authenticationDetailsSource;
private BasicAuthenticationConverter converter;
@BeforeEach
public void setup() {
this.converter = new BasicAuthenticationConverter(this.authenticationDetailsSource);
}
@Test
public void testNormalOperation() {
String token = "rod:koala";
MockHttpServletRequest request = new MockHttpServletRequest();
request.addHeader("Authorization", "Basic " + CodecTestUtils.encodeBase64(token));
UsernamePasswordAuthenticationToken authentication = this.converter.convert(request);
verify(this.authenticationDetailsSource).buildDetails(any());
assertThat(authentication).isNotNull();
assertThat(authentication.getName()).isEqualTo("rod");
}
@Test
public void requestWhenAuthorizationSchemeInMixedCaseThenAuthenticates() {
String token = "rod:koala";
MockHttpServletRequest request = new MockHttpServletRequest();
request.addHeader("Authorization", "BaSiC " + CodecTestUtils.encodeBase64(token));
UsernamePasswordAuthenticationToken authentication = this.converter.convert(request);
verify(this.authenticationDetailsSource).buildDetails(any());
assertThat(authentication).isNotNull();
assertThat(authentication.getName()).isEqualTo("rod");
}
@Test
public void testWhenUnsupportedAuthorizationHeaderThenIgnored() {
MockHttpServletRequest request = new MockHttpServletRequest();
request.addHeader("Authorization", "Bearer someOtherToken");
UsernamePasswordAuthenticationToken authentication = this.converter.convert(request);
verifyNoMoreInteractions(this.authenticationDetailsSource);
assertThat(authentication).isNull();
}
@Test
public void testWhenInvalidBasicAuthorizationTokenThenError() {
String token = "NOT_A_VALID_TOKEN_AS_MISSING_COLON";
MockHttpServletRequest request = new MockHttpServletRequest();
request.addHeader("Authorization", "Basic " + CodecTestUtils.encodeBase64(token));
assertThatExceptionOfType(BadCredentialsException.class).isThrownBy(() -> this.converter.convert(request));
}
@Test
public void testWhenInvalidBase64ThenError() {
MockHttpServletRequest request = new MockHttpServletRequest();
request.addHeader("Authorization", "Basic NOT_VALID_BASE64");
assertThatExceptionOfType(BadCredentialsException.class).isThrownBy(() -> this.converter.convert(request));
}
@Test
public void convertWhenEmptyPassword() {
String token = "rod:";
MockHttpServletRequest request = new MockHttpServletRequest();
request.addHeader("Authorization", "Basic " + CodecTestUtils.encodeBase64(token));
UsernamePasswordAuthenticationToken authentication = this.converter.convert(request);
verify(this.authenticationDetailsSource).buildDetails(any());
assertThat(authentication).isNotNull();
assertThat(authentication.getName()).isEqualTo("rod");
assertThat(authentication.getCredentials()).isEqualTo("");
}
@Test
public void requestWhenEmptyBasicAuthorizationHeaderTokenThenError() {
MockHttpServletRequest request = new MockHttpServletRequest();
request.addHeader("Authorization", "Basic ");
assertThatExceptionOfType(BadCredentialsException.class).isThrownBy(() -> this.converter.convert(request));
}
}
| BasicAuthenticationConverterTests |
java | quarkusio__quarkus | extensions/spring-boot-properties/deployment/src/main/java/io/quarkus/spring/boot/properties/deployment/InterfaceConfigurationPropertiesUtil.java | {
"start": 18657,
"end": 19172
} | class ____ {
private final String name;
private final String defaultValue;
NameAndDefaultValue(String name) {
this(name, null);
}
NameAndDefaultValue(String name, String defaultValue) {
this.name = name;
this.defaultValue = defaultValue;
}
public String getName() {
return name;
}
public String getDefaultValue() {
return defaultValue;
}
}
static | NameAndDefaultValue |
java | apache__avro | lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessReflect.java | {
"start": 1526,
"end": 3578
} | class ____ extends FieldAccessor {
protected final Field field;
private final boolean isStringable;
private final boolean isCustomEncoded;
public ReflectionBasedAccessor(Field field) {
this.field = field;
this.field.setAccessible(true);
isStringable = field.isAnnotationPresent(Stringable.class);
isCustomEncoded = ReflectionUtil.getAvroEncode(field) != null;
}
@Override
public String toString() {
return field.getName();
}
@Override
public Object get(Object object) throws IllegalAccessException {
return field.get(object);
}
@Override
public void set(Object object, Object value) throws IllegalAccessException, IOException {
if (value == null && field.getType().isPrimitive()) {
Object defaultValue = null;
if (int.class.equals(field.getType())) {
defaultValue = INT_DEFAULT_VALUE;
} else if (float.class.equals(field.getType())) {
defaultValue = FLOAT_DEFAULT_VALUE;
} else if (short.class.equals(field.getType())) {
defaultValue = SHORT_DEFAULT_VALUE;
} else if (byte.class.equals(field.getType())) {
defaultValue = BYTE_DEFAULT_VALUE;
} else if (boolean.class.equals(field.getType())) {
defaultValue = BOOLEAN_DEFAULT_VALUE;
} else if (char.class.equals(field.getType())) {
defaultValue = CHAR_DEFAULT_VALUE;
} else if (long.class.equals(field.getType())) {
defaultValue = LONG_DEFAULT_VALUE;
} else if (double.class.equals(field.getType())) {
defaultValue = DOUBLE_DEFAULT_VALUE;
}
field.set(object, defaultValue);
} else {
field.set(object, value);
}
}
@Override
protected Field getField() {
return field;
}
@Override
protected boolean isStringable() {
return isStringable;
}
@Override
protected boolean isCustomEncoded() {
return isCustomEncoded;
}
}
private static final | ReflectionBasedAccessor |
java | alibaba__fastjson | src/test/java/com/alibaba/fastjson/deserializer/issues3796/bean/ObjectQ.java | {
"start": 95,
"end": 577
} | class ____ {
private int a;
private int b;
private boolean c = false;
private List<CommonObject> d;
public int getA() {
return a;
}
public void setA(int a) {
this.a = a;
}
public int getB() {
return b;
}
public void setB(int b) {
this.b = b;
}
public boolean isC() {
return c;
}
public void setC(boolean c) {
this.c = c;
}
public List<CommonObject> getD() {
return d;
}
public void setD(List<CommonObject> d) {
this.d = d;
}
}
| ObjectQ |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/AbstractNativeAnalyticsProcess.java | {
"start": 848,
"end": 2966
} | class ____<Result> extends AbstractNativeProcess implements AnalyticsProcess<Result> {
private final String name;
private final ProcessResultsParser<Result> resultsParser;
protected AbstractNativeAnalyticsProcess(
String name,
ConstructingObjectParser<Result, Void> resultParser,
String jobId,
NativeController nativeController,
ProcessPipes processPipes,
int numberOfFields,
List<Path> filesToDelete,
Consumer<String> onProcessCrash,
NamedXContentRegistry namedXContentRegistry
) {
super(jobId, nativeController, processPipes, numberOfFields, filesToDelete, onProcessCrash);
this.name = Objects.requireNonNull(name);
this.resultsParser = new ProcessResultsParser<>(Objects.requireNonNull(resultParser), namedXContentRegistry);
}
@Override
public String getName() {
return name;
}
@Override
public void persistState() {
// Nothing to persist
}
@Override
public void persistState(long timestamp, String id, String description) {
// Nothing to persist
}
@Override
public void writeEndOfDataMessage() throws IOException {
new AnalyticsControlMessageWriter(recordWriter(), numberOfFields()).writeEndOfData();
}
@Override
public Iterator<Result> readAnalyticsResults() {
return resultsParser.parseResults(processOutStream());
}
@Override
public void close() throws IOException {
try {
super.close();
} finally {
// Unlike autodetect where closing the process input stream initiates
// termination and additional output from the process which forces us
// to close the output stream after we've finished processing its results,
// in analytics we wait until we've read all results and then we close the
// process. Thus, we can take care of consuming and closing the output
// stream within close itself.
consumeAndCloseOutputStream();
}
}
}
| AbstractNativeAnalyticsProcess |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/module/SimpleModule.java | {
"start": 13375,
"end": 16988
} | class ____.
*/
public SimpleModule addKeyDeserializer(Class<?> type, KeyDeserializer deser)
{
_checkNotNull(type, "type to register key deserializer for");
_checkNotNull(deser, "key deserializer");
if (_keyDeserializers == null) {
_keyDeserializers = new SimpleKeyDeserializers();
}
_keyDeserializers.addDeserializer(type, deser);
return this;
}
/*
/**********************************************************************
/* Configuration methods, type mapping
/**********************************************************************
*/
/**
* Lazily-constructed resolver used for storing mappings from
* abstract classes to more specific implementing classes
* (which may be abstract or concrete)
*/
public <T> SimpleModule addAbstractTypeMapping(Class<T> superType,
Class<? extends T> subType)
{
_checkNotNull(superType, "abstract type to map");
_checkNotNull(subType, "concrete type to map to");
if (_abstractTypes == null) {
_abstractTypes = new SimpleAbstractTypeResolver();
}
// note: addMapping() will verify arguments
_abstractTypes = _abstractTypes.addMapping(superType, subType);
return this;
}
/**
* Method for adding set of subtypes to be registered with
* {@link ObjectMapper}
* this is an alternative to using annotations in super type to indicate subtypes.
*/
public SimpleModule registerSubtypes(Class<?> ... subtypes)
{
if (_subtypes == null) {
_subtypes = new LinkedHashSet<>();
}
for (Class<?> subtype : subtypes) {
_checkNotNull(subtype, "subtype to register");
_subtypes.add(new NamedType(subtype));
}
return this;
}
/**
* Method for adding set of subtypes (along with type name to use) to be registered with
* {@link ObjectMapper}
* this is an alternative to using annotations in super type to indicate subtypes.
*/
public SimpleModule registerSubtypes(NamedType ... subtypes)
{
if (_subtypes == null) {
_subtypes = new LinkedHashSet<>();
}
for (NamedType subtype : subtypes) {
_checkNotNull(subtype, "subtype to register");
_subtypes.add(subtype);
}
return this;
}
/**
* Method for adding set of subtypes (along with type name to use) to be registered with
* {@link ObjectMapper}
* this is an alternative to using annotations in super type to indicate subtypes.
*/
public SimpleModule registerSubtypes(Collection<Class<?>> subtypes)
{
if (_subtypes == null) {
_subtypes = new LinkedHashSet<>();
}
for (Class<?> subtype : subtypes) {
_checkNotNull(subtype, "subtype to register");
_subtypes.add(new NamedType(subtype));
}
return this;
}
/*
/**********************************************************************
/* Configuration methods, add other handlers
/**********************************************************************
*/
/**
* Method for registering {@link ValueInstantiator} to use when deserializing
* instances of type <code>beanType</code>.
*<p>
* Instantiator is
* registered when module is registered for <code>ObjectMapper</code>.
*/
public SimpleModule addValueInstantiator(Class<?> beanType, ValueInstantiator inst)
{
_checkNotNull(beanType, " | JavaDoc |
java | apache__thrift | lib/java/src/test/java/org/apache/thrift/protocol/TestTProtocolUtil.java | {
"start": 1082,
"end": 3500
} | class ____ {
@Test
public void testGuessProtocolFactory_JSON() throws Exception {
byte[] data = "{foo}".getBytes();
TProtocolFactory factory =
TProtocolUtil.guessProtocolFactory(data, new TCompactProtocol.Factory());
assertTrue(factory instanceof TJSONProtocol.Factory);
// Make sure data serialized with TCompact and which starts with '{'
// is not mistakenly guessed as serialized with JSON.
GuessProtocolStruct s = new GuessProtocolStruct();
s.putToMap_field("}", "}");
byte[] ser = new TSerializer(new TCompactProtocol.Factory()).serialize(s);
factory = TProtocolUtil.guessProtocolFactory(ser, new TCompactProtocol.Factory());
assertFalse(factory instanceof TJSONProtocol.Factory);
}
@Test
public void testGuessProtocolFactory_Binary() throws Exception {
// Check that a last byte != 0 is correctly reported as Binary
byte[] buf = new byte[1];
for (int i = 1; i < 256; i++) {
buf[0] = (byte) i;
TProtocolFactory factory =
TProtocolUtil.guessProtocolFactory(buf, new TCompactProtocol.Factory());
assertTrue(factory instanceof TBinaryProtocol.Factory);
}
// Check that a second byte set to 0 is reported as Binary
buf = new byte[2];
TProtocolFactory factory =
TProtocolUtil.guessProtocolFactory(buf, new TCompactProtocol.Factory());
assertTrue(factory instanceof TBinaryProtocol.Factory);
}
@Test
public void testGuessProtocolFactory_Compact() throws Exception {
// Check that a first byte > 0x10 is reported as Compact
byte[] buf = new byte[3];
buf[0] = 0x11;
TProtocolFactory factory =
TProtocolUtil.guessProtocolFactory(buf, new TBinaryProtocol.Factory());
assertTrue(factory instanceof TCompactProtocol.Factory);
// Check that second byte >= 0x80 is reported as Compact
buf[0] = 0;
for (int i = 0x80; i < 0x100; i++) {
buf[1] = (byte) i;
factory = TProtocolUtil.guessProtocolFactory(buf, new TBinaryProtocol.Factory());
assertTrue(factory instanceof TCompactProtocol.Factory);
}
}
@Test
public void testGuessProtocolFactory_Undecided() throws Exception {
byte[] buf = new byte[3];
buf[1] = 0x7e;
TProtocolFactory factory =
TProtocolUtil.guessProtocolFactory(buf, new TSimpleJSONProtocol.Factory());
assertTrue(factory instanceof TSimpleJSONProtocol.Factory);
}
}
| TestTProtocolUtil |
java | apache__camel | components/camel-kubernetes/src/test/java/org/apache/camel/component/kubernetes/consumer/integration/KubernetesNodesConsumerIT.java | {
"start": 2112,
"end": 3345
} | class ____ extends KubernetesTestSupport {
@EndpointInject("mock:result")
protected MockEndpoint mockResultEndpoint;
@Test
@Order(1)
void listNode() throws Exception {
configureMock();
Exchange ex = template.request("direct:listNode", exchange -> {
});
Message message = ex.getMessage();
assertNotNull(message);
assertNotNull(message.getBody());
mockResultEndpoint.assertIsSatisfied();
}
private void configureMock() {
mockResultEndpoint.expectedMessageCount(1);
mockResultEndpoint.expectedHeaderValuesReceivedInAnyOrder(KubernetesConstants.KUBERNETES_EVENT_ACTION,
"ADDED");
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:listNode").toF("kubernetes-nodes://%s?oauthToken=%s&operation=listNodes", host, authToken);
fromF("kubernetes-nodes://%s?oauthToken=%s&operation=listNodes", host, authToken)
.process(new KubernetesProcessor()).to(mockResultEndpoint);
}
};
}
public | KubernetesNodesConsumerIT |
java | spring-projects__spring-boot | module/spring-boot-security-oauth2-client/src/main/java/org/springframework/boot/security/oauth2/client/autoconfigure/servlet/OAuth2ClientWebSecurityAutoConfiguration.java | {
"start": 2814,
"end": 3198
} | class ____ {
@Bean
@ConditionalOnMissingBean
OAuth2AuthorizedClientRepository authorizedClientRepository(OAuth2AuthorizedClientService authorizedClientService) {
return new AuthenticatedPrincipalOAuth2AuthorizedClientRepository(authorizedClientService);
}
@Configuration(proxyBeanMethods = false)
@ConditionalOnDefaultWebSecurity
static | OAuth2ClientWebSecurityAutoConfiguration |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/flowable/BlockingFlowableNextTest.java | {
"start": 1357,
"end": 13364
} | class ____ extends RxJavaTest {
private void fireOnNextInNewThread(final FlowableProcessor<String> o, final String value) {
new Thread() {
@Override
public void run() {
try {
Thread.sleep(500);
} catch (InterruptedException e) {
// ignore
}
o.onNext(value);
}
}.start();
}
private void fireOnErrorInNewThread(final FlowableProcessor<String> o) {
new Thread() {
@Override
public void run() {
try {
Thread.sleep(500);
} catch (InterruptedException e) {
// ignore
}
o.onError(new TestException());
}
}.start();
}
@Test
public void next() {
FlowableProcessor<String> obs = PublishProcessor.create();
Iterator<String> it = obs.blockingNext().iterator();
fireOnNextInNewThread(obs, "one");
assertTrue(it.hasNext());
assertEquals("one", it.next());
fireOnNextInNewThread(obs, "two");
assertTrue(it.hasNext());
assertEquals("two", it.next());
fireOnNextInNewThread(obs, "three");
try {
assertEquals("three", it.next());
} catch (NoSuchElementException e) {
fail("Calling next() without hasNext() should wait for next fire");
}
obs.onComplete();
assertFalse(it.hasNext());
try {
it.next();
fail("At the end of an iterator should throw a NoSuchElementException");
} catch (NoSuchElementException e) {
}
// If the observable is completed, hasNext always returns false and next always throw a NoSuchElementException.
assertFalse(it.hasNext());
try {
it.next();
fail("At the end of an iterator should throw a NoSuchElementException");
} catch (NoSuchElementException e) {
}
}
@Test
public void nextWithError() {
FlowableProcessor<String> obs = PublishProcessor.create();
Iterator<String> it = obs.blockingNext().iterator();
fireOnNextInNewThread(obs, "one");
assertTrue(it.hasNext());
assertEquals("one", it.next());
fireOnErrorInNewThread(obs);
try {
it.hasNext();
fail("Expected an TestException");
} catch (TestException e) {
}
assertErrorAfterObservableFail(it);
}
@Test
public void nextWithEmpty() {
Flowable<String> obs = Flowable.<String> empty().observeOn(Schedulers.newThread());
Iterator<String> it = obs.blockingNext().iterator();
assertFalse(it.hasNext());
try {
it.next();
fail("At the end of an iterator should throw a NoSuchElementException");
} catch (NoSuchElementException e) {
}
// If the observable is completed, hasNext always returns false and next always throw a NoSuchElementException.
assertFalse(it.hasNext());
try {
it.next();
fail("At the end of an iterator should throw a NoSuchElementException");
} catch (NoSuchElementException e) {
}
}
@Test
public void onError() throws Throwable {
FlowableProcessor<String> obs = PublishProcessor.create();
Iterator<String> it = obs.blockingNext().iterator();
obs.onError(new TestException());
try {
it.hasNext();
fail("Expected an TestException");
} catch (TestException e) {
// successful
}
assertErrorAfterObservableFail(it);
}
@Test
public void onErrorInNewThread() {
FlowableProcessor<String> obs = PublishProcessor.create();
Iterator<String> it = obs.blockingNext().iterator();
fireOnErrorInNewThread(obs);
try {
it.hasNext();
fail("Expected an TestException");
} catch (TestException e) {
// successful
}
assertErrorAfterObservableFail(it);
}
private void assertErrorAfterObservableFail(Iterator<String> it) {
// After the observable fails, hasNext and next always throw the exception.
try {
it.hasNext();
fail("hasNext should throw a TestException");
} catch (TestException e) {
}
try {
it.next();
fail("next should throw a TestException");
} catch (TestException e) {
}
}
@Test
public void nextWithOnlyUsingNextMethod() {
FlowableProcessor<String> obs = PublishProcessor.create();
Iterator<String> it = obs.blockingNext().iterator();
fireOnNextInNewThread(obs, "one");
assertEquals("one", it.next());
fireOnNextInNewThread(obs, "two");
assertEquals("two", it.next());
obs.onComplete();
try {
it.next();
fail("At the end of an iterator should throw a NoSuchElementException");
} catch (NoSuchElementException e) {
}
}
@Test
public void nextWithCallingHasNextMultipleTimes() {
FlowableProcessor<String> obs = PublishProcessor.create();
Iterator<String> it = obs.blockingNext().iterator();
fireOnNextInNewThread(obs, "one");
assertTrue(it.hasNext());
assertTrue(it.hasNext());
assertTrue(it.hasNext());
assertTrue(it.hasNext());
assertEquals("one", it.next());
obs.onComplete();
try {
it.next();
fail("At the end of an iterator should throw a NoSuchElementException");
} catch (NoSuchElementException e) {
}
}
/**
* Confirm that no buffering or blocking of the Observable onNext calls occurs and it just grabs the next emitted value.
* <p>
* This results in output such as {@code => a: 1 b: 2 c: 89}
*
* @throws Throwable some method call is declared throws
*/
@Test
public void noBufferingOrBlockingOfSequence() throws Throwable {
int repeat = 0;
for (;;) {
final SerialDisposable task = new SerialDisposable();
try {
final CountDownLatch finished = new CountDownLatch(1);
final int COUNT = 30;
final CountDownLatch timeHasPassed = new CountDownLatch(COUNT);
final AtomicBoolean running = new AtomicBoolean(true);
final AtomicInteger count = new AtomicInteger(0);
final Flowable<Integer> obs = Flowable.unsafeCreate(new Publisher<Integer>() {
@Override
public void subscribe(final Subscriber<? super Integer> subscriber) {
subscriber.onSubscribe(new BooleanSubscription());
task.replace(Schedulers.single().scheduleDirect(new Runnable() {
@Override
public void run() {
try {
while (running.get() && !task.isDisposed()) {
subscriber.onNext(count.incrementAndGet());
timeHasPassed.countDown();
}
subscriber.onComplete();
} catch (Throwable e) {
subscriber.onError(e);
} finally {
finished.countDown();
}
}
}));
}
});
Iterator<Integer> it = obs.blockingNext().iterator();
assertTrue(it.hasNext());
int a = it.next();
assertTrue(it.hasNext());
int b = it.next();
// we should have a different value
assertTrue("a and b should be different", a != b);
// wait for some time (if times out we are blocked somewhere so fail ... set very high for very slow, constrained machines)
timeHasPassed.await(8000, TimeUnit.MILLISECONDS);
assertTrue(it.hasNext());
int c = it.next();
assertTrue("c should not just be the next in sequence", c != (b + 1));
assertTrue("expected that c [" + c + "] is higher than or equal to " + COUNT, c >= COUNT);
assertTrue(it.hasNext());
int d = it.next();
assertTrue(d > c);
// shut down the thread
running.set(false);
finished.await();
assertFalse(it.hasNext());
System.out.println("a: " + a + " b: " + b + " c: " + c);
break;
} catch (AssertionError ex) {
if (++repeat == 3) {
throw ex;
}
Thread.sleep((int)(1000 * Math.pow(2, repeat - 1)));
} finally {
task.dispose();
}
}
}
@Test
public void singleSourceManyIterators() throws InterruptedException {
Flowable<Long> f = Flowable.interval(250, TimeUnit.MILLISECONDS);
PublishProcessor<Integer> terminal = PublishProcessor.create();
Flowable<Long> source = f.takeUntil(terminal);
Iterable<Long> iter = source.blockingNext();
for (int j = 0; j < 3; j++) {
BlockingFlowableNext.NextIterator<Long> it = (BlockingFlowableNext.NextIterator<Long>)iter.iterator();
for (long i = 0; i < 10; i++) {
Assert.assertTrue(it.hasNext());
Assert.assertEquals(j + "th iteration next", Long.valueOf(i), it.next());
}
terminal.onNext(1);
}
}
@Test
public void synchronousNext() {
assertEquals(1, BehaviorProcessor.createDefault(1).take(1).blockingSingle().intValue());
assertEquals(2, BehaviorProcessor.createDefault(2).blockingIterable().iterator().next().intValue());
assertEquals(3, BehaviorProcessor.createDefault(3).blockingNext().iterator().next().intValue());
}
@Test(expected = UnsupportedOperationException.class)
public void remove() {
Flowable.never().blockingNext().iterator().remove();
}
@Test
public void interrupt() {
Iterator<Object> it = Flowable.never().blockingNext().iterator();
try {
Thread.currentThread().interrupt();
it.next();
} catch (RuntimeException ex) {
assertTrue(ex.toString(), ex.getCause() instanceof InterruptedException);
}
}
@Test
public void nextObserverError() {
NextSubscriber<Integer> no = new NextSubscriber<>();
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
no.onError(new TestException());
TestHelper.assertUndeliverable(errors, 0, TestException.class);
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void nextObserverOnNext() throws Exception {
NextSubscriber<Integer> no = new NextSubscriber<>();
no.setWaiting();
no.onNext(Notification.createOnNext(1));
no.setWaiting();
no.onNext(Notification.createOnNext(1));
assertEquals(1, no.takeNext().getValue().intValue());
}
@Test
public void nextObserverOnCompleteOnNext() throws Exception {
NextSubscriber<Integer> no = new NextSubscriber<>();
no.setWaiting();
no.onNext(Notification.<Integer>createOnComplete());
no.setWaiting();
no.onNext(Notification.createOnNext(1));
assertTrue(no.takeNext().isOnComplete());
}
}
| BlockingFlowableNextTest |
java | apache__camel | components/camel-milo/src/main/java/org/apache/camel/component/milo/NodeIds.java | {
"start": 1419,
"end": 1464
} | class ____ work with node IDs
*/
public final | to |
java | apache__kafka | streams/src/test/java/org/apache/kafka/streams/state/internals/MergedSortedCacheWrappedWindowStoreIteratorTest.java | {
"start": 2294,
"end": 2945
} | interface ____<K> {
Bytes serialize(final K key, final long ts, final int seq, final StateSerdes<K, ?> serdes);
}
private final List<KeyValue<Long, byte[]>> windowStoreKvPairs = new ArrayList<>();
private final ThreadCache cache = new ThreadCache(new LogContext("testCache "), 1000000L, new MockStreamsMetrics(new Metrics()));
private final String namespace = "0.0-one";
private final StateSerdes<String, String> stateSerdes = new StateSerdes<>("foo", Serdes.String(), Serdes.String());
private Function<byte[], Long> tsExtractor;
private StoreKeySerializer<String> storeKeySerializer;
private | StoreKeySerializer |
java | apache__flink | flink-libraries/flink-cep/src/main/java/org/apache/flink/cep/EventComparator.java | {
"start": 997,
"end": 1098
} | interface ____<T> extends Comparator<T>, Serializable {
long serialVersionUID = 1L;
}
| EventComparator |
java | apache__camel | components/camel-spring-parent/camel-spring-ws/src/main/java/org/apache/camel/component/spring/ws/bean/CamelDirectSender.java | {
"start": 1276,
"end": 2072
} | class ____ implements WebServiceMessageSender {
private CamelContext camelContext;
@Override
public WebServiceConnection createConnection(URI uri) throws IOException {
try {
return new CamelDirectConnection(camelContext, uri);
} catch (URISyntaxException e) {
throw new IOException(e);
}
}
@Override
public boolean supports(URI uri) {
try {
new CamelDirectConnection(camelContext, uri);
return true;
} catch (URISyntaxException e) {
return false;
}
}
public CamelContext getCamelContext() {
return camelContext;
}
public void setCamelContext(CamelContext camelContext) {
this.camelContext = camelContext;
}
}
| CamelDirectSender |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/jsontype/ext/ExternalTypeId96Test.java | {
"start": 689,
"end": 779
} | class ____ extends DatabindTestUtil
{
// for [databind#96]
static | ExternalTypeId96Test |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/ExtensionLoader.java | {
"start": 12217,
"end": 58130
} | class
____<BuildContext, Object> stepInstanceSetup = Functions.discardingBiConsumer();
if (constructors.length != 1) {
throw reportError(clazz, "Build step classes must have exactly one constructor");
}
SmallRyeConfig config = ConfigProvider.getConfig().unwrap(SmallRyeConfig.class);
ExtensionLoaderConfig extensionLoaderConfig = config.getConfigMapping(ExtensionLoaderConfig.class);
EnumSet<ConfigPhase> consumingConfigPhases = EnumSet.noneOf(ConfigPhase.class);
final Constructor<?> constructor = constructors[0];
if (!(Modifier.isPublic(constructor.getModifiers())))
constructor.setAccessible(true);
final Parameter[] ctorParameters = constructor.getParameters();
final List<Function<BuildContext, Object>> ctorParamFns;
if (ctorParameters.length == 0) {
ctorParamFns = Collections.emptyList();
} else {
ctorParamFns = new ArrayList<>(ctorParameters.length);
for (Parameter parameter : ctorParameters) {
Type parameterType = parameter.getParameterizedType();
final Class<?> parameterClass = parameter.getType();
if (rawTypeExtends(parameterType, SimpleBuildItem.class)) {
final Class<? extends SimpleBuildItem> buildItemClass = rawTypeOf(parameterType)
.asSubclass(SimpleBuildItem.class);
stepConfig = stepConfig.andThen(bsb -> bsb.consumes(buildItemClass));
ctorParamFns.add(bc -> bc.consume(buildItemClass));
} else if (isAnEmptyBuildItemConsumer(parameterType)) {
throw reportError(parameter,
"Cannot consume an empty build item, use @Consume(class) on the constructor instead");
} else if (isAnEmptyBuildItemProducer(parameterType)) {
throw reportError(parameter,
"Cannot produce an empty build item, use @Produce(class) on the constructor instead");
} else if (isListOf(parameterType, MultiBuildItem.class)) {
final Class<? extends MultiBuildItem> buildItemClass = rawTypeOfParameter(parameterType, 0)
.asSubclass(MultiBuildItem.class);
stepConfig = stepConfig.andThen(bsb -> bsb.consumes(buildItemClass));
ctorParamFns.add(bc -> bc.consumeMulti(buildItemClass));
} else if (isConsumerOf(parameterType, BuildItem.class)
|| isBuildProducerOf(parameterType, BuildItem.class)) {
throw unsupportedConstructorOrFieldProducer(parameter);
} else if (isOptionalOf(parameterType, SimpleBuildItem.class)) {
final Class<? extends SimpleBuildItem> buildItemClass = rawTypeOfParameter(parameterType, 0)
.asSubclass(SimpleBuildItem.class);
stepConfig = stepConfig.andThen(bsb -> bsb.consumes(buildItemClass, ConsumeFlags.of(ConsumeFlag.OPTIONAL)));
ctorParamFns.add(bc -> Optional.ofNullable(bc.consume(buildItemClass)));
} else if (isSupplierOf(parameterType, SimpleBuildItem.class)) {
final Class<? extends SimpleBuildItem> buildItemClass = rawTypeOfParameter(parameterType, 0)
.asSubclass(SimpleBuildItem.class);
stepConfig = stepConfig.andThen(bsb -> bsb.consumes(buildItemClass));
ctorParamFns.add(bc -> (Supplier<? extends SimpleBuildItem>) () -> bc.consume(buildItemClass));
} else if (isSupplierOfOptionalOf(parameterType, SimpleBuildItem.class)) {
final Class<? extends SimpleBuildItem> buildItemClass = rawTypeOfParameter(
rawTypeOfParameter(parameterType, 0), 0).asSubclass(SimpleBuildItem.class);
stepConfig = stepConfig.andThen(bsb -> bsb.consumes(buildItemClass, ConsumeFlags.of(ConsumeFlag.OPTIONAL)));
ctorParamFns.add(bc -> (Supplier<Optional<? extends SimpleBuildItem>>) () -> Optional
.ofNullable(bc.consume(buildItemClass)));
} else if (rawTypeOf(parameterType) == Executor.class) {
ctorParamFns.add(BuildContext::getExecutor);
} else if (parameterClass.isAnnotationPresent(ConfigRoot.class)) {
final ConfigRoot annotation = parameterClass.getAnnotation(ConfigRoot.class);
final ConfigPhase phase = annotation.phase();
consumingConfigPhases.add(phase);
if (phase.isAvailableAtBuild()) {
ctorParamFns.add(buildContext -> config.getConfigMapping(parameterClass));
if (phase == ConfigPhase.BUILD_AND_RUN_TIME_FIXED) {
runTimeProxies.computeIfAbsent(parameterClass, config::getConfigMapping);
}
} else if (phase.isReadAtMain()) {
throw reportError(parameter, phase + " configuration cannot be consumed here");
} else {
throw reportError(parameterClass, "Unknown value for ConfigPhase");
}
} else if (isRecorder(parameterClass)) {
throw reportError(parameter, "Bytecode recorders disallowed on constructor parameters");
} else {
throw reportError(parameter, "Unsupported constructor parameter type " + parameterType);
}
}
}
// index fields
final Field[] fields = clazz.getDeclaredFields();
for (Field field : fields) {
final int mods = field.getModifiers();
if (Modifier.isStatic(mods)) {
// ignore static fields
continue;
}
if (Modifier.isFinal(mods)) {
// ignore final fields
continue;
}
if (!Modifier.isPublic(mods) || !Modifier.isPublic(field.getDeclaringClass().getModifiers())) {
field.setAccessible(true);
}
// next, determine the type
final Type fieldType = field.getGenericType();
final Class<?> fieldClass = field.getType();
if (rawTypeExtends(fieldType, SimpleBuildItem.class)) {
final Class<? extends SimpleBuildItem> buildItemClass = rawTypeOf(fieldType).asSubclass(SimpleBuildItem.class);
stepConfig = stepConfig.andThen(bsb -> bsb.consumes(buildItemClass));
stepInstanceSetup = stepInstanceSetup
.andThen((bc, o) -> ReflectUtil.setFieldVal(field, o, bc.consume(buildItemClass)));
} else if (isAnEmptyBuildItemConsumer(fieldType)) {
throw reportError(field, "Cannot consume an empty build item, use @Consume(class) on the field instead");
} else if (isAnEmptyBuildItemProducer(fieldType)) {
throw reportError(field, "Cannot produce an empty build item, use @Produce(class) on the field instead");
} else if (isListOf(fieldType, MultiBuildItem.class)) {
final Class<? extends MultiBuildItem> buildItemClass = rawTypeOfParameter(fieldType, 0)
.asSubclass(MultiBuildItem.class);
stepConfig = stepConfig.andThen(bsb -> bsb.consumes(buildItemClass));
stepInstanceSetup = stepInstanceSetup
.andThen((bc, o) -> ReflectUtil.setFieldVal(field, o, bc.consumeMulti(buildItemClass)));
} else if (isConsumerOf(fieldType, BuildItem.class)
|| isBuildProducerOf(fieldType, BuildItem.class)) {
throw unsupportedConstructorOrFieldProducer(field);
} else if (isOptionalOf(fieldType, SimpleBuildItem.class)) {
final Class<? extends SimpleBuildItem> buildItemClass = rawTypeOfParameter(fieldType, 0)
.asSubclass(SimpleBuildItem.class);
stepConfig = stepConfig.andThen(bsb -> bsb.consumes(buildItemClass, ConsumeFlags.of(ConsumeFlag.OPTIONAL)));
stepInstanceSetup = stepInstanceSetup
.andThen((bc, o) -> ReflectUtil.setFieldVal(field, o, Optional.ofNullable(bc.consume(buildItemClass))));
} else if (isSupplierOf(fieldType, SimpleBuildItem.class)) {
final Class<? extends SimpleBuildItem> buildItemClass = rawTypeOfParameter(fieldType, 0)
.asSubclass(SimpleBuildItem.class);
stepConfig = stepConfig.andThen(bsb -> bsb.consumes(buildItemClass));
stepInstanceSetup = stepInstanceSetup.andThen((bc, o) -> ReflectUtil.setFieldVal(field, o,
(Supplier<? extends SimpleBuildItem>) () -> bc.consume(buildItemClass)));
} else if (isSupplierOfOptionalOf(fieldType, SimpleBuildItem.class)) {
final Class<? extends SimpleBuildItem> buildItemClass = rawTypeOfParameter(rawTypeOfParameter(fieldType, 0), 0)
.asSubclass(SimpleBuildItem.class);
stepConfig = stepConfig.andThen(bsb -> bsb.consumes(buildItemClass, ConsumeFlags.of(ConsumeFlag.OPTIONAL)));
stepInstanceSetup = stepInstanceSetup.andThen((bc, o) -> ReflectUtil.setFieldVal(field, o,
(Supplier<Optional<? extends SimpleBuildItem>>) () -> Optional.ofNullable(bc.consume(buildItemClass))));
} else if (fieldClass == Executor.class) {
stepInstanceSetup = stepInstanceSetup.andThen((bc, o) -> ReflectUtil.setFieldVal(field, o, bc.getExecutor()));
} else if (fieldClass.isAnnotationPresent(ConfigRoot.class)) {
final ConfigRoot annotation = fieldClass.getAnnotation(ConfigRoot.class);
final ConfigPhase phase = annotation.phase();
consumingConfigPhases.add(phase);
if (phase.isAvailableAtBuild()) {
stepInstanceSetup = stepInstanceSetup.andThen((bc, o) -> {
ReflectUtil.setFieldVal(field, o, config.getConfigMapping(fieldClass));
});
if (phase == ConfigPhase.BUILD_AND_RUN_TIME_FIXED) {
runTimeProxies.computeIfAbsent(fieldClass, config::getConfigMapping);
}
} else if (phase.isReadAtMain()) {
throw reportError(field, phase + " configuration cannot be consumed here");
} else {
throw reportError(fieldClass, "Unknown value for ConfigPhase");
}
} else if (isRecorder(fieldClass)) {
throw reportError(field, "Bytecode recorders disallowed on fields");
} else {
throw reportError(field, "Unsupported field type " + fieldType);
}
}
// get class-level configuration, if any
final BuildSteps buildSteps = clazz.getAnnotation(BuildSteps.class);
final Class<? extends BooleanSupplier>[] classOnlyIf = buildSteps == null ? EMPTY_BOOLEAN_SUPPLIER_CLASS_ARRAY
: buildSteps.onlyIf();
final Class<? extends BooleanSupplier>[] classOnlyIfNot = buildSteps == null ? EMPTY_BOOLEAN_SUPPLIER_CLASS_ARRAY
: buildSteps.onlyIfNot();
// now iterate the methods
final List<Method> methods = getMethods(clazz);
final Map<String, List<Method>> nameToMethods = methods.stream().collect(Collectors.groupingBy(m -> m.getName()));
MethodHandles.Lookup lookup = MethodHandles.publicLookup();
for (Method method : methods) {
final BuildStep buildStep = method.getAnnotation(BuildStep.class);
if (buildStep == null) {
continue;
}
if (Modifier.isStatic(method.getModifiers())) {
throw new RuntimeException("A build step must be a non-static method: " + method);
}
if (!Modifier.isPublic(method.getModifiers()) || !Modifier.isPublic(method.getDeclaringClass().getModifiers())) {
method.setAccessible(true);
}
final Class<? extends BooleanSupplier>[] onlyIf = buildStep.onlyIf();
final Class<? extends BooleanSupplier>[] onlyIfNot = buildStep.onlyIfNot();
final Parameter[] methodParameters = method.getParameters();
final Record recordAnnotation = method.getAnnotation(Record.class);
final boolean isRecorder = recordAnnotation != null;
final boolean identityComparison = isRecorder ? recordAnnotation.useIdentityComparisonForParameters() : true;
if (isRecorder) {
boolean recorderFound = false;
for (Class<?> p : method.getParameterTypes()) {
if (isRecorder(p)) {
recorderFound = true;
break;
}
}
if (!recorderFound) {
throw new RuntimeException(method + " is marked @Record but does not inject an @Recorder object");
}
}
final List<BiFunction<BuildContext, BytecodeRecorderImpl, Object>> methodParamFns;
Consumer<BuildStepBuilder> methodStepConfig = Functions.discardingConsumer();
BooleanSupplier addStep = () -> true;
addStep = and(addStep, supplierFactory, classOnlyIf, false);
addStep = and(addStep, supplierFactory, classOnlyIfNot, true);
addStep = and(addStep, supplierFactory, onlyIf, false);
addStep = and(addStep, supplierFactory, onlyIfNot, true);
final BooleanSupplier finalAddStep = addStep;
if (isRecorder) {
assert recordAnnotation != null;
final ExecutionTime executionTime = recordAnnotation.value();
final boolean optional = recordAnnotation.optional();
methodStepConfig = methodStepConfig.andThen(bsb -> {
bsb
.produces(
executionTime == ExecutionTime.STATIC_INIT ? StaticBytecodeRecorderBuildItem.class
: MainBytecodeRecorderBuildItem.class,
optional ? ProduceFlags.of(ProduceFlag.WEAK) : ProduceFlags.NONE);
});
}
EnumSet<ConfigPhase> methodConsumingConfigPhases = consumingConfigPhases.clone();
if (methodParameters.length == 0) {
methodParamFns = Collections.emptyList();
} else {
methodParamFns = new ArrayList<>(methodParameters.length);
for (Parameter parameter : methodParameters) {
final boolean weak = parameter.isAnnotationPresent(Weak.class);
final boolean overridable = parameter.isAnnotationPresent(Overridable.class);
final Type parameterType = parameter.getParameterizedType();
final Class<?> parameterClass = parameter.getType();
if (rawTypeExtends(parameterType, SimpleBuildItem.class)) {
final Class<? extends SimpleBuildItem> buildItemClass = parameterClass
.asSubclass(SimpleBuildItem.class);
methodStepConfig = methodStepConfig.andThen(bsb -> bsb.consumes(buildItemClass));
methodParamFns.add((bc, bri) -> bc.consume(buildItemClass));
} else if (isAnEmptyBuildItemConsumer(parameterType)) {
throw reportError(parameter,
"Cannot consume an empty build item, use @Consume(class) on the build step method instead");
} else if (isAnEmptyBuildItemProducer(parameterType)) {
throw reportError(parameter,
"Cannot produce an empty build item, use @Produce(class) on the build step method instead");
} else if (isListOf(parameterType, MultiBuildItem.class)) {
final Class<? extends MultiBuildItem> buildItemClass = rawTypeOfParameter(parameterType, 0)
.asSubclass(MultiBuildItem.class);
methodStepConfig = methodStepConfig.andThen(bsb -> bsb.consumes(buildItemClass));
methodParamFns.add((bc, bri) -> bc.consumeMulti(buildItemClass));
} else if (isConsumerOf(parameterType, BuildItem.class)) {
final Class<? extends BuildItem> buildItemClass = rawTypeOfParameter(parameterType, 0)
.asSubclass(BuildItem.class);
if (overridable) {
if (weak) {
methodStepConfig = methodStepConfig.andThen(
bsb -> bsb.produces(buildItemClass, ProduceFlag.OVERRIDABLE, ProduceFlag.WEAK));
} else {
methodStepConfig = methodStepConfig
.andThen(bsb -> bsb.produces(buildItemClass, ProduceFlag.OVERRIDABLE));
}
} else {
if (weak) {
methodStepConfig = methodStepConfig
.andThen(bsb -> bsb.produces(buildItemClass, ProduceFlag.WEAK));
} else {
methodStepConfig = methodStepConfig.andThen(bsb -> bsb.produces(buildItemClass));
}
}
methodParamFns.add((bc, bri) -> (Consumer<? extends BuildItem>) bc::produce);
} else if (isBuildProducerOf(parameterType, BuildItem.class)) {
final Class<? extends BuildItem> buildItemClass = rawTypeOfParameter(parameterType, 0)
.asSubclass(BuildItem.class);
if (overridable) {
if (weak) {
methodStepConfig = methodStepConfig.andThen(
bsb -> bsb.produces(buildItemClass, ProduceFlag.OVERRIDABLE, ProduceFlag.WEAK));
} else {
methodStepConfig = methodStepConfig
.andThen(bsb -> bsb.produces(buildItemClass, ProduceFlag.OVERRIDABLE));
}
} else {
if (weak) {
methodStepConfig = methodStepConfig
.andThen(bsb -> bsb.produces(buildItemClass, ProduceFlag.WEAK));
} else {
methodStepConfig = methodStepConfig.andThen(bsb -> bsb.produces(buildItemClass));
}
}
methodParamFns.add((bc, bri) -> (BuildProducer<? extends BuildItem>) bc::produce);
} else if (isOptionalOf(parameterType, SimpleBuildItem.class)) {
final Class<? extends SimpleBuildItem> buildItemClass = rawTypeOfParameter(parameterType, 0)
.asSubclass(SimpleBuildItem.class);
methodStepConfig = methodStepConfig
.andThen(bsb -> bsb.consumes(buildItemClass, ConsumeFlags.of(ConsumeFlag.OPTIONAL)));
methodParamFns.add((bc, bri) -> Optional.ofNullable(bc.consume(buildItemClass)));
} else if (isSupplierOf(parameterType, SimpleBuildItem.class)) {
final Class<? extends SimpleBuildItem> buildItemClass = rawTypeOfParameter(parameterType, 0)
.asSubclass(SimpleBuildItem.class);
methodStepConfig = methodStepConfig.andThen(bsb -> bsb.consumes(buildItemClass));
methodParamFns.add((bc, bri) -> (Supplier<? extends SimpleBuildItem>) () -> bc.consume(buildItemClass));
} else if (isSupplierOfOptionalOf(parameterType, SimpleBuildItem.class)) {
final Class<? extends SimpleBuildItem> buildItemClass = rawTypeOfParameter(
rawTypeOfParameter(parameterType, 0), 0).asSubclass(SimpleBuildItem.class);
methodStepConfig = methodStepConfig
.andThen(bsb -> bsb.consumes(buildItemClass, ConsumeFlags.of(ConsumeFlag.OPTIONAL)));
methodParamFns.add((bc, bri) -> (Supplier<Optional<? extends SimpleBuildItem>>) () -> Optional
.ofNullable(bc.consume(buildItemClass)));
} else if (rawTypeOf(parameterType) == Executor.class
|| rawTypeOf(parameterType) == ExecutorService.class) {
methodParamFns.add((bc, bri) -> bc.getExecutor());
} else if (parameterClass.isAnnotationPresent(ConfigRoot.class)) {
final ConfigRoot annotation = parameterClass.getAnnotation(ConfigRoot.class);
final ConfigPhase phase = annotation.phase();
methodConsumingConfigPhases.add(phase);
if (phase.isAvailableAtBuild()) {
methodParamFns.add((bc, bri) -> config.getConfigMapping(parameterClass));
if (isRecorder && phase == ConfigPhase.BUILD_AND_RUN_TIME_FIXED) {
runTimeProxies.computeIfAbsent(parameterClass, config::getConfigMapping);
}
} else if (phase.isReadAtMain()) {
if (isRecorder) {
if (extensionLoaderConfig.reportRuntimeConfigAtDeployment().equals(warn)) {
methodParamFns.add((bc, bri) -> {
RunTimeConfigurationProxyBuildItem proxies = bc
.consume(RunTimeConfigurationProxyBuildItem.class);
return proxies.getProxyObjectFor(parameterClass);
});
loadLog.warn(reportError(parameter,
phase + " configuration should not be consumed in Build Steps, use RuntimeValue<"
+ parameter.getType().getTypeName()
+ "> in a @Recorder constructor instead")
.getMessage());
runTimeProxies.computeIfAbsent(parameterClass, ConfigMappingUtils::newInstance);
} else {
throw reportError(parameter,
phase + " configuration cannot be consumed in Build Steps, use RuntimeValue<"
+ parameter.getType().getTypeName()
+ "> in a @Recorder constructor instead");
}
} else {
throw reportError(parameter,
phase + " configuration cannot be consumed here unless the method is a @Recorder");
}
} else {
throw reportError(parameterClass, "Unknown value for ConfigPhase");
}
} else if (isRecorder(parameter.getType())) {
if (!isRecorder) {
throw reportError(parameter,
"Cannot pass recorders to method which is not annotated with " + Record.class);
}
methodParamFns.add((bc, bri) -> {
assert bri != null;
return bri.getRecordingProxy(parameterClass);
});
//now look for recorder parameter injection
//as we now inject config directly into recorders we need to look at the constructor params
Constructor<?>[] ctors = parameter.getType().getDeclaredConstructors();
for (var ctor : ctors) {
if (ctors.length == 1 || ctor.isAnnotationPresent(Inject.class)) {
for (var type : ctor.getGenericParameterTypes()) {
Class<?> theType;
boolean isRuntimeValue = false;
if (type instanceof ParameterizedType pt) {
if (pt.getRawType().equals(RuntimeValue.class)) {
theType = (Class<?>) pt.getActualTypeArguments()[0];
isRuntimeValue = true;
} else {
throw new RuntimeException("Unknown recorder constructor parameter: " + type
+ " in recorder " + parameter.getType());
}
} else {
theType = (Class<?>) type;
}
ConfigRoot annotation = theType.getAnnotation(ConfigRoot.class);
if (annotation != null) {
if (recordAnnotation.value() == ExecutionTime.STATIC_INIT) {
// TODO - Check for runtime config is done in another place, we may want to make things more consistent. Rewrite once we disallow the injection of runtime objects in build steps
methodConsumingConfigPhases.add(ConfigPhase.BUILD_AND_RUN_TIME_FIXED);
} else {
methodConsumingConfigPhases.add(annotation.phase());
if (annotation.phase().isReadAtMain() && !isRuntimeValue) {
if (extensionLoaderConfig.reportRuntimeConfigAtDeployment().equals(warn)) {
loadLog.warn(reportError(parameter, annotation.phase() + " configuration "
+ type.getTypeName()
+ " should be injected in a @Recorder constructor as a RuntimeValue<"
+ type.getTypeName() + ">").getMessage());
} else {
throw reportError(parameter, annotation.phase() + " configuration "
+ type.getTypeName()
+ " can only be injected in a @Recorder constructor as a RuntimeValue<"
+ type.getTypeName() + ">");
}
}
}
if (annotation.phase().isReadAtMain()) {
// TODO - Remove once we disallow the injection of runtime objects in build steps
runTimeProxies.computeIfAbsent(theType, ConfigMappingUtils::newInstance);
} else {
runTimeProxies.computeIfAbsent(theType, config::getConfigMapping);
}
}
}
}
}
} else if (parameter.getType() == RecorderContext.class
|| parameter.getType() == BytecodeRecorderImpl.class) {
if (!isRecorder) {
throw reportError(parameter,
"Cannot pass recorder context to method which is not annotated with " + Record.class);
}
methodParamFns.add((bc, bri) -> bri);
} else {
throw reportError(parameter, "Unsupported method parameter " + parameterType);
}
}
}
final BiConsumer<BuildContext, Object> resultConsumer;
final Type returnType = method.getGenericReturnType();
final boolean weak = method.isAnnotationPresent(Weak.class);
final boolean overridable = method.isAnnotationPresent(Overridable.class);
if (rawTypeIs(returnType, void.class)) {
resultConsumer = Functions.discardingBiConsumer();
} else if (rawTypeExtends(returnType, EmptyBuildItem.class) || isOptionalOf(returnType, EmptyBuildItem.class)) {
throw reportError(method,
"Cannot produce an empty build item, use @Produce(class) on the build step method instead");
} else if (rawTypeExtends(returnType, BuildItem.class)) {
final Class<? extends BuildItem> type = method.getReturnType().asSubclass(BuildItem.class);
if (overridable) {
if (weak) {
methodStepConfig = methodStepConfig
.andThen(bsb -> bsb.produces(type, ProduceFlag.OVERRIDABLE, ProduceFlag.WEAK));
} else {
methodStepConfig = methodStepConfig.andThen(bsb -> bsb.produces(type, ProduceFlag.OVERRIDABLE));
}
} else {
if (weak) {
methodStepConfig = methodStepConfig.andThen(bsb -> bsb.produces(type, ProduceFlag.WEAK));
} else {
methodStepConfig = methodStepConfig.andThen(bsb -> bsb.produces(type));
}
}
resultConsumer = (bc, o) -> {
if (o != null)
bc.produce((BuildItem) o);
};
} else if (isOptionalOf(returnType, BuildItem.class)) {
final Class<? extends BuildItem> type = rawTypeOfParameter(returnType, 0).asSubclass(BuildItem.class);
if (overridable) {
if (weak) {
methodStepConfig = methodStepConfig
.andThen(bsb -> bsb.produces(type, ProduceFlag.OVERRIDABLE, ProduceFlag.WEAK));
} else {
methodStepConfig = methodStepConfig.andThen(bsb -> bsb.produces(type, ProduceFlag.OVERRIDABLE));
}
} else {
if (weak) {
methodStepConfig = methodStepConfig.andThen(bsb -> bsb.produces(type, ProduceFlag.WEAK));
} else {
methodStepConfig = methodStepConfig.andThen(bsb -> bsb.produces(type));
}
}
resultConsumer = (bc, o) -> ((Optional<? extends BuildItem>) o).ifPresent(bc::produce);
} else if (isListOf(returnType, MultiBuildItem.class)) {
final Class<? extends MultiBuildItem> type = rawTypeOfParameter(returnType, 0).asSubclass(MultiBuildItem.class);
if (overridable) {
if (weak) {
methodStepConfig = methodStepConfig
.andThen(bsb -> bsb.produces(type, ProduceFlag.OVERRIDABLE, ProduceFlag.WEAK));
} else {
methodStepConfig = methodStepConfig.andThen(bsb -> bsb.produces(type, ProduceFlag.OVERRIDABLE));
}
} else {
if (weak) {
methodStepConfig = methodStepConfig.andThen(bsb -> bsb.produces(type, ProduceFlag.WEAK));
} else {
methodStepConfig = methodStepConfig.andThen(bsb -> bsb.produces(type));
}
}
resultConsumer = (bc, o) -> {
if (o != null)
bc.produce((List<? extends MultiBuildItem>) o);
};
} else {
throw reportError(method, "Unsupported method return type " + returnType);
}
if (methodConsumingConfigPhases.contains(ConfigPhase.RUN_TIME)) {
if (isRecorder && recordAnnotation.value() == ExecutionTime.STATIC_INIT) {
throw reportError(method,
"Bytecode recorder is static but an injected config object is declared as run time");
}
methodStepConfig = methodStepConfig
.andThen(bsb -> bsb.consumes(RunTimeConfigurationProxyBuildItem.class));
if (methodConsumingConfigPhases.contains(ConfigPhase.RUN_TIME)) {
methodStepConfig = methodStepConfig
.andThen(bsb -> bsb.afterProduce(RuntimeConfigSetupCompleteBuildItem.class));
}
}
if (methodConsumingConfigPhases.contains(ConfigPhase.BUILD_AND_RUN_TIME_FIXED)
|| methodConsumingConfigPhases.contains(ConfigPhase.BUILD_TIME)) {
methodStepConfig = methodStepConfig
.andThen(bsb -> bsb.consumes(ConfigurationBuildItem.class));
}
final Consume[] consumes = method.getAnnotationsByType(Consume.class);
if (consumes.length > 0) {
methodStepConfig = methodStepConfig.andThen(bsb -> {
for (Consume consume : consumes) {
bsb.afterProduce(consume.value());
}
});
}
final Produce[] produces = method.getAnnotationsByType(Produce.class);
if (produces.length > 0) {
methodStepConfig = methodStepConfig.andThen(bsb -> {
for (Produce produce : produces) {
bsb.beforeConsume(produce.value());
}
});
}
final ProduceWeak[] produceWeaks = method.getAnnotationsByType(ProduceWeak.class);
if (produceWeaks.length > 0) {
methodStepConfig = methodStepConfig.andThen(bsb -> {
for (ProduceWeak produceWeak : produceWeaks) {
bsb.beforeConsume(produceWeak.value(), ProduceFlag.WEAK);
}
});
}
final Consumer<BuildStepBuilder> finalStepConfig = stepConfig.andThen(methodStepConfig)
.andThen(buildStepBuilder -> buildStepBuilder.buildIf(finalAddStep));
final BiConsumer<BuildContext, Object> finalStepInstanceSetup = stepInstanceSetup;
final String name = clazz.getName() + "#" + method.getName();
final String stepId;
List<Method> methodsWithName = nameToMethods.get(method.getName());
if (methodsWithName.size() > 1) {
// Append the sha1 of the parameter types to resolve the ambiguity
stepId = name + "_" + HashUtil.sha1(Arrays.toString(method.getParameterTypes()));
loadLog.debugf("Build steps with ambiguous name detected: %s, using discriminator suffix for step id: %s", name,
stepId);
} else {
stepId = name;
}
MethodHandle methodHandle = unreflect(method, lookup);
chainConfig = chainConfig
.andThen(bcb -> {
BuildStepBuilder bsb = bcb.addBuildStep(new io.quarkus.builder.BuildStep() {
public void execute(final BuildContext bc) {
Object[] ctorArgs = new Object[ctorParamFns.size()];
for (int i = 0; i < ctorArgs.length; i++) {
ctorArgs[i] = ctorParamFns.get(i).apply(bc);
}
Object instance;
try {
instance = constructor.newInstance(ctorArgs);
} catch (InstantiationException e) {
throw ReflectUtil.toError(e);
} catch (IllegalAccessException e) {
throw ReflectUtil.toError(e);
} catch (InvocationTargetException e) {
try {
throw e.getCause();
} catch (RuntimeException | Error e2) {
throw e2;
} catch (Throwable t) {
throw new IllegalStateException(t);
}
}
finalStepInstanceSetup.accept(bc, instance);
Object[] methodArgs = new Object[methodParamFns.size()];
BytecodeRecorderImpl bri = isRecorder
? new BytecodeRecorderImpl(recordAnnotation.value() == ExecutionTime.STATIC_INIT,
clazz.getSimpleName(), method.getName(),
Integer.toString(Math.abs(method.toString().hashCode())), identityComparison,
s -> {
if (s instanceof Class) {
var cfg = ((Class<?>) s).getAnnotation(ConfigRoot.class);
if (cfg == null
|| (cfg.phase() != ConfigPhase.BUILD_AND_RUN_TIME_FIXED
&& recordAnnotation
.value() == ExecutionTime.STATIC_INIT)) {
throw new RuntimeException(
"Can only inject BUILD_AND_RUN_TIME_FIXED objects into a constructor, use RuntimeValue to inject runtime config: "
+ s);
}
return runTimeProxies.get(s);
}
// TODO - Remove once we disallow the injection of runtime objects in build steps
if (s instanceof ParameterizedType p) {
if (p.getRawType() == RuntimeValue.class) {
Object object = runTimeProxies.get(p.getActualTypeArguments()[0]);
if (object == null) {
return new RuntimeValue<>();
}
return new RuntimeValue<>(object);
}
}
return null;
})
: null;
for (int i = 0; i < methodArgs.length; i++) {
methodArgs[i] = methodParamFns.get(i).apply(bc, bri);
}
Object result;
try {
result = methodHandle.bindTo(instance).invokeWithArguments(methodArgs);
} catch (IllegalAccessException e) {
throw ReflectUtil.toError(e);
} catch (RuntimeException | Error e2) {
throw e2;
} catch (Throwable t) {
throw new UndeclaredThrowableException(t);
}
resultConsumer.accept(bc, result);
if (isRecorder) {
// commit recorded data
if (recordAnnotation.value() == ExecutionTime.STATIC_INIT) {
bc.produce(new StaticBytecodeRecorderBuildItem(bri));
} else {
bc.produce(new MainBytecodeRecorderBuildItem(bri));
}
}
}
@Override
public String getId() {
return stepId;
}
public String toString() {
return name;
}
});
finalStepConfig.accept(bsb);
});
}
return chainConfig;
}
private static MethodHandle unreflect(Method method, MethodHandles.Lookup lookup) {
try {
return lookup.unreflect(method);
} catch (IllegalAccessException e) {
throw ReflectUtil.toError(e);
}
}
private static BooleanSupplier and(BooleanSupplier addStep, BooleanSupplierFactoryBuildItem supplierFactory,
Class<? extends BooleanSupplier>[] testClasses, boolean inv) {
for (Class<? extends BooleanSupplier> testClass : testClasses) {
BooleanSupplier bs = supplierFactory.get((Class<? extends BooleanSupplier>) testClass);
if (inv) {
addStep = and(addStep, not(bs));
} else {
addStep = and(addStep, bs);
}
}
return addStep;
}
private static boolean isAnEmptyBuildItemProducer(Type parameterType) {
return isBuildProducerOf(parameterType, EmptyBuildItem.class)
|| isSupplierOf(parameterType, EmptyBuildItem.class)
|| isSupplierOfOptionalOf(parameterType, EmptyBuildItem.class);
}
private static boolean isAnEmptyBuildItemConsumer(Type parameterType) {
return rawTypeExtends(parameterType, EmptyBuildItem.class)
|| isOptionalOf(parameterType, EmptyBuildItem.class)
|| isConsumerOf(parameterType, EmptyBuildItem.class);
}
private static IllegalArgumentException unsupportedConstructorOrFieldProducer(final AnnotatedElement element) {
return reportError(element, "Producing values from constructors or fields is no longer supported."
+ " Inject the BuildProducer/Consumer through arguments of relevant @BuildStep methods instead.");
}
protected static List<Method> getMethods(Class<?> clazz) {
List<Method> declaredMethods = new ArrayList<>();
if (!clazz.getName().equals(Object.class.getName())) {
declaredMethods.addAll(getMethods(clazz.getSuperclass()));
declaredMethods.addAll(asList(clazz.getDeclaredMethods()));
}
declaredMethods.sort(MethodComparator.INSTANCE);
return declaredMethods;
}
private static BooleanSupplier and(BooleanSupplier a, BooleanSupplier b) {
return () -> a.getAsBoolean() && b.getAsBoolean();
}
private static BooleanSupplier not(BooleanSupplier x) {
return () -> !x.getAsBoolean();
}
static IllegalArgumentException reportError(AnnotatedElement e, String msg) {
if (e instanceof Member) {
return new IllegalArgumentException(msg + " at " + e + " of " + ((Member) e).getDeclaringClass());
} else if (e instanceof Parameter) {
return new IllegalArgumentException(msg + " at " + e + " of " + ((Parameter) e).getDeclaringExecutable() + " of "
+ ((Parameter) e).getDeclaringExecutable().getDeclaringClass());
} else {
return new IllegalArgumentException(msg + " at " + e);
}
}
private static | BiConsumer |
java | quarkusio__quarkus | extensions/datasource/deployment/src/main/java/io/quarkus/datasource/deployment/DataSourcesExcludedFromHealthChecksProcessor.java | {
"start": 532,
"end": 1091
} | class ____ {
@BuildStep
@Record(RUNTIME_INIT)
void produceBean(
Capabilities capabilities,
DataSourceRecorder recorder,
BuildProducer<SyntheticBeanBuildItem> syntheticBeans) {
syntheticBeans.produce(SyntheticBeanBuildItem.configure(DataSourceSupport.class)
.scope(Singleton.class)
.unremovable()
.runtimeValue(recorder.createDataSourceSupport())
.setRuntimeInit()
.done());
}
}
| DataSourcesExcludedFromHealthChecksProcessor |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/ClassUtils.java | {
"start": 37269,
"end": 37725
} | class ____; may be null.
* @return the canonical name without the package name or an empty string.
* @since 2.4
* @see Class#getCanonicalName()
*/
public static String getShortCanonicalName(final Class<?> cls) {
return cls == null ? StringUtils.EMPTY : getShortCanonicalName(cls.getCanonicalName());
}
/**
* Gets the canonical name minus the package name for an {@link Object}.
*
* @param object the | name |
java | spring-projects__spring-boot | module/spring-boot-health/src/main/java/org/springframework/boot/health/registry/ReactiveHealthContributorRegistry.java | {
"start": 1025,
"end": 1781
} | interface ____ extends ReactiveHealthContributors {
/**
* Register a contributor with the given {@code name}.
* @param name the name of the contributor
* @param contributor the contributor to register
* @throws IllegalStateException if the contributor cannot be registered with the
* given {@code name}.
*/
void registerContributor(String name, ReactiveHealthContributor contributor);
/**
* Unregister a previously registered contributor.
* @param name the name of the contributor to unregister
* @return the unregistered indicator, or {@code null} if no indicator was found in
* the registry for the given {@code name}.
*/
@Nullable ReactiveHealthContributor unregisterContributor(String name);
}
| ReactiveHealthContributorRegistry |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/context/ConfigurableApplicationContext.java | {
"start": 4661,
"end": 7073
} | class ____ created,
* for example in case of WebApplicationContext setup.
* @param parent the parent context
* @see org.springframework.web.context.ConfigurableWebApplicationContext
*/
void setParent(@Nullable ApplicationContext parent);
/**
* Set the {@code Environment} for this application context.
* @param environment the new environment
* @since 3.1
*/
void setEnvironment(ConfigurableEnvironment environment);
/**
* Return the {@code Environment} for this application context in configurable
* form, allowing for further customization.
* @since 3.1
*/
@Override
ConfigurableEnvironment getEnvironment();
/**
* Set the {@link ApplicationStartup} for this application context.
* <p>This allows the application context to record metrics
* during startup.
* @param applicationStartup the new context event factory
* @since 5.3
*/
void setApplicationStartup(ApplicationStartup applicationStartup);
/**
* Return the {@link ApplicationStartup} for this application context.
* @since 5.3
*/
ApplicationStartup getApplicationStartup();
/**
* Add a new BeanFactoryPostProcessor that will get applied to the internal
* bean factory of this application context on refresh, before any of the
* bean definitions get evaluated. To be invoked during context configuration.
* @param postProcessor the factory processor to register
*/
void addBeanFactoryPostProcessor(BeanFactoryPostProcessor postProcessor);
/**
* Add a new ApplicationListener that will be notified on context events
* such as context refresh and context shutdown.
* <p>Note that any ApplicationListener registered here will be applied
* on refresh if the context is not active yet, or on the fly with the
* current event multicaster in case of a context that is already active.
* @param listener the ApplicationListener to register
* @see org.springframework.context.event.ContextRefreshedEvent
* @see org.springframework.context.event.ContextClosedEvent
*/
void addApplicationListener(ApplicationListener<?> listener);
/**
* Remove the given ApplicationListener from this context's set of listeners,
* assuming it got registered via {@link #addApplicationListener} before.
* @param listener the ApplicationListener to deregister
* @since 6.0
*/
void removeApplicationListener(ApplicationListener<?> listener);
/**
* Specify the ClassLoader to load | is |
java | redisson__redisson | redisson-micronaut/redisson-micronaut-20/src/main/java/org/redisson/micronaut/session/AttributesPutAllMessage.java | {
"start": 896,
"end": 1913
} | class ____ extends AttributeMessage {
private Map<CharSequence, byte[]> attrs;
public AttributesPutAllMessage() {
}
public AttributesPutAllMessage(String nodeId, String sessionId, Map<CharSequence, Object> attrs, Encoder encoder) throws IOException {
super(nodeId, sessionId);
if (attrs != null) {
this.attrs = new HashMap<>();
for (Entry<CharSequence, Object> entry: attrs.entrySet()) {
this.attrs.put(entry.getKey(), toByteArray(encoder, entry.getValue()));
}
} else {
this.attrs = null;
}
}
public Map<CharSequence, Object> getAttrs(Decoder<?> decoder) throws IOException, ClassNotFoundException {
if (attrs == null) {
return null;
}
Map<CharSequence, Object> result = new HashMap<>();
for (Entry<CharSequence, byte[]> entry: attrs.entrySet()) {
result.put(entry.getKey(), toObject(decoder, entry.getValue()));
}
return result;
}
}
| AttributesPutAllMessage |
java | grpc__grpc-java | services/src/main/java/io/grpc/protobuf/services/BinaryLogs.java | {
"start": 821,
"end": 1830
} | class ____ {
/**
* Creates a binary log that writes to a temp file. <b>Warning:</b> this implementation is
* not performance optimized, and RPCs will experience back pressure if disk IO does not keep
* up.
*/
public static BinaryLog createBinaryLog() throws IOException {
return new BinaryLogProviderImpl();
}
/**
* Deprecated and will be removed in a future version of gRPC.
*/
@Deprecated
public static BinaryLog createBinaryLog(BinaryLogSink sink) throws IOException {
return new BinaryLogProviderImpl(sink);
}
/**
* Creates a binary log with a custom {@link BinaryLogSink} for receiving the logged data,
* and a config string as defined by
* <a href="https://github.com/grpc/proposal/blob/master/A16-binary-logging.md">
* A16-binary-logging</a>.
*/
public static BinaryLog createBinaryLog(BinaryLogSink sink, String configStr) throws IOException {
return new BinaryLogProviderImpl(sink, configStr);
}
private BinaryLogs() {}
}
| BinaryLogs |
java | apache__rocketmq | client/src/main/java/org/apache/rocketmq/client/impl/producer/TopicPublishInfo.java | {
"start": 1519,
"end": 5354
} | interface ____ {
boolean filter(MessageQueue mq);
}
public boolean isOrderTopic() {
return orderTopic;
}
public void setOrderTopic(boolean orderTopic) {
this.orderTopic = orderTopic;
}
public boolean ok() {
return null != this.messageQueueList && !this.messageQueueList.isEmpty();
}
public List<MessageQueue> getMessageQueueList() {
return messageQueueList;
}
public void setMessageQueueList(List<MessageQueue> messageQueueList) {
this.messageQueueList = messageQueueList;
}
public ThreadLocalIndex getSendWhichQueue() {
return sendWhichQueue;
}
public void setSendWhichQueue(ThreadLocalIndex sendWhichQueue) {
this.sendWhichQueue = sendWhichQueue;
}
public boolean isHaveTopicRouterInfo() {
return haveTopicRouterInfo;
}
public void setHaveTopicRouterInfo(boolean haveTopicRouterInfo) {
this.haveTopicRouterInfo = haveTopicRouterInfo;
}
public MessageQueue selectOneMessageQueue(QueueFilter ...filter) {
return selectOneMessageQueue(this.messageQueueList, this.sendWhichQueue, filter);
}
private MessageQueue selectOneMessageQueue(List<MessageQueue> messageQueueList, ThreadLocalIndex sendQueue, QueueFilter ...filter) {
if (messageQueueList == null || messageQueueList.isEmpty()) {
return null;
}
if (filter != null && filter.length != 0) {
for (int i = 0; i < messageQueueList.size(); i++) {
int index = Math.abs(sendQueue.incrementAndGet() % messageQueueList.size());
MessageQueue mq = messageQueueList.get(index);
boolean filterResult = true;
for (QueueFilter f: filter) {
Preconditions.checkNotNull(f);
filterResult &= f.filter(mq);
}
if (filterResult) {
return mq;
}
}
return null;
}
int index = Math.abs(sendQueue.incrementAndGet() % messageQueueList.size());
return messageQueueList.get(index);
}
public void resetIndex() {
this.sendWhichQueue.reset();
}
public MessageQueue selectOneMessageQueue(final String lastBrokerName) {
if (lastBrokerName == null) {
return selectOneMessageQueue();
} else {
for (int i = 0; i < this.messageQueueList.size(); i++) {
MessageQueue mq = selectOneMessageQueue();
if (!mq.getBrokerName().equals(lastBrokerName)) {
return mq;
}
}
return selectOneMessageQueue();
}
}
public MessageQueue selectOneMessageQueue() {
int index = this.sendWhichQueue.incrementAndGet();
int pos = index % this.messageQueueList.size();
return this.messageQueueList.get(pos);
}
public int getWriteQueueNumsByBroker(final String brokerName) {
for (int i = 0; i < topicRouteData.getQueueDatas().size(); i++) {
final QueueData queueData = this.topicRouteData.getQueueDatas().get(i);
if (queueData.getBrokerName().equals(brokerName)) {
return queueData.getWriteQueueNums();
}
}
return -1;
}
@Override
public String toString() {
return "TopicPublishInfo [orderTopic=" + orderTopic + ", messageQueueList=" + messageQueueList
+ ", sendWhichQueue=" + sendWhichQueue + ", haveTopicRouterInfo=" + haveTopicRouterInfo + "]";
}
public TopicRouteData getTopicRouteData() {
return topicRouteData;
}
public void setTopicRouteData(final TopicRouteData topicRouteData) {
this.topicRouteData = topicRouteData;
}
}
| QueueFilter |
java | apache__logging-log4j2 | log4j-api/src/main/java/org/apache/logging/log4j/LogManager.java | {
"start": 12551,
"end": 13256
} | class ____ the container's classpath then a different LoggerContext may
* be returned. If true then only a single LoggerContext will be returned.
* @return a LoggerContext.
*/
protected static LoggerContext getContext(final String fqcn, final boolean currentContext) {
try {
return factory.getContext(fqcn, null, null, currentContext);
} catch (final IllegalStateException ex) {
LOGGER.warn("{} Using SimpleLogger", ex.getMessage());
return SimpleLoggerContextFactory.INSTANCE.getContext(fqcn, null, null, currentContext);
}
}
/**
* Returns a LoggerContext
*
* @param fqcn The fully qualified | in |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/serializer/WriteNullListAsEmptyTest.java | {
"start": 1573,
"end": 1827
} | class ____ {
private List<Long> names = null;
public List<Long> getNames() {
return names;
}
public void setNames(List<Long> names) {
this.names = names;
}
}
}
| VO4 |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/insert/MySqlInsertTest_36.java | {
"start": 1211,
"end": 2997
} | class ____ extends TestCase {
public void test_insert_0() throws Exception {
String sql = "insert into -- @@@\n" +
" tablex(id, value) -- @@@\n" +
" values (?, ?)";
MySqlStatementParser parser = new MySqlStatementParser(sql, false, true);
parser.config(SQLParserFeature.KeepInsertValueClauseOriginalString, true);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement stmt = statementList.get(0);
MySqlInsertStatement insertStmt = (MySqlInsertStatement) stmt;
assertEquals(1, insertStmt.getValuesList().size());
assertEquals(2, insertStmt.getValues().getValues().size());
assertEquals(2, insertStmt.getColumns().size());
assertEquals(1, statementList.size());
MySqlSchemaStatVisitor v = new MySqlSchemaStatVisitor();
stmt.accept(v);
String formatSql = "INSERT INTO tablex (id, value)\n" +
"VALUES (?, ?)";
assertEquals(formatSql, SQLUtils.toMySqlString(insertStmt));
String psql = ParameterizedOutputVisitorUtils.parameterize(sql, JdbcConstants.MYSQL);
assertEquals("INSERT INTO tablex(id, value)\n" +
"VALUES (?, ?)", psql);
// System.out.println("Tables : " + visitor.getTables());
// System.out.println("fields : " + visitor.getColumns());
// System.out.println("coditions : " + visitor.getConditions());
// System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(1, v.getTables().size());
// assertEquals(1, visitor.getColumns().size());
// assertEquals(0, visitor.getConditions().size());
// assertEquals(0, visitor.getOrderByColumns().size());
}
}
| MySqlInsertTest_36 |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/destination/DestinationPackageNameTest.java | {
"start": 499,
"end": 3143
} | class ____ {
@ProcessorTest
@WithClasses({ DestinationPackageNameMapper.class })
public void shouldGenerateInRightPackage() {
DestinationPackageNameMapper instance = DestinationPackageNameMapper.INSTANCE;
assertThat( instance.getClass().getName() )
.isEqualTo( "org.mapstruct.ap.test.destination.dest.DestinationPackageNameMapperImpl" );
}
@ProcessorTest
@WithClasses({ DestinationPackageNameMapperWithSuffix.class })
public void shouldGenerateInRightPackageWithSuffix() {
DestinationPackageNameMapperWithSuffix instance = DestinationPackageNameMapperWithSuffix.INSTANCE;
assertThat( instance.getClass().getName() )
.isEqualTo( "org.mapstruct.ap.test.destination.dest.DestinationPackageNameMapperWithSuffixMyImpl" );
}
@ProcessorTest
@WithClasses({ DestinationPackageNameMapperConfig.class, DestinationPackageNameMapperWithConfig.class })
public void shouldGenerateRightSuffixWithConfig() {
DestinationPackageNameMapperWithConfig instance = DestinationPackageNameMapperWithConfig.INSTANCE;
assertThat( instance.getClass().getName() )
.isEqualTo( "org.mapstruct.ap.test.destination.dest.DestinationPackageNameMapperWithConfigImpl" );
}
@ProcessorTest
@WithClasses({ DestinationPackageNameMapperConfig.class, DestinationPackageNameMapperWithConfigOverride.class })
public void shouldGenerateRightSuffixWithConfigOverride() {
DestinationPackageNameMapperWithConfigOverride instance =
DestinationPackageNameMapperWithConfigOverride.INSTANCE;
assertThat( instance.getClass().getName() )
.isEqualTo(
"org.mapstruct.ap.test.destination.my_dest.DestinationPackageNameMapperWithConfigOverrideImpl"
);
}
@ProcessorTest
@WithClasses({ DestinationPackageNameMapperDecorated.class, DestinationPackageNameMapperDecorator.class })
public void shouldGenerateRightSuffixWithDecorator() {
DestinationPackageNameMapperDecorated instance = DestinationPackageNameMapperDecorated.INSTANCE;
assertThat( instance.getClass().getName() )
.isEqualTo( "org.mapstruct.ap.test.destination.dest.DestinationPackageNameMapperDecoratedImpl" );
assertThat( instance ).isInstanceOf( DestinationPackageNameMapperDecorator.class );
assertThat( ( (DestinationPackageNameMapperDecorator) instance ).delegate.getClass().getName() )
.isEqualTo( "org.mapstruct.ap.test.destination.dest.DestinationPackageNameMapperDecoratedImpl_" );
}
}
| DestinationPackageNameTest |
java | netty__netty | resolver-dns/src/main/java/io/netty/resolver/dns/DnsNameResolverTimeoutException.java | {
"start": 960,
"end": 1292
} | class ____ extends DnsNameResolverException {
private static final long serialVersionUID = -8826717969627131854L;
public DnsNameResolverTimeoutException(
InetSocketAddress remoteAddress, DnsQuestion question, String message) {
super(remoteAddress, question, message);
}
}
| DnsNameResolverTimeoutException |
java | google__dagger | javatests/dagger/functional/assisted/AssistedFactoryWithArrayTypesTest.java | {
"start": 1099,
"end": 1168
} | interface ____ {
Foo create(Dep[] depArray);
}
static | FooFactory |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/transaction/managed/ManagedTransactionFactoryTest.java | {
"start": 1265,
"end": 2259
} | class ____ extends BaseDataTest {
@Mock
private Connection conn;
@Test
void shouldEnsureThatCallsToManagedTransactionAPIDoNotForwardToManagedConnections() throws Exception {
TransactionFactory tf = new ManagedTransactionFactory();
tf.setProperties(new Properties());
Transaction tx = tf.newTransaction(conn);
assertEquals(conn, tx.getConnection());
tx.commit();
tx.rollback();
tx.close();
verify(conn).close();
}
@Test
void shouldEnsureThatCallsToManagedTransactionAPIDoNotForwardToManagedConnectionsAndDoesNotCloseConnection()
throws Exception {
TransactionFactory tf = new ManagedTransactionFactory();
Properties props = new Properties();
props.setProperty("closeConnection", "false");
tf.setProperties(props);
Transaction tx = tf.newTransaction(conn);
assertEquals(conn, tx.getConnection());
tx.commit();
tx.rollback();
tx.close();
verifyNoMoreInteractions(conn);
}
}
| ManagedTransactionFactoryTest |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/http/server/reactive/ServerHttpsRequestIntegrationTests.java | {
"start": 3536,
"end": 4047
} | class ____ implements HttpHandler {
@Override
public Mono<Void> handle(ServerHttpRequest request, ServerHttpResponse response) {
URI uri = request.getURI();
assertThat(uri.getScheme()).isEqualTo("https");
assertThat(uri.getHost()).isNotNull();
assertThat(uri.getPort()).isNotEqualTo(-1);
assertThat(request.getRemoteAddress()).isNotNull();
assertThat(uri.getPath()).isEqualTo("/foo");
assertThat(uri.getQuery()).isEqualTo("param=bar");
return Mono.empty();
}
}
}
| CheckRequestHandler |
java | quarkusio__quarkus | extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/certReload/ManagementHttpServerTlsCertificateReloadTest.java | {
"start": 5281,
"end": 7329
} | interface ____ port
.setDefaultHost("localhost")
.setTrustOptions(new PemTrustOptions().addCertPath(new File(certs, "/ca.crt").getAbsolutePath()));
String response1 = vertx.createHttpClient(options)
.request(HttpMethod.GET, "/q/hello")
.flatMap(HttpClientRequest::send)
.flatMap(HttpClientResponse::body)
.map(Buffer::toString)
.toCompletionStage().toCompletableFuture().join();
// Update certs
Files.copy(new File("target/certificates/reload-D.crt").toPath(),
new File(certs, "/tls.crt").toPath(), java.nio.file.StandardCopyOption.REPLACE_EXISTING);
Files.copy(new File("target/certificates/reload-D.key").toPath(),
new File(certs, "/tls.key").toPath(), java.nio.file.StandardCopyOption.REPLACE_EXISTING);
// Trigger the reload
TlsCertificateReloader.reload().toCompletableFuture().get(10, TimeUnit.SECONDS);
// The client truststore is not updated, thus it should fail.
assertThatThrownBy(() -> vertx.createHttpClient(options)
.request(HttpMethod.GET, "/hello")
.flatMap(HttpClientRequest::send)
.flatMap(HttpClientResponse::body)
.map(Buffer::toString)
.toCompletionStage().toCompletableFuture().join()).hasCauseInstanceOf(SSLHandshakeException.class);
var options2 = new HttpClientOptions(options)
.setTrustOptions(new PemTrustOptions().addCertPath("target/certificates/reload-D-ca.crt"));
var response2 = vertx.createHttpClient(options2)
.request(HttpMethod.GET, "/hello")
.flatMap(HttpClientRequest::send)
.flatMap(HttpClientResponse::body)
.map(Buffer::toString)
.toCompletionStage().toCompletableFuture().join();
assertThat(response1).isNotEqualTo(response2); // Because cert duration are different.
}
public static | test |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/async/AbstractAsynchronousOperationHandlers.java | {
"start": 6620,
"end": 10212
} | class ____<T extends RestfulGateway, V, M extends MessageParameters>
extends AbstractRestHandler<T, EmptyRequestBody, AsynchronousOperationResult<V>, M> {
protected StatusHandler(
GatewayRetriever<? extends T> leaderRetriever,
Duration timeout,
Map<String, String> responseHeaders,
MessageHeaders<EmptyRequestBody, AsynchronousOperationResult<V>, M>
messageHeaders) {
super(leaderRetriever, timeout, responseHeaders, messageHeaders);
}
@Override
public CompletableFuture<AsynchronousOperationResult<V>> handleRequest(
@Nonnull HandlerRequest<EmptyRequestBody> request, @Nonnull T gateway)
throws RestHandlerException {
final K key = getOperationKey(request);
final Optional<OperationResult<R>> operationResultOptional =
completedOperationCache.get(key);
if (!operationResultOptional.isPresent()) {
return FutureUtils.completedExceptionally(
new NotFoundException("Operation not found under key: " + key));
}
final OperationResult<R> operationResult = operationResultOptional.get();
switch (operationResult.getStatus()) {
case SUCCESS:
return CompletableFuture.completedFuture(
AsynchronousOperationResult.completed(
operationResultResponse(operationResult.getResult())));
case FAILURE:
return CompletableFuture.completedFuture(
AsynchronousOperationResult.completed(
exceptionalOperationResultResponse(
operationResult.getThrowable())));
case IN_PROGRESS:
return CompletableFuture.completedFuture(
AsynchronousOperationResult.inProgress());
default:
throw new IllegalStateException(
"No handler for operation status "
+ operationResult.getStatus()
+ ", encountered for key "
+ key);
}
}
@Override
public CompletableFuture<Void> closeHandlerAsync() {
return completedOperationCache.closeAsync();
}
/**
* Extract the operation key under which the operation result future is stored.
*
* @param request with which the status handler has been called
* @return Operation key under which the operation result future is stored
*/
protected abstract K getOperationKey(HandlerRequest<EmptyRequestBody> request);
/**
* Create an exceptional operation result from the given {@link Throwable}. This method is
* called if the asynchronous operation failed.
*
* @param throwable failure of the asynchronous operation
* @return Exceptional operation result
*/
protected abstract V exceptionalOperationResultResponse(Throwable throwable);
/**
* Create the operation result from the given value.
*
* @param operationResult of the asynchronous operation
* @return Operation result
*/
protected abstract V operationResultResponse(R operationResult);
}
}
| StatusHandler |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/deviceplugin/VolumeSpec.java | {
"start": 2380,
"end": 3037
} | class ____ {
private String volumeDriver;
private String volumeName;
private String volumeOperation;
private Builder(){}
public static Builder newInstance() {
return new Builder();
}
public VolumeSpec build() {
return new VolumeSpec(this);
}
public Builder setVolumeDriver(String volDriver) {
this.volumeDriver = volDriver;
return this;
}
public Builder setVolumeName(String volName) {
this.volumeName = volName;
return this;
}
public Builder setVolumeOperation(String volOperation) {
this.volumeOperation = volOperation;
return this;
}
}
}
| Builder |
java | quarkusio__quarkus | extensions/smallrye-reactive-messaging/runtime/src/main/java/io/quarkus/smallrye/reactivemessaging/runtime/WorkerConfiguration.java | {
"start": 63,
"end": 1186
} | class ____ {
private String className;
private String methodName;
private String poolName;
private boolean virtualThread;
public WorkerConfiguration() {
}
public WorkerConfiguration(String className, String name, String poolName, boolean virtualThread) {
this.className = className;
this.methodName = name;
this.poolName = poolName;
this.virtualThread = virtualThread;
}
public String getClassName() {
return className;
}
public void setClassName(String className) {
this.className = className;
}
public String getMethodName() {
return methodName;
}
public void setMethodName(String methodName) {
this.methodName = methodName;
}
public String getPoolName() {
return poolName;
}
public void setPoolName(String poolName) {
this.poolName = poolName;
}
public boolean isVirtualThread() {
return virtualThread;
}
public void setVirtualThread(boolean virtualThread) {
this.virtualThread = virtualThread;
}
}
| WorkerConfiguration |
java | apache__kafka | storage/src/test/java/org/apache/kafka/storage/internals/log/RemoteIndexCacheTest.java | {
"start": 4544,
"end": 72339
} | class ____ {
private final long defaultRemoteIndexCacheSizeBytes = 1024 * 1024L;
private final Logger logger = LoggerFactory.getLogger(RemoteIndexCacheTest.class);
private final MockTime time = new MockTime();
private final int brokerId = 1;
private final long baseOffset = Integer.MAX_VALUE + 101337L; // start with a base offset which is a long
private final long lastOffset = baseOffset + 30L;
private final int segmentSize = 1024;
private final RemoteStorageManager rsm = mock(RemoteStorageManager.class);
private RemoteIndexCache cache;
private RemoteLogSegmentMetadata rlsMetadata;
private File logDir;
private File tpDir;
private TopicIdPartition idPartition;
@BeforeEach
public void setup() throws IOException, RemoteStorageException {
idPartition = new TopicIdPartition(Uuid.randomUuid(), new TopicPartition("foo", 0));
logDir = TestUtils.tempDirectory("kafka-" + this.getClass().getSimpleName());
tpDir = new File(logDir, idPartition.toString());
Files.createDirectory(tpDir.toPath());
RemoteLogSegmentId remoteLogSegmentId = RemoteLogSegmentId.generateNew(idPartition);
rlsMetadata = new RemoteLogSegmentMetadata(remoteLogSegmentId, baseOffset, lastOffset, time.milliseconds(),
brokerId, time.milliseconds(), segmentSize, Collections.singletonMap(0, 0L));
cache = new RemoteIndexCache(defaultRemoteIndexCacheSizeBytes, rsm, logDir.toString());
cache.setFileDeleteDelayMs(0);
mockRsmFetchIndex(rsm);
}
@AfterEach
public void cleanup() throws InterruptedException {
reset(rsm);
// the files created for the test will be deleted automatically on thread exit since we use temp dir
Utils.closeQuietly(cache, "RemoteIndexCache created for unit test");
// best effort to delete the per-test resource. Even if we don't delete, it is ok because the parent directory
// will be deleted at the end of test.
try {
Utils.delete(logDir);
} catch (IOException ioe) {
// ignore
}
// Verify no lingering threads. It is important to have this as the very last statement in the @AfterEach
// because this may throw an exception and prevent cleanup after it
TestUtils.assertNoLeakedThreadsWithNameAndDaemonStatus(REMOTE_LOG_INDEX_CACHE_CLEANER_THREAD, true);
}
@Test
public void testIndexFileNameAndLocationOnDisk() {
RemoteIndexCache.Entry entry = cache.getIndexEntry(rlsMetadata);
Path offsetIndexFile = entry.offsetIndex().file().toPath();
Path txnIndexFile = entry.txnIndex().file().toPath();
Path timeIndexFile = entry.timeIndex().file().toPath();
String expectedOffsetIndexFileName = remoteOffsetIndexFileName(rlsMetadata);
String expectedTimeIndexFileName = remoteTimeIndexFileName(rlsMetadata);
String expectedTxnIndexFileName = remoteTransactionIndexFileName(rlsMetadata);
assertEquals(expectedOffsetIndexFileName, offsetIndexFile.getFileName().toString());
assertEquals(expectedTxnIndexFileName, txnIndexFile.getFileName().toString());
assertEquals(expectedTimeIndexFileName, timeIndexFile.getFileName().toString());
// assert that parent directory for the index files is correct
assertEquals(DIR_NAME, offsetIndexFile.getParent().getFileName().toString(),
"offsetIndex=" + offsetIndexFile + " is created under incorrect parent");
assertEquals(DIR_NAME, txnIndexFile.getParent().getFileName().toString(),
"txnIndex=" + txnIndexFile + " is created under incorrect parent");
assertEquals(DIR_NAME, timeIndexFile.getParent().getFileName().toString(),
"timeIndex=" + timeIndexFile + " is created under incorrect parent");
}
@Test
public void testFetchIndexFromRemoteStorage() throws RemoteStorageException {
OffsetIndex offsetIndex = cache.getIndexEntry(rlsMetadata).offsetIndex();
OffsetPosition offsetPosition1 = offsetIndex.entry(1);
// this call should have invoked fetchOffsetIndex, fetchTimestampIndex once
int resultPosition = cache.lookupOffset(rlsMetadata, offsetPosition1.offset());
assertEquals(offsetPosition1.position(), resultPosition);
verifyFetchIndexInvocation(1, List.of(IndexType.OFFSET, IndexType.TIMESTAMP));
// this should not cause fetching index from RemoteStorageManager as it is already fetched earlier
reset(rsm);
OffsetPosition offsetPosition2 = offsetIndex.entry(2);
int resultPosition2 = cache.lookupOffset(rlsMetadata, offsetPosition2.offset());
assertEquals(offsetPosition2.position(), resultPosition2);
assertNotNull(cache.getIndexEntry(rlsMetadata));
verifyNoInteractions(rsm);
}
@Test
public void testFetchIndexForMissingTransactionIndex() throws RemoteStorageException {
when(rsm.fetchIndex(any(RemoteLogSegmentMetadata.class), any(IndexType.class))).thenAnswer(ans -> {
RemoteLogSegmentMetadata metadata = ans.getArgument(0);
IndexType indexType = ans.getArgument(1);
OffsetIndex offsetIdx = createOffsetIndexForSegmentMetadata(metadata, tpDir);
TimeIndex timeIdx = createTimeIndexForSegmentMetadata(metadata, tpDir);
maybeAppendIndexEntries(offsetIdx, timeIdx);
return switch (indexType) {
case OFFSET -> new FileInputStream(offsetIdx.file());
case TIMESTAMP -> new FileInputStream(timeIdx.file());
// Throw RemoteResourceNotFoundException since transaction index is not available
case TRANSACTION -> throw new RemoteResourceNotFoundException("txn index not found");
case LEADER_EPOCH -> null; // leader-epoch-cache is not accessed.
case PRODUCER_SNAPSHOT -> null; // producer-snapshot is not accessed.
};
});
RemoteIndexCache.Entry entry = cache.getIndexEntry(rlsMetadata);
// Verify an empty file is created in the cache directory
assertTrue(entry.txnIndex().file().exists());
assertEquals(0, entry.txnIndex().file().length());
}
@Test
public void testPositionForNonExistentEntry() {
OffsetIndex offsetIndex = cache.getIndexEntry(rlsMetadata).offsetIndex();
int lastOffsetPosition = cache.lookupOffset(rlsMetadata, offsetIndex.lastOffset());
long greaterOffsetThanLastOffset = offsetIndex.lastOffset() + 1;
assertEquals(lastOffsetPosition, cache.lookupOffset(rlsMetadata, greaterOffsetThanLastOffset));
// offsetIndex.lookup() returns OffsetPosition(baseOffset, 0) for offsets smaller than the last entry in the offset index.
OffsetPosition nonExistentOffsetPosition = new OffsetPosition(baseOffset, 0);
long lowerOffsetThanBaseOffset = offsetIndex.baseOffset() - 1;
assertEquals(nonExistentOffsetPosition.position(), cache.lookupOffset(rlsMetadata, lowerOffsetThanBaseOffset));
}
@Test
public void testCacheEntryExpiry() throws IOException, RemoteStorageException, InterruptedException {
long estimateEntryBytesSize = estimateOneEntryBytesSize();
// close existing cache created in test setup before creating a new one
Utils.closeQuietly(cache, "RemoteIndexCache created for unit test");
cache = new RemoteIndexCache(2 * estimateEntryBytesSize, rsm, logDir.toString());
TopicIdPartition tpId = new TopicIdPartition(Uuid.randomUuid(), new TopicPartition("foo", 0));
List<RemoteLogSegmentMetadata> metadataList = generateRemoteLogSegmentMetadata(3, tpId);
assertCacheSize(0);
// getIndex for first time will call rsm#fetchIndex
cache.getIndexEntry(metadataList.get(0));
assertCacheSize(1);
// Calling getIndex on the same entry should not call rsm#fetchIndex again, but it should retrieve from cache
cache.getIndexEntry(metadataList.get(0));
assertCacheSize(1);
verifyFetchIndexInvocation(1);
// Here a new key metadataList(1) is invoked, that should call rsm#fetchIndex, making the count to 2
cache.getIndexEntry(metadataList.get(0));
cache.getIndexEntry(metadataList.get(1));
assertCacheSize(2);
verifyFetchIndexInvocation(2);
// Getting index for metadataList.last should call rsm#fetchIndex
// to populate this entry one of the other 2 entries will be evicted. We don't know which one since it's based on
// a probabilistic formula for Window TinyLfu. See docs for RemoteIndexCache
int size = metadataList.size();
assertNotNull(cache.getIndexEntry(metadataList.get(size - 1)));
assertAtLeastOnePresent(cache, metadataList.get(1).remoteLogSegmentId().id(), metadataList.get(0).remoteLogSegmentId().id());
assertCacheSize(2);
verifyFetchIndexInvocation(3);
// getting index for last expired entry should call rsm#fetchIndex as that entry was expired earlier
Optional<RemoteLogSegmentMetadata> missingEntryOpt = Optional.empty();
for (RemoteLogSegmentMetadata entry : metadataList) {
Uuid segmentId = entry.remoteLogSegmentId().id();
if (!cache.internalCache().asMap().containsKey(segmentId)) {
missingEntryOpt = Optional.of(entry);
break;
}
}
assertFalse(missingEntryOpt.isEmpty());
cache.getIndexEntry(missingEntryOpt.get());
assertCacheSize(2);
verifyFetchIndexInvocation(4);
}
@Test
public void shouldThrowErrorWhenAccessedAfterCacheClose() throws RemoteStorageException, InterruptedException {
TopicIdPartition tpId = new TopicIdPartition(Uuid.randomUuid(), new TopicPartition("foo", 0));
List<RemoteLogSegmentMetadata> metadataList = generateRemoteLogSegmentMetadata(3, tpId);
assertCacheSize(0);
cache.getIndexEntry(metadataList.get(0));
assertCacheSize(1);
verifyFetchIndexInvocation(1);
cache.close();
assertThrows(IllegalStateException.class, () -> cache.getIndexEntry(metadataList.get(0)));
}
@Test
public void testCloseIsIdempotent() throws IOException {
// generate and add entry to cache
RemoteIndexCache.Entry spyEntry = generateSpyCacheEntry();
cache.internalCache().put(rlsMetadata.remoteLogSegmentId().id(), spyEntry);
cache.close();
cache.close();
// verify that entry is only closed once
verify(spyEntry).close();
}
@Test
public void testCacheEntryIsDeletedOnRemoval() throws IOException, InterruptedException {
Uuid internalIndexKey = rlsMetadata.remoteLogSegmentId().id();
RemoteIndexCache.Entry cacheEntry = generateSpyCacheEntry();
// verify index files on disk
assertTrue(getIndexFileFromDisk(LogFileUtils.INDEX_FILE_SUFFIX).isPresent(), "Offset index file should be present on disk at " + tpDir.toPath());
assertTrue(getIndexFileFromDisk(LogFileUtils.TXN_INDEX_FILE_SUFFIX).isPresent(), "Txn index file should be present on disk at " + tpDir.toPath());
assertTrue(getIndexFileFromDisk(LogFileUtils.TIME_INDEX_FILE_SUFFIX).isPresent(), "Time index file should be present on disk at " + tpDir.toPath());
// add the spied entry into the cache, it will overwrite the non-spied entry
cache.internalCache().put(internalIndexKey, cacheEntry);
// no expired entries yet
assertEquals(0, cache.expiredIdxPendingForDeletion(), "expiredIndex queue should be zero at start of test");
// call remove function to mark the entry for removal
cache.remove(internalIndexKey);
// wait until entry is marked for deletion
assertTrue(cacheEntry::isMarkedForCleanup,
"Failed to mark cache entry for cleanup after remove");
TestUtils.waitForCondition(cacheEntry::isCleanStarted,
"Failed to cleanup cache entry after remove");
verify(cacheEntry).markForCleanup();
// after that async it will be cleaned up
verify(cacheEntry).cleanup();
// verify that index(s) rename is only called 1 time
verify(cacheEntry.timeIndex()).renameTo(any(File.class));
verify(cacheEntry.offsetIndex()).renameTo(any(File.class));
verify(cacheEntry.txnIndex()).renameTo(any(File.class));
// wait until the delete method is invoked
TestUtils.waitForCondition(() -> {
try {
verify(cacheEntry.timeIndex()).deleteIfExists();
verify(cacheEntry.offsetIndex()).deleteIfExists();
verify(cacheEntry.txnIndex()).deleteIfExists();
return true;
} catch (Exception e) {
return false;
}
}, "Failed to delete index file");
// verify no index files on disk
TestUtils.waitForCondition(() -> getIndexFileFromRemoteCacheDir(cache, LogFileUtils.INDEX_FILE_SUFFIX).isEmpty(),
"Offset index file should not be present on disk at " + tpDir.toPath());
TestUtils.waitForCondition(() -> getIndexFileFromRemoteCacheDir(cache, LogFileUtils.TXN_INDEX_FILE_SUFFIX).isEmpty(),
"Txn index file should not be present on disk at " + tpDir.toPath());
TestUtils.waitForCondition(() -> getIndexFileFromRemoteCacheDir(cache, LogFileUtils.TIME_INDEX_FILE_SUFFIX).isEmpty(),
"Time index file should not be present on disk at " + tpDir.toPath());
TestUtils.waitForCondition(() -> getIndexFileFromRemoteCacheDir(cache, LogFileUtils.DELETED_FILE_SUFFIX).isEmpty(),
"Index file marked for deletion should not be present on disk at " + tpDir.toPath());
}
private Optional<Path> getIndexFileFromDisk(String suffix) throws IOException {
return Files.walk(cache.cacheDir().toPath())
.filter(Files::isRegularFile)
.filter(path -> path.getFileName().toString().endsWith(suffix))
.findAny();
}
@Test
public void testCleanerThreadShutdown() throws IOException, InterruptedException {
// cache is empty at beginning
assertTrue(cache.internalCache().asMap().isEmpty());
// create a new entry
RemoteIndexCache.Entry spyEntry = generateSpyCacheEntry();
doAnswer(invocation -> {
invocation.callRealMethod();
// an exception should not close the cleaner thread
throw new RuntimeException("kaboom! I am expected exception in unit test.");
}).when(spyEntry).cleanup();
Uuid key = Uuid.randomUuid();
cache.internalCache().put(key, spyEntry);
// trigger cleanup
cache.remove(key);
// wait for cleanup to start
TestUtils.waitForCondition(spyEntry::isCleanStarted,
"Failed while waiting for clean up to start");
// Give the thread cleaner thread some time to throw an exception
Thread.sleep(100);
verify(spyEntry, times(1)).cleanup();
// Verify that Cleaner thread is still running even when exception is thrown in doWork()
Set<Thread> threads = getRunningCleanerThread();
assertEquals(1, threads.size(),
"Found unexpected " + threads.size() + " threads=" + threads.stream().map(Thread::getName).collect(Collectors.joining(", ")));
// close the cache properly
cache.close();
// verify that the thread is closed properly
TestUtils.waitForCondition(
() -> getRunningCleanerThread().isEmpty(),
() -> "Failed while waiting for cleaner threads to shutdown. Remaining threads: " +
getRunningCleanerThread().stream().map(Thread::getName).collect(Collectors.joining(", ")));
// if the thread is correctly being shutdown it will not be running
assertFalse(cache.cleanerScheduler().isStarted(), "Unexpected thread state=running. Check error logs.");
}
@Test
public void testClose() throws IOException, InterruptedException {
RemoteIndexCache.Entry spyEntry = generateSpyCacheEntry();
cache.internalCache().put(rlsMetadata.remoteLogSegmentId().id(), spyEntry);
TestUtils.waitForCondition(() -> cache.cleanerScheduler().isStarted(), "Cleaner thread should be started");
// close the cache
cache.close();
// closing the cache should close the entry
verify(spyEntry).close();
// close for all index entries must be invoked
verify(spyEntry.txnIndex()).close();
verify(spyEntry.offsetIndex()).close();
verify(spyEntry.timeIndex()).close();
// index files must not be deleted
verify(spyEntry.txnIndex(), times(0)).deleteIfExists();
verify(spyEntry.offsetIndex(), times(0)).deleteIfExists();
verify(spyEntry.timeIndex(), times(0)).deleteIfExists();
// verify cleaner thread is shutdown
assertFalse(cache.cleanerScheduler().isStarted());
}
@Test
public void testConcurrentReadWriteAccessForCache() throws InterruptedException, RemoteStorageException {
TopicIdPartition tpId = new TopicIdPartition(Uuid.randomUuid(), new TopicPartition("foo", 0));
List<RemoteLogSegmentMetadata> metadataList = generateRemoteLogSegmentMetadata(3, tpId);
assertCacheSize(0);
// getIndex for first time will call rsm#fetchIndex
cache.getIndexEntry(metadataList.get(0));
assertCacheSize(1);
verifyFetchIndexInvocation(1, List.of(IndexType.OFFSET, IndexType.TIMESTAMP));
reset(rsm);
// Simulate a concurrency situation where one thread is reading the entry already present in the cache (cache hit)
// and the other thread is reading an entry which is not available in the cache (cache miss). The expected behaviour
// is for the former thread to succeed while latter is fetching from rsm.
// In this test we simulate the situation using latches. We perform the following operations:
// 1. Start the CacheMiss thread and wait until it starts executing the rsm.fetchIndex
// 2. Block the CacheMiss thread inside the call to rsm.fetchIndex.
// 3. Start the CacheHit thread. Assert that it performs a successful read.
// 4. On completion of successful read by CacheHit thread, signal the CacheMiss thread to release its block.
// 5. Validate that the test passes. If the CacheMiss thread was blocking the CacheHit thread, the test will fail.
CountDownLatch latchForCacheHit = new CountDownLatch(1);
CountDownLatch latchForCacheMiss = new CountDownLatch(1);
Runnable readerCacheHit = () -> {
// Wait for signal to start executing the read
logger.debug("Waiting for signal to begin read from {}", Thread.currentThread());
try {
latchForCacheHit.await();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
RemoteIndexCache.Entry entry = cache.getIndexEntry(metadataList.get(0));
assertNotNull(entry);
// Signal the CacheMiss to unblock itself
logger.debug("Signaling CacheMiss to unblock from {}", Thread.currentThread());
latchForCacheMiss.countDown();
};
when(rsm.fetchIndex(any(RemoteLogSegmentMetadata.class), any(IndexType.class))).thenAnswer(answer -> {
logger.debug("Signaling CacheHit to begin read from {}", Thread.currentThread());
latchForCacheHit.countDown();
logger.debug("Waiting for signal to complete rsm fetch from {}", Thread.currentThread());
latchForCacheMiss.await();
return null;
});
Runnable readerCacheMiss = () -> {
int size = metadataList.size();
RemoteIndexCache.Entry entry = cache.getIndexEntry(metadataList.get(size - 1));
assertNotNull(entry);
};
ExecutorService executor = Executors.newFixedThreadPool(2);
try {
executor.submit(readerCacheMiss);
executor.submit(readerCacheHit);
assertTrue(latchForCacheMiss.await(30, TimeUnit.SECONDS));
} finally {
executor.shutdownNow();
}
}
@Test
public void testReloadCacheAfterClose() throws IOException, RemoteStorageException, InterruptedException {
long estimateEntryBytesSize = estimateOneEntryBytesSize();
// close existing cache created in test setup before creating a new one
Utils.closeQuietly(cache, "RemoteIndexCache created for unit test");
cache = new RemoteIndexCache(2 * estimateEntryBytesSize, rsm, logDir.toString());
TopicIdPartition tpId = new TopicIdPartition(Uuid.randomUuid(), new TopicPartition("foo", 0));
List<RemoteLogSegmentMetadata> metadataList = generateRemoteLogSegmentMetadata(3, tpId);
assertCacheSize(0);
// getIndex for first time will call rsm#fetchIndex
cache.getIndexEntry(metadataList.get(0));
assertCacheSize(1);
// Calling getIndex on the same entry should not call rsm#fetchIndex again, but it should retrieve from cache
cache.getIndexEntry(metadataList.get(0));
assertCacheSize(1);
verifyFetchIndexInvocation(1);
// Here a new key metadataList(1) is invoked, that should call rsm#fetchIndex, making the count to 2
cache.getIndexEntry(metadataList.get(1));
assertCacheSize(2);
// Calling getIndex on the same entry should not call rsm#fetchIndex again, but it should retrieve from cache
cache.getIndexEntry(metadataList.get(1));
assertCacheSize(2);
verifyFetchIndexInvocation(2);
// Here a new key metadataList(2) is invoked, that should call rsm#fetchIndex
// The cache max size is 2, it will remove one entry and keep the overall size to 2
cache.getIndexEntry(metadataList.get(2));
assertCacheSize(2);
// Calling getIndex on the same entry may call rsm#fetchIndex or not, it depends on the cache implementation so
// we only need to verify the number of calling is in our range.
cache.getIndexEntry(metadataList.get(2));
assertCacheSize(2);
verifyFetchIndexInvocationWithRange(3, 4);
// Close the cache
cache.close();
// Reload the cache from the disk and check the cache size is same as earlier
RemoteIndexCache reloadedCache = new RemoteIndexCache(2 * estimateEntryBytesSize, rsm, logDir.toString());
assertEquals(2, reloadedCache.internalCache().asMap().size());
reloadedCache.close();
verifyNoMoreInteractions(rsm);
}
@Test
public void testRemoveItem() throws IOException {
RemoteLogSegmentId segmentId = rlsMetadata.remoteLogSegmentId();
Uuid segmentUuid = segmentId.id();
// generate and add entry to cache
RemoteIndexCache.Entry spyEntry = generateSpyCacheEntry(segmentId);
cache.internalCache().put(segmentUuid, spyEntry);
assertTrue(cache.internalCache().asMap().containsKey(segmentUuid));
assertFalse(spyEntry.isMarkedForCleanup());
cache.remove(segmentId.id());
assertFalse(cache.internalCache().asMap().containsKey(segmentUuid));
assertTrue(spyEntry::isMarkedForCleanup, "Failed to mark cache entry for cleanup after remove");
}
@Test
public void testRemoveNonExistentItem() throws IOException {
// generate and add entry to cache
RemoteLogSegmentId segmentId = rlsMetadata.remoteLogSegmentId();
Uuid segmentUuid = segmentId.id();
// generate and add entry to cache
RemoteIndexCache.Entry spyEntry = generateSpyCacheEntry(segmentId);
cache.internalCache().put(segmentUuid, spyEntry);
assertTrue(cache.internalCache().asMap().containsKey(segmentUuid));
// remove a random Uuid
cache.remove(Uuid.randomUuid());
assertTrue(cache.internalCache().asMap().containsKey(segmentUuid));
assertFalse(spyEntry.isMarkedForCleanup());
}
@Test
public void testRemoveMultipleItems() throws IOException {
// generate and add entry to cache
Map<Uuid, RemoteIndexCache.Entry> uuidAndEntryList = new HashMap<>();
for (int i = 0; i < 10; i++) {
RemoteLogSegmentId segmentId = RemoteLogSegmentId.generateNew(idPartition);
Uuid segmentUuid = segmentId.id();
RemoteIndexCache.Entry spyEntry = generateSpyCacheEntry(segmentId);
uuidAndEntryList.put(segmentUuid, spyEntry);
cache.internalCache().put(segmentUuid, spyEntry);
assertTrue(cache.internalCache().asMap().containsKey(segmentUuid));
assertFalse(spyEntry.isMarkedForCleanup());
}
cache.removeAll(uuidAndEntryList.keySet());
for (RemoteIndexCache.Entry entry : uuidAndEntryList.values()) {
assertTrue(entry::isMarkedForCleanup, "Failed to mark cache entry for cleanup after removeAll");
}
}
@Test
public void testClearCacheAndIndexFilesWhenResizeCache() throws InterruptedException {
TopicIdPartition tpId = new TopicIdPartition(Uuid.randomUuid(), new TopicPartition("foo", 0));
List<RemoteLogSegmentMetadata> metadataList = generateRemoteLogSegmentMetadata(1, tpId);
assertCacheSize(0);
// getIndex for first time will call rsm#fetchIndex
RemoteIndexCache.Entry cacheEntry = cache.getIndexEntry(metadataList.get(0));
assertCacheSize(1);
assertTrue(getIndexFileFromRemoteCacheDir(cache, LogFileUtils.INDEX_FILE_SUFFIX).isPresent());
assertTrue(getIndexFileFromRemoteCacheDir(cache, LogFileUtils.TXN_INDEX_FILE_SUFFIX).isPresent());
assertTrue(getIndexFileFromRemoteCacheDir(cache, LogFileUtils.TIME_INDEX_FILE_SUFFIX).isPresent());
cache.resizeCacheSize(1L);
// wait until entry is marked for deletion
TestUtils.waitForCondition(cacheEntry::isMarkedForCleanup,
"Failed to mark cache entry for cleanup after resizing cache.");
TestUtils.waitForCondition(cacheEntry::isCleanStarted,
"Failed to cleanup cache entry after resizing cache.");
// verify no index files on remote cache dir
TestUtils.waitForCondition(() -> getIndexFileFromRemoteCacheDir(cache, LogFileUtils.INDEX_FILE_SUFFIX).isEmpty(),
"Offset index file should not be present on disk at " + cache.cacheDir());
TestUtils.waitForCondition(() -> getIndexFileFromRemoteCacheDir(cache, LogFileUtils.TXN_INDEX_FILE_SUFFIX).isEmpty(),
"Txn index file should not be present on disk at " + cache.cacheDir());
TestUtils.waitForCondition(() -> getIndexFileFromRemoteCacheDir(cache, LogFileUtils.TIME_INDEX_FILE_SUFFIX).isEmpty(),
"Time index file should not be present on disk at " + cache.cacheDir());
TestUtils.waitForCondition(() -> getIndexFileFromRemoteCacheDir(cache, LogFileUtils.DELETED_FILE_SUFFIX).isEmpty(),
"Index file marked for deletion should not be present on disk at " + cache.cacheDir());
assertCacheSize(0);
}
@Test
public void testCorrectnessForCacheAndIndexFilesWhenResizeCache() throws IOException, InterruptedException, RemoteStorageException {
// The test process for resizing is: put 1 entry -> evict to empty -> put 3 entries with limited capacity of 2 entries ->
// evict to 1 entry -> resize to 1 entry size -> resize to 2 entries size
long estimateEntryBytesSize = estimateOneEntryBytesSize();
TopicIdPartition tpId = new TopicIdPartition(Uuid.randomUuid(), new TopicPartition("foo", 0));
List<RemoteLogSegmentMetadata> metadataList = generateRemoteLogSegmentMetadata(3, tpId);
assertCacheSize(0);
// getIndex for first time will call rsm#fetchIndex
RemoteIndexCache.Entry cacheEntry = cache.getIndexEntry(metadataList.get(0));
assertCacheSize(1);
assertTrue(getIndexFileFromRemoteCacheDir(cache, LogFileUtils.INDEX_FILE_SUFFIX).isPresent());
assertTrue(getIndexFileFromRemoteCacheDir(cache, LogFileUtils.TXN_INDEX_FILE_SUFFIX).isPresent());
assertTrue(getIndexFileFromRemoteCacheDir(cache, LogFileUtils.TIME_INDEX_FILE_SUFFIX).isPresent());
// Reduce the cache size to 1 byte to ensure that all the entries are evicted from it.
cache.resizeCacheSize(1L);
// wait until entry is marked for deletion
TestUtils.waitForCondition(cacheEntry::isMarkedForCleanup,
"Failed to mark cache entry for cleanup after resizing cache.");
TestUtils.waitForCondition(cacheEntry::isCleanStarted,
"Failed to cleanup cache entry after resizing cache.");
// verify no index files on remote cache dir
TestUtils.waitForCondition(() -> getIndexFileFromRemoteCacheDir(cache, LogFileUtils.INDEX_FILE_SUFFIX).isEmpty(),
"Offset index file should not be present on disk at " + cache.cacheDir());
TestUtils.waitForCondition(() -> getIndexFileFromRemoteCacheDir(cache, LogFileUtils.TXN_INDEX_FILE_SUFFIX).isEmpty(),
"Txn index file should not be present on disk at " + cache.cacheDir());
TestUtils.waitForCondition(() -> getIndexFileFromRemoteCacheDir(cache, LogFileUtils.TIME_INDEX_FILE_SUFFIX).isEmpty(),
"Time index file should not be present on disk at " + cache.cacheDir());
TestUtils.waitForCondition(() -> getIndexFileFromRemoteCacheDir(cache, LogFileUtils.DELETED_FILE_SUFFIX).isEmpty(),
"Index file marked for deletion should not be present on disk at " + cache.cacheDir());
assertCacheSize(0);
// Increase cache capacity to only store 2 entries
cache.resizeCacheSize(2 * estimateEntryBytesSize);
assertCacheSize(0);
RemoteIndexCache.Entry entry0 = cache.getIndexEntry(metadataList.get(0));
RemoteIndexCache.Entry entry1 = cache.getIndexEntry(metadataList.get(1));
RemoteIndexCache.Entry entry2 = cache.getIndexEntry(metadataList.get(2));
List<RemoteIndexCache.Entry> entries = List.of(entry0, entry1, entry2);
assertCacheSize(2);
EvictionResult result = verifyEntryIsEvicted(metadataList, entries, 1);
// Reduce cache capacity to only store 1 entry
cache.resizeCacheSize(1 * estimateEntryBytesSize);
assertCacheSize(1);
// After resize, we need to check an entry is deleted from cache and the existing segmentMetadata
List<RemoteIndexCache.Entry> entryInCache = entries.stream().filter(e -> !result.evictedEntries.contains(e)).toList();
List<RemoteLogSegmentMetadata> updatedSegmentMetadata = metadataList.stream().filter(e -> !result.evictedSegmentMetadata.contains(e)).toList();
verifyEntryIsEvicted(updatedSegmentMetadata, entryInCache, 1);
// resize to the same size, all entries should be kept
cache.resizeCacheSize(1 * estimateEntryBytesSize);
List<RemoteLogSegmentMetadata> entriesKept = getRemoteLogSegMetadataIsKept(metadataList);
// verify all existing entries (`cache.getIndexEntry(metadataList(2))`) are kept
verifyEntryIsKept(entriesKept);
assertCacheSize(1);
// increase the size
cache.resizeCacheSize(2 * estimateEntryBytesSize);
// verify all entries are kept
verifyEntryIsKept(entriesKept);
assertCacheSize(1);
}
private List<RemoteLogSegmentMetadata> getRemoteLogSegMetadataIsKept(List<RemoteLogSegmentMetadata> metadataToVerify) {
return metadataToVerify
.stream()
.filter(s -> cache.internalCache().asMap().containsKey(s.remoteLogSegmentId().id()))
.toList();
}
record EvictionResult(List<RemoteLogSegmentMetadata> evictedSegmentMetadata, List<RemoteIndexCache.Entry> evictedEntries) { }
private EvictionResult verifyEntryIsEvicted(List<RemoteLogSegmentMetadata> metadataToVerify, List<RemoteIndexCache.Entry> entriesToVerify, int numOfMarkAsDeleted) throws InterruptedException {
TestUtils.waitForCondition(() -> entriesToVerify.stream().filter(RemoteIndexCache.Entry::isMarkedForCleanup).count() == numOfMarkAsDeleted,
"Failed to mark evicted cache entry for cleanup after resizing cache.");
TestUtils.waitForCondition(() -> entriesToVerify.stream().filter(RemoteIndexCache.Entry::isCleanStarted).count() == numOfMarkAsDeleted,
"Failed to cleanup evicted cache entry after resizing cache.");
List<RemoteIndexCache.Entry> entriesIsMarkedForCleanup = entriesToVerify.stream().filter(RemoteIndexCache.Entry::isMarkedForCleanup).toList();
List<RemoteIndexCache.Entry> entriesIsCleanStarted = entriesToVerify.stream().filter(RemoteIndexCache.Entry::isCleanStarted).toList();
// clean up entries and clean start entries should be the same
assertEquals(entriesIsMarkedForCleanup, entriesIsCleanStarted);
// get the logSegMetadata are evicted
List<RemoteLogSegmentMetadata> metadataDeleted = metadataToVerify
.stream()
.filter(s -> !cache.internalCache().asMap().containsKey(s.remoteLogSegmentId().id()))
.toList();
assertEquals(numOfMarkAsDeleted, metadataDeleted.size());
for (RemoteLogSegmentMetadata metadata : metadataDeleted) {
// verify no index files for `entryToVerify` on remote cache dir
TestUtils.waitForCondition(() -> getIndexFileFromRemoteCacheDir(cache, remoteOffsetIndexFileName(metadata)).isEmpty(),
"Offset index file for evicted entry should not be present on disk at " + cache.cacheDir());
TestUtils.waitForCondition(() -> getIndexFileFromRemoteCacheDir(cache, remoteTimeIndexFileName(metadata)).isEmpty(),
"Time index file for evicted entry should not be present on disk at " + cache.cacheDir());
TestUtils.waitForCondition(() -> getIndexFileFromRemoteCacheDir(cache, remoteTransactionIndexFileName(metadata)).isEmpty(),
"Txn index file for evicted entry should not be present on disk at " + cache.cacheDir());
TestUtils.waitForCondition(() -> getIndexFileFromRemoteCacheDir(cache, LogFileUtils.DELETED_FILE_SUFFIX).isEmpty(),
"Index file marked for deletion for evicted entry should not be present on disk at " + cache.cacheDir());
}
return new EvictionResult(metadataDeleted, entriesIsMarkedForCleanup);
}
private void verifyEntryIsKept(List<RemoteLogSegmentMetadata> metadataToVerify) {
for (RemoteLogSegmentMetadata metadata : metadataToVerify) {
assertTrue(getIndexFileFromRemoteCacheDir(cache, remoteOffsetIndexFileName(metadata)).isPresent());
assertTrue(getIndexFileFromRemoteCacheDir(cache, remoteTimeIndexFileName(metadata)).isPresent());
assertTrue(getIndexFileFromRemoteCacheDir(cache, remoteTransactionIndexFileName(metadata)).isPresent());
assertTrue(getIndexFileFromRemoteCacheDir(cache, LogFileUtils.DELETED_FILE_SUFFIX).isEmpty());
}
}
@ParameterizedTest
@EnumSource(value = IndexType.class, names = {"OFFSET", "TIMESTAMP", "TRANSACTION"})
public void testCorruptCacheIndexFileExistsButNotInCache(IndexType indexType) throws IOException, RemoteStorageException {
// create Corrupted Index File in remote index cache
createCorruptedIndexFile(indexType, cache.cacheDir());
RemoteIndexCache.Entry entry = cache.getIndexEntry(rlsMetadata);
// Test would fail if it throws Exception other than CorruptIndexException
Path offsetIndexFile = entry.offsetIndex().file().toPath();
Path txnIndexFile = entry.txnIndex().file().toPath();
Path timeIndexFile = entry.timeIndex().file().toPath();
String expectedOffsetIndexFileName = remoteOffsetIndexFileName(rlsMetadata);
String expectedTimeIndexFileName = remoteTimeIndexFileName(rlsMetadata);
String expectedTxnIndexFileName = remoteTransactionIndexFileName(rlsMetadata);
assertEquals(expectedOffsetIndexFileName, offsetIndexFile.getFileName().toString());
assertEquals(expectedTxnIndexFileName, txnIndexFile.getFileName().toString());
assertEquals(expectedTimeIndexFileName, timeIndexFile.getFileName().toString());
// assert that parent directory for the index files is correct
assertEquals(DIR_NAME, offsetIndexFile.getParent().getFileName().toString(),
"offsetIndex=" + offsetIndexFile + " is created under incorrect parent");
assertEquals(DIR_NAME, txnIndexFile.getParent().getFileName().toString(),
"txnIndex=" + txnIndexFile + " is created under incorrect parent");
assertEquals(DIR_NAME, timeIndexFile.getParent().getFileName().toString(),
"timeIndex=" + timeIndexFile + " is created under incorrect parent");
// file is corrupted it should fetch from remote storage again
verifyFetchIndexInvocation(1);
}
@Test
@Flaky("KAFKA-19286")
public void testConcurrentRemoveReadForCache1() throws IOException, InterruptedException, ExecutionException {
// Create a spy Cache Entry
RemoteIndexCache.Entry spyEntry = generateSpyCacheEntry();
cache.internalCache().put(rlsMetadata.remoteLogSegmentId().id(), spyEntry);
assertCacheSize(1);
CountDownLatch latchForCacheRead = new CountDownLatch(1);
CountDownLatch latchForCacheRemove = new CountDownLatch(1);
CountDownLatch latchForTestWait = new CountDownLatch(1);
AtomicInteger cleanupCallCount = new AtomicInteger(0);
doAnswer(invocation -> {
cleanupCallCount.incrementAndGet();
if (cleanupCallCount.get() == 1) {
// Signal the CacheRead to unblock itself
latchForCacheRead.countDown();
// Wait for signal to start deleting the renamed files
latchForCacheRemove.await();
// Calling the cleanup() actual method to remove the renamed files
invocation.callRealMethod();
// Signal TestWait to unblock itself so that test can be completed
latchForTestWait.countDown();
}
return null;
}).when(spyEntry).cleanup();
Runnable removeCache = () -> cache.remove(rlsMetadata.remoteLogSegmentId().id());
Runnable readCache = () -> {
// Wait for signal to start CacheRead
try {
latchForCacheRead.await();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
cache.getIndexEntry(rlsMetadata);
// Signal the CacheRemove to start renaming the files
latchForCacheRemove.countDown();
};
executeConcurrentRemoveRead(removeCache, readCache, latchForTestWait);
}
@Test
public void testConcurrentRemoveReadForCache2() throws IOException, InterruptedException, ExecutionException {
RemoteIndexCache.Entry spyEntry = generateSpyCacheEntry();
cache.internalCache().put(rlsMetadata.remoteLogSegmentId().id(), spyEntry);
assertCacheSize(1);
CountDownLatch latchForCacheRead = new CountDownLatch(1);
CountDownLatch latchForCacheRemove = new CountDownLatch(1);
CountDownLatch latchForTestWait = new CountDownLatch(1);
AtomicInteger cleanupCallCount = new AtomicInteger(0);
doAnswer((InvocationOnMock invocation) -> {
cleanupCallCount.incrementAndGet();
if (cleanupCallCount.get() == 1) {
// Wait for signal to start renaming the files
latchForCacheRemove.await();
// Calling the cleanup() actual method to remove the renamed files
invocation.callRealMethod();
// Signal the CacheRead to unblock itself
latchForCacheRead.countDown();
}
return null;
}).when(spyEntry).cleanup();
Runnable removeCache = () -> cache.remove(rlsMetadata.remoteLogSegmentId().id());
Runnable readCache = () -> {
// Wait for signal to start CacheRead
latchForCacheRemove.countDown();
try {
latchForCacheRead.await();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
cache.getIndexEntry(rlsMetadata);
// Signal TestWait to unblock itself so that test can be completed
latchForTestWait.countDown();
};
executeConcurrentRemoveRead(removeCache, readCache, latchForTestWait);
}
private void executeConcurrentRemoveRead(Runnable removeCache,
Runnable readCache,
CountDownLatch latchForTestWait) throws InterruptedException, ExecutionException {
ExecutorService executor = Executors.newFixedThreadPool(2);
try {
Future<?> removeCacheFuture = executor.submit(removeCache);
Future<?> readCacheFuture = executor.submit(readCache);
// Verify both tasks are completed without any exception
removeCacheFuture.get();
readCacheFuture.get();
// Wait for signal to complete the test
latchForTestWait.await();
// Read or cleaner thread whichever goes first, the cache size should remain one:
// 1. If reader thread runs first, then it will fetch the entry from remote since the previous entry in
// local disk was renamed with ".deleted" as suffix. The previous and current entry objects are different.
// And, the cleaner thread should only remove the files with suffix as ".deleted".
// 2. If removal thread runs first, then it will remove the files with ".deleted" suffix. And, the reader
// thread will fetch the entry again from remote storage.
assertCacheSize(1);
} finally {
executor.shutdownNow();
}
}
@Test
public void testMultipleIndexEntriesExecutionInCorruptException() throws IOException, RemoteStorageException {
reset(rsm);
when(rsm.fetchIndex(any(RemoteLogSegmentMetadata.class), any(IndexType.class))).thenAnswer(ans -> {
RemoteLogSegmentMetadata metadata = ans.getArgument(0);
IndexType indexType = ans.getArgument(1);
OffsetIndex offsetIdx = createOffsetIndexForSegmentMetadata(metadata, tpDir);
TimeIndex timeIdx = createTimeIndexForSegmentMetadata(metadata, tpDir);
TransactionIndex txnIdx = createTxIndexForSegmentMetadata(metadata, tpDir);
maybeAppendIndexEntries(offsetIdx, timeIdx);
// Create corrupted index file
createCorruptTimeIndexOffsetFile(tpDir);
return switch (indexType) {
case OFFSET -> new FileInputStream(offsetIdx.file());
case TIMESTAMP -> new FileInputStream(timeIdx.file());
case TRANSACTION -> new FileInputStream(txnIdx.file());
case LEADER_EPOCH -> null; // leader-epoch-cache is not accessed.
case PRODUCER_SNAPSHOT -> null; // producer-snapshot is not accessed.
};
});
assertThrows(CorruptIndexException.class, () -> cache.getIndexEntry(rlsMetadata));
assertNull(cache.internalCache().getIfPresent(rlsMetadata.remoteLogSegmentId().id()));
verifyFetchIndexInvocation(1, List.of(IndexType.OFFSET, IndexType.TIMESTAMP));
verifyFetchIndexInvocation(0, List.of(IndexType.TRANSACTION));
// Current status
// (cache is null)
// RemoteCacheDir contain
// 1. Offset Index File is fine and not corrupted
// 2. Time Index File is corrupted
// What should be the code flow in next execution
// 1. No rsm call for fetching OffSet Index File.
// 2. Time index file should be fetched from remote storage again as it is corrupted in the first execution.
// 3. Transaction index file should be fetched from remote storage.
reset(rsm);
// delete all files created in tpDir
List<Path> paths = Files.walk(tpDir.toPath(), 1)
.filter(Files::isRegularFile)
.toList();
for (Path path : paths) {
Files.deleteIfExists(path);
}
// rsm should return no corrupted file in the 2nd execution
mockRsmFetchIndex(rsm);
cache.getIndexEntry(rlsMetadata);
// rsm should not be called to fetch offset Index
verifyFetchIndexInvocation(0, List.of(IndexType.OFFSET));
verifyFetchIndexInvocation(1, List.of(IndexType.TIMESTAMP));
// Transaction index would be fetched again
// as previous getIndexEntry failed before fetchTransactionIndex
verifyFetchIndexInvocation(1, List.of(IndexType.TRANSACTION));
}
@Test
public void testIndexFileAlreadyExistOnDiskButNotInCache() throws InterruptedException, IOException, RemoteStorageException {
File remoteIndexCacheDir = cache.cacheDir();
String tempSuffix = ".tmptest";
RemoteIndexCache.Entry entry = cache.getIndexEntry(rlsMetadata);
verifyFetchIndexInvocation(1);
// copy files with temporary name
Path tmpOffsetIdxPath = Files.copy(entry.offsetIndex().file().toPath(), Paths.get(Utils.replaceSuffix(entry.offsetIndex().file().getPath(), "", tempSuffix)));
Path tmpTxnIdxPath = Files.copy(entry.txnIndex().file().toPath(), Paths.get(Utils.replaceSuffix(entry.txnIndex().file().getPath(), "", tempSuffix)));
Path tmpTimeIdxPath = Files.copy(entry.timeIndex().file().toPath(), Paths.get(Utils.replaceSuffix(entry.timeIndex().file().getPath(), "", tempSuffix)));
cache.remove(rlsMetadata.remoteLogSegmentId().id());
// wait until entry is marked for deletion
TestUtils.waitForCondition(entry::isMarkedForCleanup,
"Failed to mark cache entry for cleanup after invalidation");
TestUtils.waitForCondition(entry::isCleanStarted,
"Failed to cleanup cache entry after invalidation");
// restore index files
renameRemoteCacheIndexFileFromDisk(tmpOffsetIdxPath, tmpTxnIdxPath, tmpTimeIdxPath, tempSuffix);
// validate cache entry for the above key should be null
assertNull(cache.internalCache().getIfPresent(rlsMetadata.remoteLogSegmentId().id()));
cache.getIndexEntry(rlsMetadata);
// Index Files already exist ,rsm should not fetch them again.
verifyFetchIndexInvocation(1);
// verify index files on disk
assertTrue(getIndexFileFromRemoteCacheDir(cache, LogFileUtils.INDEX_FILE_SUFFIX).isPresent(), "Offset index file should be present on disk at " + remoteIndexCacheDir.toPath());
assertTrue(getIndexFileFromRemoteCacheDir(cache, LogFileUtils.TXN_INDEX_FILE_SUFFIX).isPresent(), "Txn index file should be present on disk at " + remoteIndexCacheDir.toPath());
assertTrue(getIndexFileFromRemoteCacheDir(cache, LogFileUtils.TIME_INDEX_FILE_SUFFIX).isPresent(), "Time index file should be present on disk at " + remoteIndexCacheDir.toPath());
}
private void renameRemoteCacheIndexFileFromDisk(Path tmpOffsetIdxFile,
Path tmpTxnIdxFile,
Path tmpTimeIdxFile,
String tempSuffix) throws IOException {
for (Path path : new Path[]{tmpOffsetIdxFile, tmpTxnIdxFile, tmpTimeIdxFile}) {
Utils.atomicMoveWithFallback(path,
path.resolveSibling(path.getFileName().toString().replace(tempSuffix, "")));
}
}
@ParameterizedTest
@EnumSource(value = IndexType.class, names = {"OFFSET", "TIMESTAMP", "TRANSACTION"})
public void testRSMReturnCorruptedIndexFile(IndexType testIndexType) throws RemoteStorageException {
when(rsm.fetchIndex(any(RemoteLogSegmentMetadata.class), any(IndexType.class))).thenAnswer(ans -> {
RemoteLogSegmentMetadata metadata = ans.getArgument(0);
IndexType indexType = ans.getArgument(1);
OffsetIndex offsetIdx = createOffsetIndexForSegmentMetadata(metadata, tpDir);
TimeIndex timeIdx = createTimeIndexForSegmentMetadata(metadata, tpDir);
TransactionIndex txnIdx = createTxIndexForSegmentMetadata(metadata, tpDir);
maybeAppendIndexEntries(offsetIdx, timeIdx);
// Create corrupt index file return from RSM
createCorruptedIndexFile(testIndexType, tpDir);
return switch (indexType) {
case OFFSET -> new FileInputStream(offsetIdx.file());
case TIMESTAMP -> new FileInputStream(timeIdx.file());
case TRANSACTION -> new FileInputStream(txnIdx.file());
case LEADER_EPOCH -> null; // leader-epoch-cache is not accessed.
case PRODUCER_SNAPSHOT -> null; // producer-snapshot is not accessed.
};
});
assertThrows(CorruptIndexException.class, () -> cache.getIndexEntry(rlsMetadata));
}
@Test
public void testConcurrentCacheDeletedFileExists() throws InterruptedException, IOException {
File remoteIndexCacheDir = cache.cacheDir();
RemoteIndexCache.Entry entry = cache.getIndexEntry(rlsMetadata);
// verify index files on disk
assertTrue(getIndexFileFromRemoteCacheDir(cache, LogFileUtils.INDEX_FILE_SUFFIX).isPresent(), "Offset index file should be present on disk at " + remoteIndexCacheDir.toPath());
assertTrue(getIndexFileFromRemoteCacheDir(cache, LogFileUtils.TXN_INDEX_FILE_SUFFIX).isPresent(), "Txn index file should be present on disk at " + remoteIndexCacheDir.toPath());
assertTrue(getIndexFileFromRemoteCacheDir(cache, LogFileUtils.TIME_INDEX_FILE_SUFFIX).isPresent(), "Time index file should be present on disk at " + remoteIndexCacheDir.toPath());
// Simulating a concurrency issue where deleted files already exist on disk
// This happens when cleanerThread is slow and not able to delete index entries
// while same index Entry is cached again and invalidated.
// The new deleted file created should be replaced by existing deleted file.
// create deleted suffix file
Files.copy(entry.offsetIndex().file().toPath(), Paths.get(Utils.replaceSuffix(entry.offsetIndex().file().getPath(), "", LogFileUtils.DELETED_FILE_SUFFIX)));
Files.copy(entry.txnIndex().file().toPath(), Paths.get(Utils.replaceSuffix(entry.txnIndex().file().getPath(), "", LogFileUtils.DELETED_FILE_SUFFIX)));
Files.copy(entry.timeIndex().file().toPath(), Paths.get(Utils.replaceSuffix(entry.timeIndex().file().getPath(), "", LogFileUtils.DELETED_FILE_SUFFIX)));
// verify deleted file exists on disk
assertTrue(getIndexFileFromRemoteCacheDir(cache, LogFileUtils.DELETED_FILE_SUFFIX).isPresent(), "Deleted Offset index file should be present on disk at " + remoteIndexCacheDir.toPath());
cache.remove(rlsMetadata.remoteLogSegmentId().id());
// wait until entry is marked for deletion
TestUtils.waitForCondition(entry::isMarkedForCleanup,
"Failed to mark cache entry for cleanup after invalidation");
TestUtils.waitForCondition(entry::isCleanStarted,
"Failed to cleanup cache entry after invalidation");
// verify no index files on disk
TestUtils.waitForCondition(() -> getIndexFileFromRemoteCacheDir(cache, LogFileUtils.INDEX_FILE_SUFFIX).isEmpty(),
"Offset index file should not be present on disk at " + remoteIndexCacheDir.toPath());
TestUtils.waitForCondition(() -> getIndexFileFromRemoteCacheDir(cache, LogFileUtils.TXN_INDEX_FILE_SUFFIX).isEmpty(),
"Txn index file should not be present on disk at " + remoteIndexCacheDir.toPath());
TestUtils.waitForCondition(() -> getIndexFileFromRemoteCacheDir(cache, LogFileUtils.TIME_INDEX_FILE_SUFFIX).isEmpty(),
"Time index file should not be present on disk at " + remoteIndexCacheDir.toPath());
TestUtils.waitForCondition(() -> getIndexFileFromRemoteCacheDir(cache, LogFileUtils.DELETED_FILE_SUFFIX).isEmpty(),
"Index file marked for deletion should not be present on disk at " + remoteIndexCacheDir.toPath());
}
@Test
public void testDeleteInvalidIndexFilesOnInit() throws IOException {
File cacheDir = cache.cacheDir();
long baseOffset = 100L;
UUID uuid = UUID.randomUUID();
String invalidOffsetIdxFilename = String.format("%s_%s%s%s", baseOffset, uuid, LogFileUtils.INDEX_FILE_SUFFIX, LogFileUtils.DELETED_FILE_SUFFIX);
File invalidOffsetIdxFile = new File(cacheDir, invalidOffsetIdxFilename);
invalidOffsetIdxFile.createNewFile();
String invalidTimeIdxFilename = String.format("%s_%s%s%s", baseOffset, uuid, LogFileUtils.TIME_INDEX_FILE_SUFFIX, ".tmp");
File invalidTimeIndexFile = new File(cacheDir, invalidTimeIdxFilename);
invalidTimeIndexFile.createNewFile();
RemoteLogSegmentMetadata rlsMetadata = new RemoteLogSegmentMetadata(RemoteLogSegmentId.generateNew(idPartition), baseOffset + 100,
lastOffset, time.milliseconds(), brokerId, time.milliseconds(), segmentSize, Collections.singletonMap(0, 0L));
OffsetIndex validOffsetIdx = createOffsetIndexForSegmentMetadata(rlsMetadata, logDir);
TransactionIndex validTimeIdx = createTxIndexForSegmentMetadata(rlsMetadata, logDir);
new RemoteIndexCache(defaultRemoteIndexCacheSizeBytes, rsm, logDir.toString());
assertFalse(invalidOffsetIdxFile.exists());
assertFalse(invalidTimeIndexFile.exists());
assertTrue(validOffsetIdx.file().exists());
assertTrue(validTimeIdx.file().exists());
}
@Test
public void testFetchIndexAccessibleWhenMarkedForCleanup() throws IOException, RemoteStorageException {
// setting the delayMs to a large value to disable file deletion by scheduler thread to have deterministic test
cache.setFileDeleteDelayMs(300_000);
File cacheDir = cache.cacheDir();
Uuid segmentUuid = rlsMetadata.remoteLogSegmentId().id();
RemoteIndexCache.Entry indexEntry = cache.getIndexEntry(rlsMetadata);
cache.remove(segmentUuid);
// Once marked for cleanup, the 3 index files should be renamed with ".deleted" suffix
assertEquals(3, countFiles(cacheDir, name -> true));
assertEquals(3, countFiles(cacheDir,
name -> name.contains(segmentUuid.toString()) && name.endsWith(LogFileUtils.DELETED_FILE_SUFFIX)));
// Ensure that the `indexEntry` object still able to access the renamed index files after being marked for deletion
OffsetPosition offsetPosition = indexEntry.offsetIndex().entry(2);
assertEquals(offsetPosition.position(), indexEntry.lookupOffset(offsetPosition.offset()).position());
assertNull(cache.internalCache().asMap().get(segmentUuid));
verifyFetchIndexInvocation(1);
// Once the entry gets removed from cache, the subsequent call to the cache should re-fetch the entry from remote.
assertEquals(offsetPosition.position(), cache.lookupOffset(rlsMetadata, offsetPosition.offset()));
verifyFetchIndexInvocation(2);
RemoteIndexCache.Entry indexEntry2 = cache.getIndexEntry(rlsMetadata);
assertNotNull(indexEntry2);
verifyFetchIndexInvocation(2);
// There will be 6 files in the remote-log-index-cache dir: 3 original index files and 3 files with ".deleted" suffix
assertEquals(6, countFiles(cacheDir, name -> true));
assertEquals(3, countFiles(cacheDir,
name -> name.contains(segmentUuid.toString()) && !name.endsWith(LogFileUtils.DELETED_FILE_SUFFIX)));
assertEquals(3, countFiles(cacheDir,
name -> name.contains(segmentUuid.toString()) && name.endsWith(LogFileUtils.DELETED_FILE_SUFFIX)));
// Once the indexEntry2 is marked for cleanup, the 3 index files should be renamed with ".deleted" suffix.
// Both indexEntry and indexEntry2 should be able to access the renamed index files.
cache.remove(segmentUuid);
assertEquals(3, countFiles(cacheDir, name -> true));
assertEquals(3, countFiles(cacheDir,
name -> name.contains(segmentUuid.toString()) && name.endsWith(LogFileUtils.DELETED_FILE_SUFFIX)));
assertEquals(offsetPosition.position(), indexEntry.lookupOffset(offsetPosition.offset()).position());
assertEquals(offsetPosition.position(), indexEntry2.lookupOffset(offsetPosition.offset()).position());
indexEntry.cleanup();
assertEquals(0, countFiles(cacheDir, name -> true));
assertThrows(IllegalStateException.class, () -> indexEntry.lookupOffset(offsetPosition.offset()));
assertEquals(offsetPosition.position(), indexEntry2.lookupOffset(offsetPosition.offset()).position());
indexEntry2.cleanup();
assertEquals(0, countFiles(cacheDir, name -> true));
assertThrows(IllegalStateException.class, () -> indexEntry.lookupOffset(offsetPosition.offset()));
assertThrows(IllegalStateException.class, () -> indexEntry2.lookupOffset(offsetPosition.offset()));
}
private int countFiles(File cacheDir, Predicate<String> condition) {
return Objects.requireNonNull(cacheDir.listFiles((dir, name) -> condition.test(name))).length;
}
private RemoteIndexCache.Entry generateSpyCacheEntry() throws IOException {
return generateSpyCacheEntry(RemoteLogSegmentId.generateNew(idPartition));
}
private RemoteIndexCache.Entry generateSpyCacheEntry(RemoteLogSegmentId remoteLogSegmentId) throws IOException {
return generateSpyCacheEntry(remoteLogSegmentId, new File(logDir, DIR_NAME));
}
private RemoteIndexCache.Entry generateSpyCacheEntry(RemoteLogSegmentId remoteLogSegmentId,
File dir) throws IOException {
RemoteLogSegmentMetadata rlsMetadata = new RemoteLogSegmentMetadata(remoteLogSegmentId, baseOffset,
lastOffset, time.milliseconds(), brokerId, time.milliseconds(),
segmentSize, Collections.singletonMap(0, 0L));
TimeIndex timeIndex = spy(createTimeIndexForSegmentMetadata(rlsMetadata, dir));
TransactionIndex txIndex = spy(createTxIndexForSegmentMetadata(rlsMetadata, dir));
OffsetIndex offsetIndex = spy(createOffsetIndexForSegmentMetadata(rlsMetadata, dir));
return spy(new RemoteIndexCache.Entry(offsetIndex, timeIndex, txIndex));
}
private void assertAtLeastOnePresent(RemoteIndexCache cache, Uuid... uuids) {
for (Uuid uuid : uuids) {
if (cache.internalCache().asMap().containsKey(uuid)) return;
}
fail("all uuids are not present in cache");
}
private void assertCacheSize(int expectedSize) throws InterruptedException {
// Cache may grow beyond the size temporarily while evicting, hence, run in a loop to validate
// that cache reaches correct state eventually
TestUtils.waitForCondition(() -> cache.internalCache().asMap().size() == expectedSize,
"cache did not adhere to expected size of " + expectedSize);
}
private void verifyFetchIndexInvocation(int count) throws RemoteStorageException {
verifyFetchIndexInvocation(count, List.of(IndexType.OFFSET, IndexType.TIMESTAMP, IndexType.TRANSACTION));
}
private void verifyFetchIndexInvocation(int count, List<IndexType> indexTypes) throws RemoteStorageException {
for (IndexType indexType : indexTypes) {
verify(rsm, times(count)).fetchIndex(any(RemoteLogSegmentMetadata.class), eq(indexType));
}
}
private void verifyFetchIndexInvocationWithRange(int lower, int upper) throws RemoteStorageException {
List<IndexType> types = List.of(IndexType.OFFSET, IndexType.TIMESTAMP, IndexType.TRANSACTION);
for (IndexType indexType : types) {
verify(rsm, atLeast(lower)).fetchIndex(any(RemoteLogSegmentMetadata.class), eq(indexType));
verify(rsm, atMost(upper)).fetchIndex(any(RemoteLogSegmentMetadata.class), eq(indexType));
}
}
private TransactionIndex createTxIndexForSegmentMetadata(RemoteLogSegmentMetadata metadata, File dir) throws IOException {
File txnIdxFile = remoteTransactionIndexFile(dir, metadata);
txnIdxFile.createNewFile();
return new TransactionIndex(metadata.startOffset(), txnIdxFile);
}
private void createCorruptTxnIndexForSegmentMetadata(File dir, RemoteLogSegmentMetadata metadata) throws IOException {
File txnIdxFile = remoteTransactionIndexFile(dir, metadata);
txnIdxFile.createNewFile();
TransactionIndex txnIndex = new TransactionIndex(metadata.startOffset(), txnIdxFile);
List<AbortedTxn> abortedTxns = List.of(
new AbortedTxn(0L, 0, 10, 11),
new AbortedTxn(1L, 5, 15, 13),
new AbortedTxn(2L, 18, 35, 25),
new AbortedTxn(3L, 32, 50, 40));
for (AbortedTxn abortedTxn : abortedTxns) {
txnIndex.append(abortedTxn);
}
txnIndex.close();
// open the index with a different starting offset to fake invalid data
new TransactionIndex(100L, txnIdxFile);
}
private TimeIndex createTimeIndexForSegmentMetadata(RemoteLogSegmentMetadata metadata, File dir) throws IOException {
int maxEntries = (int) (metadata.endOffset() - metadata.startOffset());
return new TimeIndex(remoteTimeIndexFile(dir, metadata), metadata.startOffset(), maxEntries * 12);
}
private OffsetIndex createOffsetIndexForSegmentMetadata(RemoteLogSegmentMetadata metadata, File dir) throws IOException {
int maxEntries = (int) (metadata.endOffset() - metadata.startOffset());
return new OffsetIndex(remoteOffsetIndexFile(dir, metadata), metadata.startOffset(), maxEntries * 8);
}
private List<RemoteLogSegmentMetadata> generateRemoteLogSegmentMetadata(int size, TopicIdPartition tpId) {
List<RemoteLogSegmentMetadata> metadataList = new ArrayList<>();
for (int i = 0; i < size; i++) {
metadataList.add(new RemoteLogSegmentMetadata(new RemoteLogSegmentId(tpId, Uuid.randomUuid()), baseOffset * i, baseOffset * i + 10, time.milliseconds(), brokerId, time.milliseconds(), segmentSize, Collections.singletonMap(0, 0L)));
}
return metadataList;
}
private void maybeAppendIndexEntries(OffsetIndex offsetIndex, TimeIndex timeIndex) {
if (!offsetIndex.isFull()) {
long curTime = time.milliseconds();
for (int i = 0; i < offsetIndex.maxEntries(); i++) {
long offset = offsetIndex.baseOffset() + i;
offsetIndex.append(offset, i);
timeIndex.maybeAppend(curTime + i, offset, true);
}
offsetIndex.flush();
timeIndex.flush();
}
}
private long estimateOneEntryBytesSize() throws IOException, RemoteStorageException {
TopicPartition tp = new TopicPartition("estimate-entry-bytes-size", 0);
TopicIdPartition tpId = new TopicIdPartition(Uuid.randomUuid(), tp);
RemoteStorageManager rsm = mock(RemoteStorageManager.class);
mockRsmFetchIndex(rsm);
RemoteIndexCache cache = new RemoteIndexCache(2L, rsm, logDir.toString());
List<RemoteLogSegmentMetadata> metadataList = generateRemoteLogSegmentMetadata(1, tpId);
RemoteIndexCache.Entry entry = cache.getIndexEntry(metadataList.get(0));
long entrySizeInBytes = entry.entrySizeBytes();
entry.markForCleanup();
entry.cleanup();
Utils.closeQuietly(cache, "RemoteIndexCache created for estimating entry size");
return entrySizeInBytes;
}
private void mockRsmFetchIndex(RemoteStorageManager rsm) throws RemoteStorageException {
when(rsm.fetchIndex(any(RemoteLogSegmentMetadata.class), any(IndexType.class))).thenAnswer(ans -> {
RemoteLogSegmentMetadata metadata = ans.getArgument(0);
IndexType indexType = ans.getArgument(1);
OffsetIndex offsetIdx = createOffsetIndexForSegmentMetadata(metadata, tpDir);
TimeIndex timeIdx = createTimeIndexForSegmentMetadata(metadata, tpDir);
TransactionIndex txnIdx = createTxIndexForSegmentMetadata(metadata, tpDir);
maybeAppendIndexEntries(offsetIdx, timeIdx);
return switch (indexType) {
case OFFSET -> new FileInputStream(offsetIdx.file());
case TIMESTAMP -> new FileInputStream(timeIdx.file());
case TRANSACTION -> new FileInputStream(txnIdx.file());
case LEADER_EPOCH -> null; // leader-epoch-cache is not accessed.
case PRODUCER_SNAPSHOT -> null; // producer-snapshot is not accessed.
};
});
}
private void createCorruptOffsetIndexFile(File dir) throws FileNotFoundException {
PrintWriter pw = new PrintWriter(remoteOffsetIndexFile(dir, rlsMetadata));
pw.write("Hello, world");
// The size of the string written in the file is 12 bytes,
// but it should be multiple of Offset Index EntrySIZE which is equal to 8.
pw.close();
}
private void createCorruptTimeIndexOffsetFile(File dir) throws FileNotFoundException {
PrintWriter pw = new PrintWriter(remoteTimeIndexFile(dir, rlsMetadata));
pw.write("Hello, world1");
// The size of the string written in the file is 13 bytes,
// but it should be multiple of Time Index EntrySIZE which is equal to 12.
pw.close();
}
private void createCorruptedIndexFile(RemoteStorageManager.IndexType indexType, File dir) throws IOException {
if (indexType == RemoteStorageManager.IndexType.OFFSET) {
createCorruptOffsetIndexFile(dir);
} else if (indexType == IndexType.TIMESTAMP) {
createCorruptTimeIndexOffsetFile(dir);
} else if (indexType == IndexType.TRANSACTION) {
createCorruptTxnIndexForSegmentMetadata(dir, rlsMetadata);
}
}
private Optional<Path> getIndexFileFromRemoteCacheDir(RemoteIndexCache cache, String suffix) {
try {
return Files.walk(cache.cacheDir().toPath())
.filter(Files::isRegularFile)
.filter(path -> path.getFileName().toString().endsWith(suffix))
.findAny();
} catch (IOException exc) {
return Optional.empty();
}
}
private Set<Thread> getRunningCleanerThread() {
return Thread.getAllStackTraces().keySet()
.stream()
.filter(t -> t.isAlive() && t.getName().startsWith(REMOTE_LOG_INDEX_CACHE_CLEANER_THREAD))
.collect(Collectors.toSet());
}
}
| RemoteIndexCacheTest |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/LayoutVersion.java | {
"start": 1027,
"end": 1556
} | class ____ changes in the layout version of HDFS.
*
* Layout version is changed for following reasons:
* <ol>
* <li>The layout of how namenode or datanode stores information
* on disk changes.</li>
* <li>A new operation code is added to the editlog.</li>
* <li>Modification such as format of a record, content of a record
* in editlog or fsimage.</li>
* </ol>
* <br>
* <b>How to update layout version:<br></b>
* When a change requires new layout version, please add an entry into
* {@link Feature} with a short | tracks |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/support/TestConstructorUtilsTests.java | {
"start": 1442,
"end": 3635
} | class ____ {
private static final PropertyProvider propertyProvider = name -> null;
@AfterEach
void clearGlobalFlag() {
setGlobalFlag(null);
}
@Test
void notAutowirable() throws Exception {
assertNotAutowirable(NotAutowirableTestCase.class);
}
@Test
void autowiredAnnotation() throws Exception {
assertAutowirable(AutowiredAnnotationTestCase.class);
}
@Test
void testConstructorAnnotation() throws Exception {
assertAutowirable(TestConstructorAnnotationTestCase.class);
}
@Test
void testConstructorAsMetaAnnotation() throws Exception {
assertAutowirable(TestConstructorAsMetaAnnotationTestCase.class);
}
@Test
void automaticallyAutowired() throws Exception {
setGlobalFlag();
assertAutowirable(AutomaticallyAutowiredTestCase.class);
}
@Test
void automaticallyAutowiredButOverriddenLocally() throws Exception {
setGlobalFlag();
assertNotAutowirable(TestConstructorAnnotationOverridesGlobalFlagTestCase.class);
}
@Test
void globalFlagVariations() throws Exception {
Class<?> testClass = AutomaticallyAutowiredTestCase.class;
setGlobalFlag(ALL.name());
assertAutowirable(testClass);
setGlobalFlag(ALL.name().toLowerCase());
assertAutowirable(testClass);
setGlobalFlag("\t" + ALL.name().toLowerCase() + " ");
assertAutowirable(testClass);
setGlobalFlag("bogus");
assertNotAutowirable(testClass);
setGlobalFlag(" ");
assertNotAutowirable(testClass);
}
private void assertAutowirable(Class<?> testClass) throws NoSuchMethodException {
Constructor<?> constructor = testClass.getDeclaredConstructor();
assertThat(TestConstructorUtils.isAutowirableConstructor(constructor, propertyProvider)).isTrue();
}
private void assertNotAutowirable(Class<?> testClass) throws NoSuchMethodException {
Constructor<?> constructor = testClass.getDeclaredConstructor();
assertThat(TestConstructorUtils.isAutowirableConstructor(constructor, propertyProvider)).isFalse();
}
private void setGlobalFlag() {
setGlobalFlag(ALL.name());
}
private void setGlobalFlag(String flag) {
SpringProperties.setProperty(TestConstructor.TEST_CONSTRUCTOR_AUTOWIRE_MODE_PROPERTY_NAME, flag);
}
static | TestConstructorUtilsTests |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-dubbo/src/main/java/org/apache/dubbo/rpc/protocol/dubbo/pu/DubboDetector.java | {
"start": 1160,
"end": 1801
} | class ____ implements ProtocolDetector {
private final ChannelBuffer Preface =
new ByteBufferBackedChannelBuffer(ByteBuffer.wrap(new byte[] {(byte) 0xda, (byte) 0xbb}));
@Override
public Result detect(ChannelBuffer in) {
int prefaceLen = Preface.readableBytes();
int bytesRead = min(in.readableBytes(), prefaceLen);
if (bytesRead == 0 || !ChannelBuffers.prefixEquals(in, Preface, bytesRead)) {
return Result.unrecognized();
}
if (bytesRead == prefaceLen) {
return Result.recognized();
}
return Result.needMoreData();
}
}
| DubboDetector |
java | mybatis__mybatis-3 | src/main/java/org/apache/ibatis/io/JBoss6VFS.java | {
"start": 1348,
"end": 2303
} | class ____ {
static Class<?> VirtualFile;
static Method getPathNameRelativeTo;
static Method getChildrenRecursively;
Object virtualFile;
VirtualFile(Object virtualFile) {
this.virtualFile = virtualFile;
}
String getPathNameRelativeTo(VirtualFile parent) {
try {
return invoke(getPathNameRelativeTo, virtualFile, parent.virtualFile);
} catch (IOException e) {
// This exception is not thrown by the called method
log.error("This should not be possible. VirtualFile.getPathNameRelativeTo() threw IOException.");
return null;
}
}
List<VirtualFile> getChildren() throws IOException {
List<?> objects = invoke(getChildrenRecursively, virtualFile);
List<VirtualFile> children = new ArrayList<>(objects.size());
for (Object object : objects) {
children.add(new VirtualFile(object));
}
return children;
}
}
/** A | VirtualFile |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/functions/aggregation/Comparator.java | {
"start": 2085,
"end": 2518
} | class ____ extends Comparator {
private static final long serialVersionUID = 1L;
@Override
public <R> int isExtremal(Comparable<R> o1, R o2) {
int c = o1.compareTo(o2);
if (c > 0) {
return 1;
}
if (c == 0) {
return 0;
} else {
return -1;
}
}
}
private static | MaxByComparator |
java | hibernate__hibernate-orm | hibernate-envers/src/main/java/org/hibernate/envers/internal/tools/OrmTools.java | {
"start": 657,
"end": 2253
} | class ____ {
public static Object loadAuditEntity(String entityName, Object id, SharedSessionContractImplementor session) {
if ( session instanceof SessionImplementor statefulSession ) {
return statefulSession.getReference( entityName, id );
}
else if ( session instanceof StatelessSession statelessSession ) {
return statelessSession.get( entityName, id );
}
else {
throw unexpectedSessionType( session );
}
}
public static void saveData(
String auditEntityName,
Object data,
SharedSessionContractImplementor session) {
if ( session instanceof SessionImplementor statefulSession ) {
statefulSession.persist( auditEntityName, data );
}
else if ( session instanceof StatelessSession statelessSession ) {
statelessSession.insert( auditEntityName, data );
}
else {
throw unexpectedSessionType( session );
}
}
public static void removeData(Object data, SharedSessionContractImplementor session) {
if ( session instanceof SessionImplementor statefulSession ) {
statefulSession.remove( data );
}
else if ( session instanceof StatelessSessionImplementor statelessSession ) {
statelessSession.delete( data );
}
else {
unexpectedSessionType( session );
}
}
private static UnsupportedOperationException unexpectedSessionType(SharedSessionContractImplementor session) {
throw new UnsupportedOperationException( String.format(
Locale.ROOT,
"Unexpected argument type (`%s`); expecting `%s` or `%s`",
session.getClass().getName(),
Session.class.getName(),
StatelessSession.class.getName()
) );
}
}
| OrmTools |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/utils/JavaUserDefinedScalarFunctions.java | {
"start": 8936,
"end": 9435
} | class ____ extends ScalarFunction
implements PythonFunction {
@DataTypeHint("TIMESTAMP(3)")
public LocalDateTime eval(@DataTypeHint(inputGroup = InputGroup.ANY) Object... o) {
return LocalDateTime.now();
}
@Override
public byte[] getSerializedPythonFunction() {
return new byte[0];
}
@Override
public PythonEnv getPythonEnv() {
return null;
}
}
}
| PythonTimestampScalarFunction |
java | quarkusio__quarkus | extensions/devui/deployment-spi/src/main/java/io/quarkus/devui/spi/page/RawDataPageBuilder.java | {
"start": 43,
"end": 333
} | class ____ extends BuildTimeDataPageBuilder<RawDataPageBuilder> {
private static final String QWC_DATA_RAW_PAGE_JS = "qwc-data-raw-page.js";
protected RawDataPageBuilder(String title) {
super(title);
super.componentLink = QWC_DATA_RAW_PAGE_JS;
}
} | RawDataPageBuilder |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ext/javatime/deser/OffsetTimeDeserTest.java | {
"start": 1049,
"end": 1149
} | class ____ {
public String name;
public List<Pojo45> objects;
}
static | Pojo45s |
java | elastic__elasticsearch | x-pack/qa/runtime-fields/core-with-search/src/yamlRestTest/java/org/elasticsearch/xpack/runtimefields/test/search/CoreTestsWithSearchRuntimeFieldsIT.java | {
"start": 1443,
"end": 2146
} | class ____ extends ESClientYamlSuiteTestCase {
public CoreTestsWithSearchRuntimeFieldsIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
super(testCandidate);
}
@ParametersFactory
public static Iterable<Object[]> parameters() throws Exception {
return new SearchRequestRuntimeFieldTranslater().parameters();
}
/**
* Translating the tests is fairly difficult here because instead of ES
* tracking the mappings we have to track them. We don't have to do it as
* well as ES, just well enough that we can decorate the search requests
* with types that make most tests "just work".
*/
private static | CoreTestsWithSearchRuntimeFieldsIT |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/config/ConfigurationListener.java | {
"start": 950,
"end": 1037
} | interface ____ {
void onChange(Reconfigurable reconfigurable);
}
| ConfigurationListener |
java | junit-team__junit5 | documentation/src/test/java/example/testinterface/TestInterfaceDemo.java | {
"start": 484,
"end": 722
} | class ____ implements TestLifecycleLogger,
TimeExecutionLogger, TestInterfaceDynamicTestsDemo {
@Test
void isEqualValue() {
assertEquals(1, "a".length(), "is always equal");
}
}
// end::user_guide[]
// @formatter:on
| TestInterfaceDemo |
java | spring-projects__spring-boot | module/spring-boot-micrometer-metrics/src/test/java/org/springframework/boot/micrometer/metrics/autoconfigure/export/datadog/DatadogPropertiesConfigAdapterTests.java | {
"start": 1045,
"end": 2845
} | class ____
extends StepRegistryPropertiesConfigAdapterTests<DatadogProperties, DatadogPropertiesConfigAdapter> {
DatadogPropertiesConfigAdapterTests() {
super(DatadogPropertiesConfigAdapter.class);
}
@Override
protected DatadogProperties createProperties() {
return new DatadogProperties();
}
@Override
protected DatadogPropertiesConfigAdapter createConfigAdapter(DatadogProperties properties) {
return new DatadogPropertiesConfigAdapter(properties);
}
@Test
void whenPropertiesApiKeyIsSetAdapterApiKeyReturnsIt() {
DatadogProperties properties = createProperties();
properties.setApiKey("my-api-key");
assertThat(createConfigAdapter(properties).apiKey()).isEqualTo("my-api-key");
}
@Test
void whenPropertiesApplicationKeyIsSetAdapterApplicationKeyReturnsIt() {
DatadogProperties properties = createProperties();
properties.setApplicationKey("my-application-key");
assertThat(createConfigAdapter(properties).applicationKey()).isEqualTo("my-application-key");
}
@Test
void whenPropertiesDescriptionsIsSetAdapterDescriptionsReturnsIt() {
DatadogProperties properties = createProperties();
properties.setDescriptions(false);
assertThat(createConfigAdapter(properties).descriptions()).isEqualTo(false);
}
@Test
void whenPropertiesHostTagIsSetAdapterHostTagReturnsIt() {
DatadogProperties properties = createProperties();
properties.setHostTag("waldo");
assertThat(createConfigAdapter(properties).hostTag()).isEqualTo("waldo");
}
@Test
void whenPropertiesUriIsSetAdapterUriReturnsIt() {
DatadogProperties properties = createProperties();
properties.setUri("https://app.example.com/api/v1/series");
assertThat(createConfigAdapter(properties).uri()).isEqualTo("https://app.example.com/api/v1/series");
}
}
| DatadogPropertiesConfigAdapterTests |
java | apache__dubbo | dubbo-plugin/dubbo-mutiny/src/test/java/OneToOneMethodHandlerTest.java | {
"start": 1119,
"end": 1557
} | class ____ {
@Test
void testInvoke() throws ExecutionException, InterruptedException {
String request = "request";
OneToOneMethodHandler<String, String> handler =
new OneToOneMethodHandler<>(requestUni -> requestUni.map(r -> r + "Test"));
CompletableFuture<?> future = handler.invoke(new Object[] {request});
assertEquals("requestTest", future.get());
}
}
| OneToOneMethodHandlerTest |
java | apache__kafka | connect/runtime/src/main/java/org/apache/kafka/connect/runtime/isolation/PluginsRecommenders.java | {
"start": 6081,
"end": 7151
} | class ____ implements ConfigDef.Recommender {
protected Function<String, List<Object>> recommendations() {
return converterClass -> plugins.converters(converterClass).stream()
.map(PluginDesc::version).distinct().collect(Collectors.toList());
}
protected abstract String converterConfig();
@SuppressWarnings({"rawtypes"})
@Override
public List<Object> validValues(String name, Map<String, Object> parsedConfig) {
if (plugins == null) {
return List.of();
}
if (parsedConfig.get(converterConfig()) == null) {
return List.of();
}
Class converterClass = (Class) parsedConfig.get(converterConfig());
return recommendations().apply(converterClass.getName());
}
@Override
public boolean visible(String name, Map<String, Object> parsedConfig) {
return parsedConfig.get(converterConfig()) != null;
}
}
public | ConverterPluginVersionRecommender |
java | processing__processing4 | build/shared/tools/MovieMaker/src/ch/randelshofer/gui/datatransfer/StringTransferable.java | {
"start": 860,
"end": 2042
} | class ____ extends AbstractTransferable {
private String string;
public StringTransferable(String string) {
this(getDefaultFlavors(), string);
}
public StringTransferable(DataFlavor flavor, String string) {
this(new DataFlavor[] { flavor }, string);
}
public StringTransferable(DataFlavor[] flavors, String string) {
super(flavors);
this.string = string;
}
public Object getTransferData(DataFlavor flavor) throws UnsupportedFlavorException, IOException {
if (! isDataFlavorSupported(flavor)) {
throw new UnsupportedFlavorException(flavor);
}
return string;
}
protected static DataFlavor[] getDefaultFlavors() {
try {
return new DataFlavor[] {
new DataFlavor(DataFlavor.javaJVMLocalObjectMimeType+";class=java.lang.String"),
DataFlavor.stringFlavor
};
} catch (ClassNotFoundException cle) {
InternalError ie = new InternalError(
"error initializing StringTransferable");
ie.initCause(cle);
throw ie;
}
}
}
| StringTransferable |
java | apache__camel | core/camel-core-processor/src/main/java/org/apache/camel/processor/TotalRequestsThrottler.java | {
"start": 10305,
"end": 15633
} | class ____ {
private final String key;
private final Lock lock = new ReentrantLock();
private final DelayQueue<ThrottlePermit> delayQueue = new DelayQueue<>();
private final AtomicReference<ScheduledFuture<?>> cleanFuture = new AtomicReference<>();
private volatile int throttleRate;
ThrottlingState(String key) {
this.key = key;
}
public int getThrottleRate() {
return throttleRate;
}
public ThrottlePermit poll() {
return delayQueue.poll();
}
public ThrottlePermit peek() {
return delayQueue.peek();
}
public ThrottlePermit take() throws InterruptedException {
return delayQueue.take();
}
public void clean() {
states.remove(key);
}
/**
* Returns a permit to the DelayQueue, first resetting it's delay to be relative to now.
*/
public void enqueue(final ThrottlePermit permit, final Exchange exchange) {
permit.setDelayMs(getTimePeriodMillis());
delayQueue.put(permit);
try {
ScheduledFuture<?> next = asyncExecutor.schedule(this::clean, cleanPeriodMillis, TimeUnit.MILLISECONDS);
ScheduledFuture<?> prev = cleanFuture.getAndSet(next);
if (prev != null) {
prev.cancel(false);
}
// try and incur the least amount of overhead while releasing permits back to the queue
if (LOG.isTraceEnabled()) {
LOG.trace("Permit released, for exchangeId: {}", exchange.getExchangeId());
}
} catch (RejectedExecutionException e) {
LOG.debug("Throttling queue cleaning rejected", e);
}
}
/**
* Evaluates the maxRequestsPerPeriodExpression and adjusts the throttle rate up or down.
*/
public void calculateAndSetMaxRequestsPerPeriod(final Exchange exchange) throws Exception {
lock.lock();
try {
Integer newThrottle
= TotalRequestsThrottler.this.getMaximumRequestsExpression().evaluate(exchange, Integer.class);
if (newThrottle != null && newThrottle < 0) {
throw new IllegalStateException(
"The maximumRequestsPerPeriod must be a positive number, was: " + newThrottle);
}
if (newThrottle == null && throttleRate == 0) {
throw new RuntimeExchangeException(
"The maxRequestsPerPeriodExpression was evaluated as null: "
+ TotalRequestsThrottler.this.getMaximumRequestsExpression(),
exchange);
}
if (newThrottle != null) {
if (newThrottle != throttleRate) {
// decrease
if (throttleRate > newThrottle) {
int delta = throttleRate - newThrottle;
// discard any permits that are needed to decrease throttling
while (delta > 0) {
delayQueue.take();
delta--;
if (LOG.isTraceEnabled()) {
LOG.trace("Permit discarded due to throttling rate decrease, triggered by ExchangeId: {}",
exchange.getExchangeId());
}
}
if (LOG.isDebugEnabled()) {
LOG.debug("Throttle rate decreased from {} to {}, triggered by ExchangeId: {}", throttleRate,
newThrottle, exchange.getExchangeId());
}
// increase
} else if (newThrottle > throttleRate) {
int delta = newThrottle - throttleRate;
for (int i = 0; i < delta; i++) {
delayQueue.put(new ThrottlePermit(-1));
}
if (throttleRate == 0) {
if (LOG.isDebugEnabled()) {
LOG.debug("Initial throttle rate set to {}, triggered by ExchangeId: {}", newThrottle,
exchange.getExchangeId());
}
} else {
if (LOG.isDebugEnabled()) {
LOG.debug("Throttle rate increase from {} to {}, triggered by ExchangeId: {}", throttleRate,
newThrottle, exchange.getExchangeId());
}
}
}
throttleRate = newThrottle;
}
}
} finally {
lock.unlock();
}
}
}
/**
* Permit that implements the Delayed | ThrottlingState |
java | quarkusio__quarkus | independent-projects/qute/core/src/test/java/io/quarkus/qute/IfSectionTest.java | {
"start": 388,
"end": 15472
} | class ____ {
@Test
public void tesIfElse() {
Engine engine = Engine.builder().addDefaults().build();
Template template = engine.parse("{#if isActive}ACTIVE{#else}INACTIVE{/if}");
Map<String, Boolean> data = new HashMap<>();
data.put("isActive", Boolean.FALSE);
assertEquals("INACTIVE", template.render(data));
template = engine.parse("{#if isActive}ACTIVE{#else if valid}VALID{#else}NULL{/if}");
data.put("valid", Boolean.TRUE);
assertEquals("VALID", template.render(data));
}
@Test
public void testIfOperator() {
Engine engine = Engine.builder().addDefaults().build();
Map<String, Object> data = new HashMap<>();
data.put("name", "foo");
data.put("foo", "foo");
data.put("one", "1");
data.put("two", Integer.valueOf(2));
assertEquals("ACTIVE", engine.parse("{#if name eq foo}ACTIVE{#else}INACTIVE{/if}").render(data));
assertEquals("INACTIVE", engine.parse("{#if name != foo}ACTIVE{#else}INACTIVE{/if}").render(data));
assertEquals("OK", engine.parse("{#if one < two}OK{/if}").render(data));
assertEquals("OK", engine.parse("{#if one >= one}OK{/if}").render(data));
assertEquals("OK", engine.parse("{#if one >= 0}OK{/if}").render(data));
assertEquals("OK", engine.parse("{#if one == one}OK{/if}").render(data));
assertEquals("OK", engine.parse("{#if one is 2}NOK{#else if name eq foo}OK{/if}").render(data));
assertEquals("OK", engine.parse("{#if name is foo}OK{/if}").render(data));
assertEquals("OK", engine.parse("{#if two is 2}OK{/if}").render(data));
assertEquals("OK", engine.parse("{#if name != null}OK{/if}").render(data));
assertEquals("OK", engine.parse("{#if name is null}NOK{#else}OK{/if}").render(data));
assertEquals("OK", engine.parse("{#if !false}OK{/if}").render(data));
assertEquals("OK", engine.parse("{#if true && true}OK{/if}").render(data));
assertEquals("OK", engine.parse("{#if name is 'foo' && true}OK{/if}").render(data));
assertEquals("OK", engine.parse("{#if true && true && true}OK{/if}").render(data));
assertEquals("OK", engine.parse("{#if false || true}OK{/if}").render(data));
assertEquals("OK", engine.parse("{#if false || false || true}OK{/if}").render(data));
assertEquals("OK", engine.parse("{#if name or true}OK{/if}").render(data));
assertEquals("OK", engine.parse("{#if !(true && false)}OK{/if}").render(data));
assertEquals("OK", engine.parse("{#if two > 1 && two < 10}OK{/if}").render(data));
}
@Test
public void testNestedIf() {
Engine engine = Engine.builder().addDefaults().build();
Map<String, Object> data = new HashMap<>();
data.put("ok", true);
data.put("nok", false);
assertEquals("OK", engine.parse("{#if ok}{#if !nok}OK{/}{#else}NOK{/if}").render(data));
}
@Test
public void testCompositeParameters() {
Engine engine = Engine.builder().addDefaults().build();
assertEquals("OK", engine.parse("{#if (true || false) && true}OK{/if}").render());
assertEquals("OK", engine.parse("{#if (true || false) && true && !false}OK{/if}").render());
assertEquals("OK", engine.parse("{#if true && true && !(true || false)}NOK{#else}OK{/if}").render());
assertEquals("OK", engine.parse("{#if true && true && !(true && false)}OK{#else}NOK{/if}").render());
assertEquals("OK", engine.parse("{#if true && !true && (true || false)}NOK{#else}OK{/if}").render());
assertEquals("OK", engine.parse("{#if true && (true && ( true && false))}NOK{#else}OK{/if}").render());
assertEquals("OK", engine.parse("{#if true && (!false || false || (true || false))}OK{#else}NOK{/if}").render());
assertEquals("OK", engine.parse("{#if (foo.or(false) || false || true) && (true)}OK{/if}").render());
assertEquals("NOK", engine.parse("{#if foo.or(false) || false}OK{#else}NOK{/if}").render());
assertEquals("OK", engine.parse("{#if false || (foo.or(false) || (false || true))}OK{#else}NOK{/if}").render());
assertEquals("NOK", engine.parse("{#if (true && false)}OK{#else}NOK{/if}").render());
assertEquals("OK", engine.parse("{#if true && true}OK{#else}NOK{/if}").render());
assertEquals("NOK", engine.parse("{#if true && false}OK{#else}NOK{/if}").render());
assertEquals("NOK", engine.parse("{#if false && true}OK{#else}NOK{/if}").render());
assertEquals("OK", engine.parse("{#if true and (true or false)}OK{#else}NOK{/if}").render());
assertEquals("NOK", engine.parse("{#if true and (true == false)}OK{#else}NOK{/if}").render());
assertEquals("OK", engine.parse("{#if true && (false == false)}OK{#else}NOK{/if}").render());
Map<String, String> foo = new HashMap<>();
foo.put("bar", "something");
assertEquals("NOK",
engine.parse("{#if foo.bar != 'something' && foo.bar != 'other'}OK{#else}NOK{/if}").data("foo", foo).render());
assertEquals("OK",
engine.parse("{#if foo.bar != 'nothing' && foo.bar != 'other'}OK{#else}NOK{/if}").data("foo", foo).render());
assertEquals("OK",
engine.parse("{#if foo.bar == 'something' || foo.bar != 'other'}OK{#else}NOK{/if}").data("foo", foo).render());
assertEquals("OK", engine.parse("{#if (foo.bar == 'something') || (foo.bar == 'other')}OK{#else}NOK{/if}")
.data("foo", foo).render());
Map<String, String> qual = new HashMap<>();
Template template = engine.parse(
"{#if qual.name != 'javax.inject.Named' \n"
+ " && qual.name != 'javax.enterprise.inject.Any'\n"
+ " && qual.name != 'javax.enterprise.inject.Default'}{qual.name}{/if}");
qual.put("name", "org.acme.MyQual");
assertEquals("org.acme.MyQual", template
.data("qual", qual).render());
qual.put("name", "javax.enterprise.inject.Any");
assertEquals("", template
.data("qual", qual).render());
}
@Test
public void testParserErrors() {
// Missing operand
ParserTest.assertParserError("{#if foo >}{/}", IfSectionHelper.Code.BINARY_OPERATOR_MISSING_SECOND_OPERAND,
"Parser error: binary operator [GT] set but the second operand not present for {#if} section",
1);
}
@Test
public void testParameterParsing() {
List<Object> params = IfSectionHelper
.parseParams(Arrays.asList("item.price", ">", "10", "&&", "item.price", "<", "20"), null);
assertEquals(3, params.size());
assertEquals(Arrays.asList("item.price", Operator.GT, "10"), params.get(0));
assertEquals(Operator.AND, params.get(1));
assertEquals(Arrays.asList("item.price", Operator.LT, "20"), params.get(2));
params = IfSectionHelper
.parseParams(Arrays.asList("(item.price > 10)", "&&", "item.price", "<", "20"), null);
assertEquals(3, params.size());
assertEquals(Arrays.asList("item.price", Operator.GT, "10"), params.get(0));
assertEquals(Operator.AND, params.get(1));
assertEquals(Arrays.asList("item.price", Operator.LT, "20"), params.get(2));
params = IfSectionHelper
.parseParams(Arrays.asList("(item.price > 10)", "&&", "(item.price < 20)"), null);
assertEquals(3, params.size());
assertEquals(Arrays.asList("item.price", Operator.GT, "10"), params.get(0));
assertEquals(Operator.AND, params.get(1));
assertEquals(Arrays.asList("item.price", Operator.LT, "20"), params.get(2));
params = IfSectionHelper
.parseParams(Arrays.asList("name", "is", "'foo'", "&&", "true"), null);
assertEquals(3, params.size());
assertEquals(Arrays.asList("name", Operator.EQ, "'foo'"), params.get(0));
assertEquals(Operator.AND, params.get(1));
assertEquals("true", params.get(2));
}
    /**
     * Verifies "falsy" evaluation of a standalone {#if} operand: false,
     * empty string, zero, empty collections/maps/arrays and empty Optionals
     * select the else branch, and falsiness composes with !, && and ||.
     * A non-existent key resolved via {@code or(false)} is also falsy.
     */
    @Test
    public void testFalsy() {
        Engine engine = Engine.builder().addDefaults().build();
        Map<String, Object> data = new HashMap<>();
        data.put("name", "foo");
        data.put("nameEmpty", "");
        data.put("boolTrue", true);
        data.put("boolFalse", false);
        data.put("intTwo", Integer.valueOf(2));
        data.put("intZero", Integer.valueOf(0));
        data.put("list", Collections.singleton("foo"));
        data.put("setEmpty", Collections.emptySet());
        data.put("mapEmpty", Collections.emptyMap());
        data.put("array", new String[] { "foo" });
        data.put("arrayEmpty", new String[] {});
        data.put("optional", Optional.of("foo"));
        data.put("optionalEmpty", Optional.empty());
        assertEquals("1", engine.parse("{#if name}1{#else}0{/if}").render(data));
        assertEquals("0", engine.parse("{#if nameEmpty}1{#else}0{/if}").render(data));
        assertEquals("1", engine.parse("{#if boolTrue}1{#else}0{/if}").render(data));
        assertEquals("0", engine.parse("{#if boolFalse}1{#else}0{/if}").render(data));
        assertEquals("1", engine.parse("{#if intTwo}1{#else}0{/if}").render(data));
        assertEquals("0", engine.parse("{#if intZero}1{#else}0{/if}").render(data));
        assertEquals("1", engine.parse("{#if list}1{#else}0{/if}").render(data));
        assertEquals("0", engine.parse("{#if setEmpty}1{#else}0{/if}").render(data));
        assertEquals("0", engine.parse("{#if mapEmpty}1{#else}0{/if}").render(data));
        assertEquals("1", engine.parse("{#if array}1{#else}0{/if}").render(data));
        assertEquals("0", engine.parse("{#if arrayEmpty}1{#else}0{/if}").render(data));
        assertEquals("1", engine.parse("{#if optional}1{#else}0{/if}").render(data));
        assertEquals("0", engine.parse("{#if optionalEmpty}1{#else}0{/if}").render(data));
        assertEquals("1", engine.parse("{#if !arrayEmpty}1{#else}0{/if}").render(data));
        assertEquals("1", engine.parse("{#if arrayEmpty || name}1{#else}0{/if}").render(data));
        assertEquals("0", engine.parse("{#if arrayEmpty && name}1{#else}0{/if}").render(data));
        assertEquals("1", engine.parse("{#if array && intTwo}1{#else}0{/if}").render(data));
        assertEquals("1", engine.parse("{#if (array && intZero) || true}1{#else}0{/if}").render(data));
        assertEquals("0", engine.parse("{#if nonExistent.or(false)}1{#else}0{/if}").render(data));
        assertEquals("1", engine.parse("{#if !nonExistent.or(false)}1{#else}0{/if}").render(data));
    }
@Test
public void testStandaloneLines() {
Engine engine = Engine.builder().addDefaults().removeStandaloneLines(true).build();
assertEquals("BAZ\n",
engine.parse("{#if false}\n"
+ "FOO\n"
+ "{#else}\n"
+ "BAZ\n"
+ "{/if}").render());
}
    /**
     * Standalone-line removal must only strip the lines holding the section
     * tags (and the skipped branch); surrounding blank lines are preserved.
     */
    @Test
    public void testStandaloneLinesLinebreaks() {
        Engine engine = Engine.builder().addDefaults().removeStandaloneLines(true).build();
        // No tags at all - every linebreak is kept as-is
        assertEquals("FOO\n\n\n\n",
                engine.parse("FOO\n\n\n\n").render());
        assertEquals("FOO\n\n\n\n",
                engine.parse("FOO\n\n{#if false}\nBAZ\n{/if}\n\n\n").render());
        assertEquals("FOO\n\n",
                engine.parse("FOO\n\n{#if false}\nBAZ\n{/if}\n").render());
    }
    /**
     * With strict rendering enabled, dereferencing a missing key throws a
     * TemplateException, while appending the safe operator {@code ??} makes
     * the missing value evaluate as falsy instead of failing.
     */
    @Test
    public void testSafeExpression() {
        Engine engine = Engine.builder().strictRendering(true).addDefaults().build();
        try {
            engine.parse("{#if val.is.not.there}NOK{#else}OK{/if}").render();
            fail();
        } catch (TemplateException expected) {
            assertEquals(
                    "Rendering error: Key \"val\" not found in the template data map with keys [] in expression {val.is.not.there}",
                    expected.getMessage());
        }
        assertEquals("OK", engine.parse("{#if val.is.not.there??}NOK{#else}OK{/if}").render());
        assertEquals("OK", engine.parse("{#if hero??}NOK{#else}OK{/if}").render());
        assertEquals("OK", engine.parse("{#if hero??}OK{#else}NOK{/if}").data("hero", true).render());
    }
    /**
     * Compound condition mixing logical AND, an enum comparison against a
     * namespace-resolved constant ({@code ContentStatus:NEW}) and a negated
     * virtual method call ({@code target.voted(user)}).
     */
    @Test
    public void testFromageCondition() {
        Engine engine = Engine.builder().addDefaults().addValueResolver(new ReflectionValueResolver())
                .addNamespaceResolver(NamespaceResolver.builder("ContentStatus").resolve(ec -> ContentStatus.NEW).build())
                .build();
        assertEquals("OK",
                engine.parse("{#if user && target.status == ContentStatus:NEW && !target.voted(user)}NOK{#else}OK{/if}")
                        .data("user", "Stef", "target", new Target(ContentStatus.ACCEPTED)).render());
        assertEquals("OK",
                engine.parse("{#if user && target.status == ContentStatus:NEW && !target.voted(user)}OK{#else}NOK{/if}")
                        .data("user", "Stef", "target", new Target(ContentStatus.NEW)).render());
    }
    /**
     * Both expressions of {@code {#if item.price > 1}} (the literal operand
     * and the non-literal one) must report the origin of the section start
     * tag: line 1, character 3 (the template starts with two spaces).
     */
    @Test
    public void testParameterOrigin() {
        Engine engine = Engine.builder().addDefaults().build();
        Template template = engine.parse("  {#if item.price > 1}{/if}");
        List<Expression> expressions = template.getExpressions();
        assertEquals(2, expressions.size());
        for (Expression expression : expressions) {
            if (expression.isLiteral()) {
                assertEquals(1, expression.getLiteralValue().getNow(false));
                assertEquals(1, expression.getOrigin().getLine());
                assertEquals(3, expression.getOrigin().getLineCharacterStart());
            } else {
                assertEquals("item.price", expression.toOriginalString());
                assertEquals(1, expression.getOrigin().getLine());
                assertEquals(3, expression.getOrigin().getLineCharacterStart());
            }
        }
    }
    /**
     * Verifies that comparisons coerce across numeric representations:
     * long/double vs int literals, numeric strings, and literal suffixes
     * ({@code f} for float, {@code l} for long).
     */
    @Test
    public void testComparisons() {
        Engine engine = Engine.builder().addDefaults().build();
        assertEquals("longGtInt", engine.parse("{#if val > 10}longGtInt{/if}").data("val", 11l).render());
        assertEquals("doubleGtInt", engine.parse("{#if val > 10}doubleGtInt{/if}").data("val", 20.0).render());
        assertEquals("longGtStr", engine.parse("{#if val > '10'}longGtStr{/if}").data("val", 11l).render());
        assertEquals("longLeStr", engine.parse("{#if val <= '10'}longLeStr{/if}").data("val", 1l).render());
        assertEquals("longEqInt", engine.parse("{#if val == 10}longEqInt{/if}").data("val", 10l).render());
        assertEquals("doubleEqInt", engine.parse("{#if val == 10}doubleEqInt{/if}").data("val", 10.0).render());
        assertEquals("doubleEqFloat", engine.parse("{#if val == 10.00f}doubleEqFloat{/if}").data("val", 10.0).render());
        assertEquals("longEqLong", engine.parse("{#if val eq 10l}longEqLong{/if}").data("val", Long.valueOf(10)).render());
    }
public static | IfSectionTest |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUGIWithMiniKdc.java | {
"start": 1586,
"end": 4211
} | class ____ {
private static MiniKdc kdc;
  @AfterEach
  public void teardown() {
    // Reset static UGI/Kerberos state so configuration cannot leak into
    // other tests, then stop the KDC if setupKdc() started one.
    UserGroupInformation.reset();
    if (kdc != null) {
      kdc.stop();
    }
  }
  /**
   * Starts a MiniKdc in the test directory with a 2-second min/max ticket
   * lifetime so renewal failures surface quickly.
   */
  private void setupKdc() throws Exception {
    Properties kdcConf = MiniKdc.createConf();
    // tgt expire time = 2 seconds. just testing that renewal thread retries
    // for expiring tickets, so no need to waste time waiting for expiry to
    // arrive.
    kdcConf.setProperty(MiniKdc.MAX_TICKET_LIFETIME, "2");
    kdcConf.setProperty(MiniKdc.MIN_TICKET_LIFETIME, "2");
    File kdcDir = new File(System.getProperty("test.dir", "target"));
    kdc = new MiniKdc(kdcConf, kdcDir);
    kdc.start();
  }
@Test
@Timeout(value = 120)
public void testAutoRenewalThreadRetryWithKdc() throws Exception {
GenericTestUtils.setLogLevel(UserGroupInformation.LOG, Level.DEBUG);
final Configuration conf = new Configuration();
// can't rely on standard kinit, else test fails when user running
// the test is kinit'ed because the test renews _their TGT_.
conf.set("hadoop.kerberos.kinit.command", "bogus-kinit-cmd");
// Relogin every 1 second
conf.setLong(HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN, 1);
SecurityUtil.setAuthenticationMethod(
UserGroupInformation.AuthenticationMethod.KERBEROS, conf);
UserGroupInformation.setConfiguration(conf);
LoginContext loginContext = null;
try {
final String principal = "foo";
final File workDir = new File(System.getProperty("test.dir", "target"));
final File keytab = new File(workDir, "foo.keytab");
final Set<Principal> principals = new HashSet<>();
principals.add(new KerberosPrincipal(principal));
setupKdc();
kdc.createPrincipal(keytab, principal);
UserGroupInformation.loginUserFromKeytab(principal, keytab.getPath());
UserGroupInformation ugi = UserGroupInformation.getLoginUser();
// no ticket cache, so force the thread to test for failures.
ugi.spawnAutoRenewalThreadForUserCreds(true);
// Verify retry happens. Do not verify retry count to reduce flakiness.
// Detailed back-off logic is tested separately in
// TestUserGroupInformation#testGetNextRetryTime
LambdaTestUtils.await(30000, 500,
() -> {
final int count =
UserGroupInformation.metrics.getRenewalFailures().value();
UserGroupInformation.LOG.info("Renew failure count is {}", count);
return count > 0;
});
} finally {
if (loginContext != null) {
loginContext.logout();
}
}
}
}
| TestUGIWithMiniKdc |
java | micronaut-projects__micronaut-core | core/src/main/java/io/micronaut/core/bind/ExecutableBinder.java | {
"start": 1210,
"end": 2701
} | interface ____<S> {
/**
* Binds a given {@link Executable} using the given registry and source object.
*
* @param target The target executable
* @param registry The registry to use
* @param source The binding source
* @param <T> The executable target type
* @param <R> The executable return type
* @return The bound executable
* @throws UnsatisfiedArgumentException When the executable could not be satisfied
*/
<T, R> BoundExecutable<T, R> bind(
Executable<T, R> target,
ArgumentBinderRegistry<S> registry,
S source
) throws UnsatisfiedArgumentException;
/**
* Binds a given {@link Executable} using the given registry and source object. Unlike {@link #bind(Executable, ArgumentBinderRegistry, Object)} this
* method will not throw an {@link UnsatisfiedArgumentException} if an argument cannot be bound. Instead, the {@link BoundExecutable#getUnboundArguments()} property
* will be populated with any arguments that could not be bound
*
* @param target The target executable
* @param registry The registry to use
* @param source The binding source
* @param <T> The executable target type
* @param <R> The executable return type
* @return The bound executable
*/
<T, R> BoundExecutable<T, R> tryBind(
Executable<T, R> target,
ArgumentBinderRegistry<S> registry,
S source
);
}
| ExecutableBinder |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/criteria/CriteriaTypeValidationTests.java | {
"start": 1849,
"end": 2115
} | class ____ {
@Id
private Long id;
private String name;
@OneToMany(mappedBy = "parent", fetch = FetchType.EAGER, cascade = CascadeType.ALL, orphanRemoval = true)
private Set<Child> children = new HashSet<>();
}
@Entity(name = "Child")
public static | Parent |
java | google__guava | android/guava/src/com/google/common/base/FinalizableReferenceQueue.java | {
"start": 13495,
"end": 13843
} | class ____.
String urlString = finalizerUrl.toString();
if (!urlString.endsWith(finalizerPath)) {
throw new IOException("Unsupported path style: " + urlString);
}
urlString = urlString.substring(0, urlString.length() - finalizerPath.length());
return new URL(finalizerUrl, urlString);
}
/** Creates a | path |
java | quarkusio__quarkus | extensions/qute/deployment/src/main/java/io/quarkus/qute/deployment/NativeCheckedTemplateEnhancer.java | {
"start": 493,
"end": 609
} | class ____ implements BiFunction<String, ClassVisitor, ClassVisitor> {
private static | NativeCheckedTemplateEnhancer |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/config/plugins/convert/TypeConverters.java | {
"start": 11470,
"end": 11804
} | class ____ implements TypeConverter<Pattern> {
        @Override
        public Pattern convert(final String s) {
            // Compiles with default flags; an invalid regex propagates
            // PatternSyntaxException to the caller.
            return Pattern.compile(s);
        }
}
/**
* Converts a {@link String} into a {@link Provider}.
*/
@Plugin(name = "SecurityProvider", category = CATEGORY)
public static | PatternConverter |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/deser/jdk/ThrowableDeserializer.java | {
"start": 674,
"end": 11715
} | class ____
extends BeanDeserializer // not the greatest idea but...
{
protected final static String PROP_NAME_MESSAGE = "message";
protected final static String PROP_NAME_SUPPRESSED = "suppressed";
protected final static String PROP_NAME_LOCALIZED_MESSAGE = "localizedMessage";
/*
/**********************************************************************
/* Life-cycle
/**********************************************************************
*/
protected ThrowableDeserializer(BeanDeserializer baseDeserializer) {
super(baseDeserializer);
// need to disable this, since we do post-processing
_vanillaProcessing = false;
}
public static ThrowableDeserializer construct(DeserializationContext ctxt,
BeanDeserializer baseDeserializer)
{
// 27-May-2022, tatu: TODO -- handle actual renaming of fields to support
// strategies like kebab- and snake-case where there are changes beyond
// simple upper-/lower-casing
/*
PropertyNamingStrategy pts = ctxt.getConfig().getPropertyNamingStrategy();
if (pts != null) {
}
*/
return new ThrowableDeserializer(baseDeserializer);
}
/**
* Alternative constructor used when creating "unwrapping" deserializers
*/
protected ThrowableDeserializer(BeanDeserializer src,
UnwrappedPropertyHandler unwrapHandler, PropertyBasedCreator pbCreator,
BeanPropertyMap renamedProperties,
boolean ignoreAllUnknown) {
super(src, unwrapHandler, pbCreator, renamedProperties, ignoreAllUnknown);
}
@Override
public ValueDeserializer<Object> unwrappingDeserializer(DeserializationContext ctxt,
NameTransformer transformer)
{
if (getClass() != ThrowableDeserializer.class) {
return this;
}
// main thing really is to just enforce ignoring of unknown properties; since
// there may be multiple unwrapped values and properties for all may be interleaved...
UnwrappedPropertyHandler uwHandler = _unwrappedPropertyHandler;
// delegate further unwraps, if any
if (uwHandler != null) {
uwHandler = uwHandler.renameAll(ctxt, transformer);
}
PropertyBasedCreator pbCreator = _propertyBasedCreator;
if (pbCreator != null) {
pbCreator = pbCreator.renameAll(ctxt, transformer);
}
// and handle direct unwrapping as well:
return new ThrowableDeserializer(this, uwHandler, pbCreator,
_beanProperties.renameAll(ctxt, transformer), true);
}
/*
/**********************************************************************
/* Overridden methods
/**********************************************************************
*/
@Override
public Object deserializeFromObject(JsonParser p, DeserializationContext ctxt)
throws JacksonException
{
// 30-Sep-2010, tatu: Need to allow use of @JsonCreator, so:
if (_propertyBasedCreator != null) { // proper @JsonCreator
return _deserializeUsingPropertyBased(p, ctxt);
}
if (_delegateDeserializer != null) {
return _valueInstantiator.createUsingDelegate(ctxt,
_delegateDeserializer.deserialize(p, ctxt));
}
if (_beanType.isAbstract()) { // for good measure, check this too
return ctxt.handleMissingInstantiator(handledType(), getValueInstantiator(), p,
"abstract type (need to add/enable type information?)");
}
boolean hasStringCreator = _valueInstantiator.canCreateFromString();
boolean hasDefaultCtor = _valueInstantiator.canCreateUsingDefault();
// and finally, verify we do have single-String arg constructor (if no @JsonCreator)
if (!hasStringCreator && !hasDefaultCtor) {
return ctxt.handleMissingInstantiator(handledType(), getValueInstantiator(), p,
"Throwable needs a default constructor, a single-String-arg constructor; or explicit @JsonCreator");
}
Throwable throwable = null;
Object[] pending = null;
Throwable[] suppressed = null;
int pendingIx = 0;
int ix = p.currentNameMatch(_propNameMatcher);
for (; ; ix = p.nextNameMatch(_propNameMatcher)) {
if (ix >= 0) {
p.nextToken();
SettableBeanProperty prop = _propsByIndex[ix];
if (throwable != null) {
// 07-Dec-2023, tatu: [databind#4248] Interesting that "cause"
// with `null` blows up. So, avoid.
if ("cause".equals(prop.getName())
&& p.hasToken(JsonToken.VALUE_NULL)) {
continue;
}
prop.deserializeAndSet(p, ctxt, throwable);
continue;
}
// nope; need to defer
if (pending == null) {
int len = _beanProperties.size();
pending = new Object[len + len];
} else if (pendingIx == pending.length) {
// NOTE: only occurs with duplicate properties, possible
// with some formats (most notably XML; but possibly with
// JSON if duplicate detection not enabled). Most likely
// only occurs with malicious content so use linear buffer
// resize (no need to optimize performance)
pending = Arrays.copyOf(pending, pendingIx + 16);
}
pending[pendingIx++] = prop;
pending[pendingIx++] = prop.deserialize(p, ctxt);
continue;
}
if (ix != PropertyNameMatcher.MATCH_UNKNOWN_NAME) {
if (ix == PropertyNameMatcher.MATCH_END_OBJECT) {
break;
}
return _handleUnexpectedWithin(p, ctxt, throwable);
}
// Maybe it's "message"?
String propName = p.currentName();
p.nextToken();
// 26-May-2022, tatu: [databind#3497] To support property naming strategies,
// should ideally mangle property names. But for now let's cheat; works
// for case-changing although not for kebab/snake cases and "localizedMessage"
if (PROP_NAME_MESSAGE.equalsIgnoreCase(propName)) {
throwable = _instantiate(ctxt, hasStringCreator, p.getValueAsString());
// any pending values?
if (pending != null) {
for (int i = 0, len = pendingIx; i < len; i += 2) {
SettableBeanProperty prop = (SettableBeanProperty)pending[i];
prop.set(ctxt, throwable, pending[i+1]);
}
pending = null;
}
continue;
}
// Things marked as ignorable should not be passed to any setter
if ((_ignorableProps != null) && _ignorableProps.contains(propName)) {
p.skipChildren();
continue;
}
if (PROP_NAME_SUPPRESSED.equalsIgnoreCase(propName)) { // or "suppressed"?
// 07-Dec-2023, tatu: Not sure how/why, but JSON Null is otherwise
// not handled with such call so...
if (p.hasToken(JsonToken.VALUE_NULL)) {
suppressed = null;
} else {
// Inlined `DeserializationContext.readValue()` to minimize call depth
ValueDeserializer<Object> deser = ctxt.findRootValueDeserializer(
ctxt.constructType(Throwable[].class));
suppressed = (Throwable[]) deser.deserialize(p, ctxt);
}
continue;
}
if (PROP_NAME_LOCALIZED_MESSAGE.equalsIgnoreCase(propName)) {
p.skipChildren();
continue;
}
if (_anySetter != null) {
// [databind#4316] Since 2.16.2 : at this point throwable should be non-null
if (throwable == null) {
throwable = _instantiate(ctxt, hasStringCreator, null);
}
_anySetter.deserializeAndSet(p, ctxt, throwable, propName);
continue;
}
// 23-Jan-2018, tatu: One concern would be `message`, but without any-setter or single-String-ctor
// (or explicit constructor). We could just ignore it but for now, let it fail
// [databind#4071]: In case of "message", skip for default constructor
if (PROP_NAME_MESSAGE.equalsIgnoreCase(propName)) {
p.skipChildren();
continue;
}
// Unknown: let's call handler method
handleUnknownProperty(p, ctxt, throwable, propName);
}
// Sanity check: did we find "message"?
if (throwable == null) {
throwable = _instantiate(ctxt, hasStringCreator, null);
}
// any pending values?
if (pending != null) {
for (int i = 0, len = pendingIx; i < len; i += 2) {
SettableBeanProperty prop = (SettableBeanProperty)pending[i];
prop.set(ctxt, throwable, pending[i+1]);
}
}
// any suppressed exceptions?
if (suppressed != null) {
for (Throwable s : suppressed) {
// 13-Dec-2023, tatu: But skip any `null` entries we might have gotten
if (s != null) {
throwable.addSuppressed(s);
}
}
}
return throwable;
}
/*
/**********************************************************
/* Internal helper methods
/**********************************************************
*/
/**
* Helper method to initialize Throwable
*
* @since 2.16.2
*/
private Throwable _instantiate(DeserializationContext ctxt, boolean hasStringCreator, String valueAsString)
{
/* 15-Oct-2010, tatu: Can't assume missing message is an error, since it may be
* suppressed during serialization.
*
* Should probably allow use of default constructor, too...
*/
//throw new XxxException("No 'message' property found: could not deserialize "+_beanType);
if (hasStringCreator) {
if (valueAsString != null) {
return (Throwable) _valueInstantiator.createFromString(ctxt, valueAsString);
} else {
return (Throwable) _valueInstantiator.createFromString(ctxt, null);
}
} else {
return (Throwable) _valueInstantiator.createUsingDefault(ctxt);
}
}
}
| ThrowableDeserializer |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/checkreturnvalue/CanIgnoreReturnValueSuggesterTest.java | {
"start": 6882,
"end": 7343
} | class ____ {
public static StringBuilder append(StringBuilder input, String name) {
input.append("name = ").append(name);
return input;
}
}
""")
.addOutputLines(
"ReturnInputParam.java",
"""
package com.google.frobber;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
public final | ReturnInputParam |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/error/ShouldContainsOnlyOnce.java | {
"start": 1154,
"end": 4368
} | class ____ extends BasicErrorMessageFactory {
  /**
   * Creates a new <code>{@link ShouldContainsOnlyOnce}</code>.
   *
   * @param actual the actual value in the failed assertion.
   * @param expected values expected to be contained in {@code actual}.
   * @param notFound values in {@code expected} not found in {@code actual}.
   * @param notOnlyOnce values in {@code actual} that were not only once in {@code expected}.
   * @param comparisonStrategy the {@link ComparisonStrategy} used to evaluate assertion.
   * @return the created {@code ErrorMessageFactory}.
   */
  public static ErrorMessageFactory shouldContainsOnlyOnce(Object actual, Object expected, Set<?> notFound,
                                                           Set<?> notOnlyOnce, ComparisonStrategy comparisonStrategy) {
    // some elements missing AND some elements duplicated
    if (!isNullOrEmpty(notFound) && !isNullOrEmpty(notOnlyOnce))
      return new ShouldContainsOnlyOnce(actual, expected, notFound, notOnlyOnce, comparisonStrategy);
    // only missing elements
    if (!isNullOrEmpty(notFound))
      return new ShouldContainsOnlyOnce(actual, expected, notFound, comparisonStrategy);
    // case where no elements were missing but some appeared more than once.
    return new ShouldContainsOnlyOnce(notOnlyOnce, actual, expected, comparisonStrategy);
  }
/**
* Creates a new <code>{@link ShouldContainsOnlyOnce}</code>.
*
* @param actual the actual value in the failed assertion.
* @param expected values expected to be contained in {@code actual}.
* @param notFound values in {@code expected} not found in {@code actual}.
* @param notOnlyOnce values in {@code actual} that were found not only once in {@code expected}.
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldContainsOnlyOnce(Object actual, Object expected, Set<?> notFound,
Set<?> notOnlyOnce) {
return shouldContainsOnlyOnce(actual, expected, notFound, notOnlyOnce, StandardComparisonStrategy.instance());
}
private ShouldContainsOnlyOnce(Object actual, Object expected, Set<?> notFound, Set<?> notOnlyOnce,
ComparisonStrategy comparisonStrategy) {
super("%nExpecting actual:%n %s%nto contain only once:%n %s%n"
+ "but some elements were not found:%n %s%n"
+ "and others were found more than once:%n %s%n%s",
actual, expected, notFound, notOnlyOnce, comparisonStrategy);
}
private ShouldContainsOnlyOnce(Object actual, Object expected, Set<?> notFound, ComparisonStrategy comparisonStrategy) {
super("%nExpecting actual:%n %s%nto contain only once:%n %s%nbut some elements were not found:%n %s%n%s",
actual, expected, notFound, comparisonStrategy);
}
// change the order of parameters to avoid confusion with previous constructor
private ShouldContainsOnlyOnce(Set<?> notOnlyOnce, Object actual, Object expected,
ComparisonStrategy comparisonStrategy) {
super("%nExpecting actual:%n %s%nto contain only once:%n %s%nbut some elements were found more than once:%n %s%n%s",
actual, expected, notOnlyOnce, comparisonStrategy);
}
}
| ShouldContainsOnlyOnce |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/factory/DefaultListableBeanFactoryTests.java | {
"start": 135696,
"end": 136735
} | class ____ implements BeanNameAware {
public TestBean spouse;
public int spouseAge;
private String beanName;
public ConstructorDependency(TestBean spouse) {
this.spouse = spouse;
}
public ConstructorDependency(int spouseAge) {
this.spouseAge = spouseAge;
}
@SuppressWarnings("unused")
private ConstructorDependency(TestBean spouse, TestBean otherSpouse) {
throw new IllegalArgumentException("Should never be called");
}
@Override
public void setBeanName(String name) {
this.beanName = name;
}
	@Override
	public boolean equals(@Nullable Object o) {
		// Equal iff same concrete class with equal spouse, spouseAge and beanName
		// (consistent with hashCode(), which hashes the same three fields).
		if (this == o) {
			return true;
		}
		if (o == null || getClass() != o.getClass()) {
			return false;
		}
		ConstructorDependency that = (ConstructorDependency) o;
		return spouseAge == that.spouseAge &&
				Objects.equals(spouse, that.spouse) &&
				Objects.equals(beanName, that.beanName);
	}
@Override
public int hashCode() {
return Objects.hash(spouse, spouseAge, beanName);
}
}
public static | ConstructorDependency |
java | elastic__elasticsearch | plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KatakanaUppercaseFilterFactory.java | {
"start": 853,
"end": 1230
} | class ____ extends AbstractTokenFilterFactory {
    public KatakanaUppercaseFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
        // Only the name is used; indexSettings, environment and settings are
        // unused here (presumably required by the factory signature - confirm
        // against AbstractTokenFilterFactory's registration contract).
        super(name);
    }
    @Override
    public TokenStream create(TokenStream tokenStream) {
        // Wraps the incoming stream with Lucene's JapaneseKatakanaUppercaseFilter.
        return new JapaneseKatakanaUppercaseFilter(tokenStream);
    }
}
| KatakanaUppercaseFilterFactory |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/compositefk/LazyManyToOneEmbeddedIdWithToOneFKTest.java | {
"start": 7039,
"end": 7656
} | class ____ {
@Id
private Integer id;
private String name;
@ManyToOne(fetch = FetchType.LAZY)
SystemUser user;
public System() {
}
public System(Integer id, String name) {
this.id = id;
this.name = name;
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public SystemUser getUser() {
return user;
}
public void setUser(SystemUser user) {
this.user = user;
}
}
@Entity(name = "SystemUser")
public static | System |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/bean/override/convention/TestBeanOverrideHandlerTests.java | {
"start": 1512,
"end": 5382
} | class ____ {
@Test
void beanNameIsSetToNullIfAnnotationNameIsEmpty() {
List<BeanOverrideHandler> handlers = BeanOverrideTestUtils.findHandlers(SampleOneOverride.class);
assertThat(handlers).singleElement().extracting(BeanOverrideHandler::getBeanName).isNull();
}
@Test
void beanNameIsSetToAnnotationName() {
List<BeanOverrideHandler> handlers = BeanOverrideTestUtils.findHandlers(SampleOneOverrideWithName.class);
assertThat(handlers).singleElement().extracting(BeanOverrideHandler::getBeanName).isEqualTo("anotherBean");
}
@Test
void failsWithMissingMethod() {
assertThatIllegalStateException()
.isThrownBy(() -> BeanOverrideTestUtils.findHandlers(SampleMissingMethod.class))
.withMessage("No static method found named message() in %s with return type %s",
SampleMissingMethod.class.getName(), String.class.getName());
}
@Test
void isEqualToWithSameInstance() {
TestBeanOverrideHandler handler = handlerFor(sampleField("message"), sampleMethod("message"));
assertThat(handler).isEqualTo(handler);
assertThat(handler).hasSameHashCodeAs(handler);
}
@Test
void isEqualToWithSameMetadata() {
TestBeanOverrideHandler handler1 = handlerFor(sampleField("message"), sampleMethod("message"));
TestBeanOverrideHandler handler2 = handlerFor(sampleField("message"), sampleMethod("message"));
assertThat(handler1).isEqualTo(handler2);
assertThat(handler1).hasSameHashCodeAs(handler2);
}
@Test
void isEqualToWithSameMetadataByNameLookupAndDifferentField() {
TestBeanOverrideHandler handler1 = handlerFor(sampleField("message3"), sampleMethod("message"));
TestBeanOverrideHandler handler2 = handlerFor(sampleField("message4"), sampleMethod("message"));
assertThat(handler1).isEqualTo(handler2);
assertThat(handler1).hasSameHashCodeAs(handler2);
}
@Test
void isNotEqualToWithSameMetadataByTypeLookupAndDifferentField() {
TestBeanOverrideHandler handler1 = handlerFor(sampleField("message"), sampleMethod("message"));
TestBeanOverrideHandler handler2 = handlerFor(sampleField("message2"), sampleMethod("message"));
assertThat(handler1).isNotEqualTo(handler2);
}
@Test
void isNotEqualToWithSameMetadataButDifferentBeanName() {
TestBeanOverrideHandler handler1 = handlerFor(sampleField("message"), sampleMethod("message"));
TestBeanOverrideHandler handler2 = handlerFor(sampleField("message3"), sampleMethod("message"));
assertThat(handler1).isNotEqualTo(handler2);
}
@Test
void isNotEqualToWithSameMetadataButDifferentMethod() {
TestBeanOverrideHandler handler1 = handlerFor(sampleField("message"), sampleMethod("message"));
TestBeanOverrideHandler handler2 = handlerFor(sampleField("message"), sampleMethod("description"));
assertThat(handler1).isNotEqualTo(handler2);
}
@Test
void isNotEqualToWithSameMetadataButDifferentAnnotations() {
TestBeanOverrideHandler handler1 = handlerFor(sampleField("message"), sampleMethod("message"));
TestBeanOverrideHandler handler2 = handlerFor(sampleField("message5"), sampleMethod("message"));
assertThat(handler1).isNotEqualTo(handler2);
}
	private static Field sampleField(String fieldName) {
		// Reflectively look up a field declared in the Sample fixture class;
		// fail the test immediately if the fixture does not declare it.
		Field field = ReflectionUtils.findField(Sample.class, fieldName);
		assertThat(field).isNotNull();
		return field;
	}
	private static Method sampleMethod(String noArgMethodName) {
		// Reflectively look up a no-arg method on the Sample fixture class;
		// fail the test immediately if the fixture does not declare it.
		Method method = ReflectionUtils.findMethod(Sample.class, noArgMethodName);
		assertThat(method).isNotNull();
		return method;
	}
	private static TestBeanOverrideHandler handlerFor(Field field, Method overrideMethod) {
		TestBean annotation = field.getAnnotation(TestBean.class);
		// An empty name() attribute is normalized to a null bean name
		// (by-type lookup instead of by-name lookup).
		String beanName = (StringUtils.hasText(annotation.name()) ? annotation.name() : null);
		return new TestBeanOverrideHandler(
				field, ResolvableType.forClass(field.getType()), beanName, "", BeanOverrideStrategy.REPLACE, overrideMethod);
	}
static | TestBeanOverrideHandlerTests |
java | spring-projects__spring-framework | spring-aop/src/main/java/org/springframework/aop/framework/ReflectiveMethodInvocation.java | {
"start": 1898,
"end": 2151
} | class ____ considered internal and should not be
* directly accessed. The sole reason for it being public is compatibility
* with existing framework integrations (for example, Pitchfork). For any other
* purposes, use the {@link ProxyMethodInvocation} | is |
java | quarkusio__quarkus | extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/graal/context/Substitute_ThreadLocalSessionContext.java | {
"start": 428,
"end": 921
} | class ____ {
@Substitute
public Substitute_ThreadLocalSessionContext(SessionFactoryImplementor factory) {
throw new UnsupportedOperationException(
"This build of Hibernate ORM doesn't support 'thread' value for configuration property"
+ AvailableSettings.CURRENT_SESSION_CONTEXT_CLASS);
}
    /**
     * Always returns {@code null}; in practice unreachable because the
     * substituted constructor above unconditionally throws.
     */
    @Substitute
    public Session currentSession() throws HibernateException {
        return null;
    }
}
| Substitute_ThreadLocalSessionContext |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/sql/results/graph/embeddable/internal/EmbeddableAssembler.java | {
"start": 594,
"end": 1939
} | class ____ implements DomainResultAssembler {
protected final EmbeddableInitializer<InitializerData> initializer;
public EmbeddableAssembler(EmbeddableInitializer<?> initializer) {
this.initializer = (EmbeddableInitializer<InitializerData>) initializer;
}
@Override
public JavaType getAssembledJavaType() {
return initializer.getInitializedPart().getJavaType();
}
	@Override
	public Object assemble(RowProcessingState rowProcessingState) {
		// Drive the initializer only as far as needed for this row:
		// UNINITIALIZED -> resolveKey, KEY_RESOLVED -> resolveInstance.
		final InitializerData data = initializer.getData( rowProcessingState );
		final Initializer.State state = data.getState();
		// NOTE(review): 'state' is captured once, before resolveKey() runs; a
		// transition to KEY_RESOLVED inside resolveKey() is not re-observed by
		// the second check - presumably resolveInstance() is then triggered via
		// getResolvedInstance(); confirm against the Initializer contract.
		if ( state == Initializer.State.UNINITIALIZED ) {
			initializer.resolveKey( data );
		}
		if ( state == Initializer.State.KEY_RESOLVED ) {
			initializer.resolveInstance( data );
		}
		return initializer.getResolvedInstance( data );
	}
@Override
public void resolveState(RowProcessingState rowProcessingState) {
// use resolveState instead of initialize instance to avoid
// unneeded embeddable instantiation and injection
initializer.resolveState( rowProcessingState );
}
@Override
public EmbeddableInitializer<?> getInitializer() {
return initializer;
}
@Override
public void forEachResultAssembler(BiConsumer consumer, Object arg) {
if ( initializer.isResultInitializer() ) {
consumer.accept( initializer, arg );
}
}
}
| EmbeddableAssembler |
java | quarkusio__quarkus | extensions/agroal/deployment/src/test/java/io/quarkus/agroal/test/MultipleDataSourcesTestUtil.java | {
"start": 323,
"end": 1308
} | class ____ {
private MultipleDataSourcesTestUtil() {
}
static void testDataSource(String dataSourceName, AgroalDataSource dataSource, String jdbcUrl, String username,
int maxSize)
throws SQLException {
AgroalConnectionPoolConfiguration configuration = null;
try {
configuration = dataSource.getConfiguration().connectionPoolConfiguration();
} catch (NullPointerException e) {
// we catch the NPE here as we have a proxy and we can't test dataSource directly
fail("Datasource " + dataSourceName + " should not be null");
}
assertEquals(jdbcUrl, configuration.connectionFactoryConfiguration().jdbcUrl());
assertEquals(username, configuration.connectionFactoryConfiguration().principal().getName());
assertEquals(maxSize, configuration.maxSize());
try (Connection connection = dataSource.getConnection()) {
}
}
}
| MultipleDataSourcesTestUtil |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/matching/EndingSlashTest.java | {
"start": 600,
"end": 1529
} | class ____ {
@RegisterExtension
static ResteasyReactiveUnitTest test = new ResteasyReactiveUnitTest()
.setArchiveProducer(new Supplier<>() {
@Override
public JavaArchive get() {
return ShrinkWrap.create(JavaArchive.class)
.addClasses(TestResource.class);
}
});
@Test
public void test() {
get("/hello/world")
.then()
.statusCode(200)
.body(equalTo("Hello World!"));
get("/hello/world/")
.then()
.statusCode(200)
.body(equalTo("Hello World!"));
get("/hello/world/1")
.then()
.statusCode(404);
get("/hello/world/22")
.then()
.statusCode(404);
}
@Path("/hello")
public static | EndingSlashTest |
java | spring-projects__spring-boot | core/spring-boot-docker-compose/src/main/java/org/springframework/boot/docker/compose/core/DockerCliCommand.java | {
"start": 4239,
"end": 4518
} | class ____ extends DockerCliCommand<DockerCliComposeConfigResponse> {
ComposeConfig() {
super(Type.DOCKER_COMPOSE, DockerCliComposeConfigResponse.class, false, "config", "--format=json");
}
}
/**
* The {@code docker compose ps} command.
*/
static final | ComposeConfig |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/ingest/CompoundProcessorTests.java | {
"start": 1267,
"end": 32251
} | class ____ extends ESTestCase {
private IngestDocument ingestDocument;
@Before
public void init() {
ingestDocument = TestIngestDocument.emptyIngestDocument();
}
// need to (randomly?) mix sync and async processors
// verify that sync execute method throws
public void testEmpty() throws Exception {
CompoundProcessor processor = new CompoundProcessor();
assertThat(processor.getProcessors().isEmpty(), is(true));
assertThat(processor.getOnFailureProcessors().isEmpty(), is(true));
executeCompound(processor, ingestDocument, (result, e) -> {});
}
public void testSingleProcessor() throws Exception {
boolean isAsync = randomBoolean();
LongSupplier relativeTimeProvider = mock(LongSupplier.class);
when(relativeTimeProvider.getAsLong()).thenReturn(0L, TimeUnit.MILLISECONDS.toNanos(1));
TestProcessor processor = new TestProcessor(ingestDocument -> {
assertStats(0, ingestDocument.getFieldValue("compoundProcessor", CompoundProcessor.class), 1, 0, 0, 0);
}) {
@Override
public boolean isAsync() {
return isAsync;
}
};
CompoundProcessor compoundProcessor = new CompoundProcessor(false, List.of(processor), List.of(), relativeTimeProvider);
ingestDocument.setFieldValue("compoundProcessor", compoundProcessor); // ugly hack to assert current count = 1
assertThat(compoundProcessor.getProcessors().size(), equalTo(1));
assertThat(compoundProcessor.getProcessors().get(0), sameInstance(processor));
assertThat(compoundProcessor.getProcessors().get(0), sameInstance(processor));
assertThat(compoundProcessor.getOnFailureProcessors().isEmpty(), is(true));
executeCompound(compoundProcessor, ingestDocument, (result, e) -> {});
verify(relativeTimeProvider, times(2)).getAsLong();
assertThat(processor.getInvokedCounter(), equalTo(1));
assertStats(compoundProcessor, 1, 0, 1);
}
public void testSingleProcessorWithException() throws Exception {
TestProcessor processor = new TestProcessor(new RuntimeException("error"));
LongSupplier relativeTimeProvider = mock(LongSupplier.class);
when(relativeTimeProvider.getAsLong()).thenReturn(0L);
CompoundProcessor compoundProcessor = new CompoundProcessor(false, List.of(processor), List.of(), relativeTimeProvider);
assertThat(compoundProcessor.getProcessors().size(), equalTo(1));
assertThat(compoundProcessor.getProcessors().get(0), sameInstance(processor));
assertThat(compoundProcessor.getOnFailureProcessors().isEmpty(), is(true));
Exception[] holder = new Exception[1];
executeCompound(compoundProcessor, ingestDocument, (result, e) -> holder[0] = e);
assertThat(((ElasticsearchException) holder[0]).getRootCause().getMessage(), equalTo("error"));
assertThat(processor.getInvokedCounter(), equalTo(1));
assertStats(compoundProcessor, 1, 1, 0);
}
public void testIgnoreFailure() throws Exception {
TestProcessor processor1 = getTestProcessor(null, randomBoolean(), true);
TestProcessor processor2 = new TestProcessor(ingestDocument -> { ingestDocument.setFieldValue("field", "value"); });
TestProcessor processor3 = new TestProcessor(ingestDocument -> fail("ignoreFailure is true, processor shouldn't be called"));
LongSupplier relativeTimeProvider = mock(LongSupplier.class);
when(relativeTimeProvider.getAsLong()).thenReturn(0L);
CompoundProcessor compoundProcessor = new CompoundProcessor(
true,
List.of(processor1, processor2),
List.of(processor3), // when ignoreFailure is true, onFailureProcessors are not called (regardless of whether a failure occurs)
relativeTimeProvider
);
executeCompound(compoundProcessor, ingestDocument, (result, e) -> {});
assertThat(processor1.getInvokedCounter(), equalTo(1));
assertStats(0, compoundProcessor, 0, 1, 1, 0);
assertThat(processor2.getInvokedCounter(), equalTo(1));
assertStats(1, compoundProcessor, 0, 1, 0, 0);
assertThat(ingestDocument.getFieldValue("field", String.class), equalTo("value"));
}
public void testSingleProcessorWithOnFailureProcessor() throws Exception {
TestProcessor processor1 = new TestProcessor("id", "first", null, new RuntimeException("error"));
TestProcessor processor2 = new TestProcessor(ingestDocument -> {
Map<String, Object> ingestMetadata = ingestDocument.getIngestMetadata();
assertThat(ingestMetadata.size(), equalTo(3));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("error"));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("first"));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("id"));
});
LongSupplier relativeTimeProvider = mock(LongSupplier.class);
when(relativeTimeProvider.getAsLong()).thenReturn(0L, TimeUnit.MILLISECONDS.toNanos(1));
CompoundProcessor compoundProcessor = new CompoundProcessor(false, List.of(processor1), List.of(processor2), relativeTimeProvider);
executeCompound(compoundProcessor, ingestDocument, (result, e) -> {});
verify(relativeTimeProvider, times(2)).getAsLong();
assertThat(processor1.getInvokedCounter(), equalTo(1));
assertStats(compoundProcessor, 1, 1, 1);
assertThat(processor2.getInvokedCounter(), equalTo(1));
}
public void testSingleProcessorWithOnFailureDropProcessor() throws Exception {
TestProcessor processor1 = new TestProcessor("id", "first", null, new RuntimeException("error"));
Processor processor2 = new Processor() {
@Override
public IngestDocument execute(IngestDocument ingestDocument) throws Exception {
// Simulates the drop processor
return null;
}
@Override
public String getType() {
return "drop";
}
@Override
public String getTag() {
return null;
}
@Override
public String getDescription() {
return null;
}
};
LongSupplier relativeTimeProvider = mock(LongSupplier.class);
when(relativeTimeProvider.getAsLong()).thenReturn(0L);
CompoundProcessor compoundProcessor = new CompoundProcessor(false, List.of(processor1), List.of(processor2), relativeTimeProvider);
IngestDocument[] result = new IngestDocument[1];
executeCompound(compoundProcessor, ingestDocument, (r, e) -> result[0] = r);
assertThat(result[0], nullValue());
assertThat(processor1.getInvokedCounter(), equalTo(1));
assertStats(compoundProcessor, 1, 1, 0);
}
public void testSingleProcessorWithNestedFailures() throws Exception {
TestProcessor processor = new TestProcessor("id", "first", null, new RuntimeException("error"));
TestProcessor processorToFail = new TestProcessor("id2", "second", null, (Consumer<IngestDocument>) ingestDocument -> {
Map<String, Object> ingestMetadata = ingestDocument.getIngestMetadata();
assertThat(ingestMetadata.size(), equalTo(3));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("error"));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("first"));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("id"));
throw new RuntimeException("error");
});
TestProcessor lastProcessor = new TestProcessor(ingestDocument -> {
Map<String, Object> ingestMetadata = ingestDocument.getIngestMetadata();
assertThat(ingestMetadata.size(), equalTo(3));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("error"));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("second"));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("id2"));
});
LongSupplier relativeTimeProvider = mock(LongSupplier.class);
when(relativeTimeProvider.getAsLong()).thenReturn(0L);
CompoundProcessor compoundOnFailProcessor = new CompoundProcessor(
false,
List.of(processorToFail),
List.of(lastProcessor),
relativeTimeProvider
);
CompoundProcessor compoundProcessor = new CompoundProcessor(
false,
List.of(processor),
List.of(compoundOnFailProcessor),
relativeTimeProvider
);
executeCompound(compoundProcessor, ingestDocument, (result, e) -> {});
assertThat(processorToFail.getInvokedCounter(), equalTo(1));
assertThat(lastProcessor.getInvokedCounter(), equalTo(1));
assertStats(compoundProcessor, 1, 1, 0);
}
public void testNestedOnFailureHandlers() {
LongSupplier relativeTimeProvider = mock(LongSupplier.class);
when(relativeTimeProvider.getAsLong()).thenReturn(0L);
TestProcessor firstFailingProcessor = new TestProcessor("id1", "first", null, new RuntimeException("first failure"));
TestProcessor onFailure1 = new TestProcessor("id2", "second", null, ingestDocument -> {
ingestDocument.setFieldValue("foofield", "exists");
});
TestProcessor onFailure2 = new TestProcessor("id3", "third", null, new RuntimeException("onfailure2"));
TestProcessor onFailure2onFailure = new TestProcessor("id4", "4th", null, ingestDocument -> {
ingestDocument.setFieldValue("foofield2", "ran");
});
CompoundProcessor of2 = new CompoundProcessor(false, List.of(onFailure2), List.of(onFailure2onFailure), relativeTimeProvider);
CompoundProcessor compoundOnFailProcessor = new CompoundProcessor(false, List.of(onFailure1, of2), List.of(), relativeTimeProvider);
CompoundProcessor compoundProcessor = new CompoundProcessor(
false,
List.of(firstFailingProcessor),
List.of(compoundOnFailProcessor),
relativeTimeProvider
);
IngestDocument[] docHolder = new IngestDocument[1];
Exception[] exHolder = new Exception[1];
executeCompound(compoundProcessor, ingestDocument, (result, e) -> {
docHolder[0] = result;
exHolder[0] = e;
});
assertThat(onFailure1.getInvokedCounter(), equalTo(1));
assertThat(onFailure2.getInvokedCounter(), equalTo(1));
assertThat(onFailure2onFailure.getInvokedCounter(), equalTo(1));
assertStats(compoundProcessor, 1, 1, 0);
assertThat(docHolder[0], notNullValue());
assertThat(exHolder[0], nullValue());
assertThat(docHolder[0].getFieldValue("foofield", String.class), equalTo("exists"));
assertThat(docHolder[0].getFieldValue("foofield2", String.class), equalTo("ran"));
}
public void testCompoundProcessorExceptionFailWithoutOnFailure() throws Exception {
TestProcessor firstProcessor = new TestProcessor("id1", "first", null, new RuntimeException("error"));
TestProcessor secondProcessor = new TestProcessor("id3", "second", null, ingestDocument -> {
Map<String, Object> ingestMetadata = ingestDocument.getIngestMetadata();
assertThat(ingestMetadata.entrySet(), hasSize(3));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("error"));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("first"));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("id1"));
});
LongSupplier relativeTimeProvider = mock(LongSupplier.class);
when(relativeTimeProvider.getAsLong()).thenReturn(0L);
CompoundProcessor failCompoundProcessor = new CompoundProcessor(false, List.of(firstProcessor), List.of(), relativeTimeProvider);
CompoundProcessor compoundProcessor = new CompoundProcessor(
false,
List.of(failCompoundProcessor),
List.of(secondProcessor),
relativeTimeProvider
);
executeCompound(compoundProcessor, ingestDocument, (result, e) -> {});
assertThat(firstProcessor.getInvokedCounter(), equalTo(1));
assertThat(secondProcessor.getInvokedCounter(), equalTo(1));
assertStats(compoundProcessor, 1, 1, 0);
}
public void testCompoundProcessorExceptionFail() throws Exception {
TestProcessor firstProcessor = new TestProcessor("id1", "first", null, new RuntimeException("error"));
TestProcessor failProcessor = new TestProcessor("tag_fail", "fail", null, new RuntimeException("custom error message"));
TestProcessor secondProcessor = new TestProcessor("id3", "second", null, ingestDocument -> {
Map<String, Object> ingestMetadata = ingestDocument.getIngestMetadata();
assertThat(ingestMetadata.entrySet(), hasSize(3));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("custom error message"));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("fail"));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("tag_fail"));
});
LongSupplier relativeTimeProvider = mock(LongSupplier.class);
when(relativeTimeProvider.getAsLong()).thenReturn(0L);
CompoundProcessor failCompoundProcessor = new CompoundProcessor(
false,
List.of(firstProcessor),
List.of(failProcessor),
relativeTimeProvider
);
CompoundProcessor compoundProcessor = new CompoundProcessor(
false,
List.of(failCompoundProcessor),
List.of(secondProcessor),
relativeTimeProvider
);
executeCompound(compoundProcessor, ingestDocument, (result, e) -> {});
assertThat(firstProcessor.getInvokedCounter(), equalTo(1));
assertThat(secondProcessor.getInvokedCounter(), equalTo(1));
assertStats(compoundProcessor, 1, 1, 0);
}
public void testCompoundProcessorExceptionFailInOnFailure() throws Exception {
TestProcessor firstProcessor = new TestProcessor("id1", "first", null, new RuntimeException("error"));
TestProcessor failProcessor = new TestProcessor("tag_fail", "fail", null, new RuntimeException("custom error message"));
TestProcessor secondProcessor = new TestProcessor("id3", "second", null, ingestDocument -> {
Map<String, Object> ingestMetadata = ingestDocument.getIngestMetadata();
assertThat(ingestMetadata.entrySet(), hasSize(3));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("custom error message"));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("fail"));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("tag_fail"));
});
LongSupplier relativeTimeProvider = mock(LongSupplier.class);
when(relativeTimeProvider.getAsLong()).thenReturn(0L);
CompoundProcessor failCompoundProcessor = new CompoundProcessor(
false,
List.of(firstProcessor),
List.of(new CompoundProcessor(false, List.of(failProcessor), List.of(), relativeTimeProvider))
);
CompoundProcessor compoundProcessor = new CompoundProcessor(
false,
List.of(failCompoundProcessor),
List.of(secondProcessor),
relativeTimeProvider
);
executeCompound(compoundProcessor, ingestDocument, (result, e) -> {});
assertThat(firstProcessor.getInvokedCounter(), equalTo(1));
assertThat(secondProcessor.getInvokedCounter(), equalTo(1));
assertStats(compoundProcessor, 1, 1, 0);
}
public void testBreakOnFailure() throws Exception {
TestProcessor firstProcessor = new TestProcessor("id1", "first", null, new RuntimeException("error1"));
TestProcessor secondProcessor = new TestProcessor("id2", "second", null, new RuntimeException("error2"));
TestProcessor onFailureProcessor = new TestProcessor("id2", "on_failure", null, ingestDocument -> {});
LongSupplier relativeTimeProvider = mock(LongSupplier.class);
when(relativeTimeProvider.getAsLong()).thenReturn(0L);
CompoundProcessor pipeline = new CompoundProcessor(
false,
List.of(firstProcessor, secondProcessor),
List.of(onFailureProcessor),
relativeTimeProvider
);
executeCompound(pipeline, ingestDocument, (result, e) -> {});
assertThat(firstProcessor.getInvokedCounter(), equalTo(1));
assertThat(secondProcessor.getInvokedCounter(), equalTo(0));
assertThat(onFailureProcessor.getInvokedCounter(), equalTo(1));
assertStats(pipeline, 1, 1, 0);
}
public void testFailureProcessorIsInvokedOnFailure() {
TestProcessor onFailureProcessor = new TestProcessor(null, "on_failure", null, ingestDocument -> {
Map<String, Object> ingestMetadata = ingestDocument.getIngestMetadata();
assertThat(ingestMetadata.entrySet(), hasSize(5));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("failure!"));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("test-processor"));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD), nullValue());
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PIPELINE_FIELD), equalTo("2"));
assertThat(ingestMetadata.get("pipeline"), equalTo("1"));
});
Pipeline pipeline2 = new Pipeline(
"2",
null,
null,
null,
new CompoundProcessor(new TestProcessor(new RuntimeException("failure!")))
);
Pipeline pipeline1 = new Pipeline("1", null, null, null, new CompoundProcessor(false, List.of(new AbstractProcessor(null, null) {
@Override
public void execute(IngestDocument ingestDocument, BiConsumer<IngestDocument, Exception> handler) {
throw new AssertionError();
}
@Override
public IngestDocument execute(IngestDocument ingestDocument) throws Exception {
IngestDocument[] result = new IngestDocument[1];
Exception[] error = new Exception[1];
ingestDocument.executePipeline(pipeline2, (document, e) -> {
result[0] = document;
error[0] = e;
});
if (error[0] != null) {
throw error[0];
}
return result[0];
}
@Override
public String getType() {
return "pipeline";
}
}), List.of(onFailureProcessor)));
ingestDocument.executePipeline(pipeline1, (document, e) -> {
assertThat(document, notNullValue());
assertThat(e, nullValue());
});
assertThat(onFailureProcessor.getInvokedCounter(), equalTo(1));
}
public void testNewCompoundProcessorException() {
TestProcessor processor = new TestProcessor("my_tag", "my_type", null, new RuntimeException());
IngestProcessorException ingestProcessorException1 = CompoundProcessor.newCompoundProcessorException(
new RuntimeException(),
processor,
ingestDocument
);
assertThat(ingestProcessorException1.getBodyHeader("processor_tag"), equalTo(List.of("my_tag")));
assertThat(ingestProcessorException1.getBodyHeader("processor_type"), equalTo(List.of("my_type")));
assertThat(ingestProcessorException1.getBodyHeader("pipeline_origin"), nullValue());
IngestProcessorException ingestProcessorException2 = CompoundProcessor.newCompoundProcessorException(
ingestProcessorException1,
processor,
ingestDocument
);
assertThat(ingestProcessorException2, sameInstance(ingestProcessorException1));
}
public void testNewCompoundProcessorExceptionPipelineOrigin() {
Pipeline pipeline2 = new Pipeline(
"2",
null,
null,
null,
new CompoundProcessor(new TestProcessor("my_tag", "my_type", null, new RuntimeException()))
);
Pipeline pipeline1 = new Pipeline("1", null, null, null, new CompoundProcessor(new AbstractProcessor(null, null) {
@Override
public IngestDocument execute(IngestDocument ingestDocument) throws Exception {
IngestDocument[] result = new IngestDocument[1];
Exception[] error = new Exception[1];
ingestDocument.executePipeline(pipeline2, (document, e) -> {
result[0] = document;
error[0] = e;
});
if (error[0] != null) {
throw error[0];
}
return result[0];
}
@Override
public void execute(IngestDocument ingestDocument, BiConsumer<IngestDocument, Exception> handler) {
throw new UnsupportedOperationException();
}
@Override
public String getType() {
return "my_type2";
}
}));
Exception[] holder = new Exception[1];
ingestDocument.executePipeline(pipeline1, (document, e) -> holder[0] = e);
IngestProcessorException ingestProcessorException = (IngestProcessorException) holder[0];
assertThat(ingestProcessorException.getBodyHeader("processor_tag"), equalTo(List.of("my_tag")));
assertThat(ingestProcessorException.getBodyHeader("processor_type"), equalTo(List.of("my_type")));
assertThat(ingestProcessorException.getBodyHeader("pipeline_origin"), equalTo(List.of("2", "1")));
}
public void testMultipleProcessors() {
LongSupplier relativeTimeProvider = mock(LongSupplier.class);
when(relativeTimeProvider.getAsLong()).thenReturn(0L, TimeUnit.MILLISECONDS.toNanos(1));
int processorsCount = 1000;
TestProcessor[] processors = new TestProcessor[processorsCount];
for (int i = 0; i < processorsCount; i++) {
processors[i] = getTestProcessor(Integer.toString(i), randomBoolean(), randomBoolean());
}
CompoundProcessor compoundProcessor = new CompoundProcessor(true, List.of(processors), List.of(), relativeTimeProvider);
executeCompound(compoundProcessor, ingestDocument, (result, e) -> {
if (e != null) fail("CompoundProcessor threw exception despite ignoreFailure being true");
});
for (int i = 0; i < processors.length; i++) {
assertThat(
"Processor " + i + " ran " + processors[i].getInvokedCounter() + " times",
processors[i].getInvokedCounter(),
equalTo(1)
);
}
}
public void testMultipleProcessorsDoNotIgnoreFailures() {
LongSupplier relativeTimeProvider = mock(LongSupplier.class);
when(relativeTimeProvider.getAsLong()).thenReturn(0L, TimeUnit.MILLISECONDS.toNanos(1));
int goodProcessorsCount = 100;
int totalProcessorsCount = goodProcessorsCount * 2 + 1;
TestProcessor[] processors = new TestProcessor[totalProcessorsCount];
for (int i = 0; i < goodProcessorsCount; i++) {
processors[i] = getTestProcessor(Integer.toString(i), randomBoolean(), false);
}
processors[goodProcessorsCount] = getTestProcessor(Integer.toString(goodProcessorsCount), randomBoolean(), true);
for (int i = goodProcessorsCount + 1; i < totalProcessorsCount; i++) {
processors[i] = getTestProcessor(Integer.toString(i), randomBoolean(), false);
}
CompoundProcessor compoundProcessor = new CompoundProcessor(false, List.of(processors), List.of(), relativeTimeProvider);
executeCompound(compoundProcessor, ingestDocument, (result, e) -> {});
for (int i = 0; i < goodProcessorsCount + 1; i++) {
assertThat(
"Processor " + i + " ran " + processors[i].getInvokedCounter() + " times",
processors[i].getInvokedCounter(),
equalTo(1)
);
}
for (int i = goodProcessorsCount + 1; i < totalProcessorsCount; i++) {
assertThat(
"Processor " + i + " ran " + processors[i].getInvokedCounter() + " times",
processors[i].getInvokedCounter(),
equalTo(0)
);
}
}
public void testSkipPipeline() {
TestProcessor processor1 = new TestProcessor(doc -> doc.reroute("foo"));
TestProcessor processor2 = new TestProcessor(new RuntimeException("this processor was expected to be skipped"));
LongSupplier relativeTimeProvider = mock(LongSupplier.class);
when(relativeTimeProvider.getAsLong()).thenReturn(0L);
CompoundProcessor compoundProcessor = new CompoundProcessor(
false,
List.of(processor1, processor2),
List.of(),
relativeTimeProvider
);
executeCompound(compoundProcessor, ingestDocument, (result, e) -> {});
assertThat(processor1.getInvokedCounter(), equalTo(1));
assertStats(0, compoundProcessor, 0, 1, 0, 0);
assertThat(processor2.getInvokedCounter(), equalTo(0));
assertStats(1, compoundProcessor, 0, 0, 0, 0);
}
public void testSkipAsyncProcessor() {
TestProcessor processor1 = new TestProcessor(doc -> doc.reroute("foo")) {
@Override
public boolean isAsync() {
return true;
}
};
TestProcessor processor2 = new TestProcessor(new RuntimeException("this processor was expected to be skipped"));
LongSupplier relativeTimeProvider = mock(LongSupplier.class);
when(relativeTimeProvider.getAsLong()).thenReturn(0L);
CompoundProcessor compoundProcessor = new CompoundProcessor(
false,
List.of(processor1, processor2),
List.of(),
relativeTimeProvider
);
executeCompound(compoundProcessor, ingestDocument, (result, e) -> {});
assertThat(processor1.getInvokedCounter(), equalTo(1));
assertStats(0, compoundProcessor, 0, 1, 0, 0);
assertThat(processor2.getInvokedCounter(), equalTo(0));
assertStats(1, compoundProcessor, 0, 0, 0, 0);
}
public void testSkipProcessorIgnoreFailure() {
TestProcessor processor1 = new TestProcessor(doc -> {
doc.reroute("foo");
throw new RuntimeException("simulate processor failure after calling skipCurrentPipeline()");
});
TestProcessor processor2 = new TestProcessor(doc -> {});
LongSupplier relativeTimeProvider = mock(LongSupplier.class);
when(relativeTimeProvider.getAsLong()).thenReturn(0L);
CompoundProcessor compoundProcessor = new CompoundProcessor(true, List.of(processor1, processor2), List.of(), relativeTimeProvider);
executeCompound(compoundProcessor, ingestDocument, (result, e) -> {});
assertThat(processor1.getInvokedCounter(), equalTo(1));
assertStats(0, compoundProcessor, 0, 1, 1, 0);
assertThat(processor2.getInvokedCounter(), equalTo(0));
assertStats(1, compoundProcessor, 0, 0, 0, 0);
}
public void testDontSkipFailureProcessor() {
TestProcessor processor = new TestProcessor(doc -> {
doc.reroute("foo");
throw new RuntimeException("simulate processor failure after calling skipCurrentPipeline()");
});
TestProcessor failureProcessor1 = new TestProcessor(doc -> {});
TestProcessor failureProcessor2 = new TestProcessor(doc -> {});
LongSupplier relativeTimeProvider = mock(LongSupplier.class);
when(relativeTimeProvider.getAsLong()).thenReturn(0L);
CompoundProcessor compoundProcessor = new CompoundProcessor(
false,
List.of(processor),
List.of(failureProcessor1, failureProcessor2),
relativeTimeProvider
);
executeCompound(compoundProcessor, ingestDocument, (result, e) -> {});
assertThat(processor.getInvokedCounter(), equalTo(1));
assertStats(0, compoundProcessor, 0, 1, 1, 0);
assertThat(failureProcessor1.getInvokedCounter(), equalTo(1));
assertThat(failureProcessor2.getInvokedCounter(), equalTo(1));
}
private TestProcessor getTestProcessor(String tag, boolean isAsync, boolean shouldThrowException) {
return new TestProcessor(tag, "test-processor", null, ingestDocument -> {
if (shouldThrowException) throw new RuntimeException("Intentionally failing");
}) {
@Override
public boolean isAsync() {
return isAsync;
}
};
}
// delegates to appropriate sync or async method
private static void executeCompound(CompoundProcessor cp, IngestDocument doc, BiConsumer<IngestDocument, Exception> handler) {
if (cp.isAsync()) {
cp.execute(doc, handler);
} else {
try {
IngestDocument result = cp.execute(doc);
handler.accept(result, null);
} catch (Exception e) {
handler.accept(null, e);
}
}
}
private void assertStats(CompoundProcessor compoundProcessor, long count, long failed, long time) {
assertStats(0, compoundProcessor, 0L, count, failed, time);
}
private void assertStats(int processor, CompoundProcessor compoundProcessor, long current, long count, long failed, long time) {
IngestStats.Stats stats = compoundProcessor.getProcessorsWithMetrics().get(processor).v2().createStats();
assertThat(stats.ingestCount(), equalTo(count));
assertThat(stats.ingestCurrent(), equalTo(current));
assertThat(stats.ingestFailedCount(), equalTo(failed));
assertThat(stats.ingestTimeInMillis(), equalTo(time));
}
}
| CompoundProcessorTests |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/function/array/ArrayConstructorInSelectClauseTest.java | {
"start": 3810,
"end": 4152
} | class ____ {
@Id
private Long id;
private String title;
@ManyToOne
private Author author;
@ManyToOne
private Author coAuthor;
public Book() {
}
public Book(long id, String title, Author author, Author coAuthor) {
this.id = id;
this.title = title;
this.author = author;
this.coAuthor = coAuthor;
}
}
}
| Book |
java | apache__maven | compat/maven-model-builder/src/main/java/org/apache/maven/model/profile/activation/PackagingProfileActivator.java | {
"start": 1412,
"end": 2537
} | class ____ implements ProfileActivator {
@Override
public boolean isActive(
org.apache.maven.model.Profile profile, ProfileActivationContext context, ModelProblemCollector problems) {
return getActivationPackaging(profile).map(p -> isPackaging(context, p)).orElse(false);
}
@Override
public boolean presentInConfig(
org.apache.maven.model.Profile profile, ProfileActivationContext context, ModelProblemCollector problems) {
return getActivationPackaging(profile).isPresent();
}
private static boolean isPackaging(ProfileActivationContext context, String p) {
String packaging = context.getUserProperties().get(ProfileActivationContext.PROPERTY_NAME_PACKAGING);
return Objects.equals(p, packaging);
}
private static Optional<String> getActivationPackaging(org.apache.maven.model.Profile profile) {
return Optional.ofNullable(profile)
.map(org.apache.maven.model.Profile::getDelegate)
.map(Profile::getActivation)
.map(Activation::getPackaging);
}
}
| PackagingProfileActivator |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/config/ConfigData.java | {
"start": 995,
"end": 2017
} | class ____ {
private final Map<String, String> data;
private final Long ttl;
/**
* Creates a new ConfigData with the given data and TTL (in milliseconds).
*
* @param data a Map of key-value pairs
* @param ttl the time-to-live of the data in milliseconds, or null if there is no TTL
*/
public ConfigData(Map<String, String> data, Long ttl) {
this.data = data;
this.ttl = ttl;
}
/**
* Creates a new ConfigData with the given data.
*
* @param data a Map of key-value pairs
*/
public ConfigData(Map<String, String> data) {
this(data, null);
}
/**
* Returns the data.
*
* @return data a Map of key-value pairs
*/
public Map<String, String> data() {
return data;
}
/**
* Returns the TTL (in milliseconds).
*
* @return ttl the time-to-live (in milliseconds) of the data, or null if there is no TTL
*/
public Long ttl() {
return ttl;
}
}
| ConfigData |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/schemaupdate/manytomany/Group.java | {
"start": 222,
"end": 372
} | class ____ implements Serializable {
private Long id;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
}
| Group |
java | apache__camel | components/camel-jq/src/test/java/org/apache/camel/language/jq/JqLanguageValidateTest.java | {
"start": 1040,
"end": 1558
} | class ____ {
@Test
public void testValidateLanguage() throws Exception {
JqLanguage lan = new JqLanguage();
lan.init();
Assertions.assertTrue(lan.validateExpression(". + [{\"array\": body()}]"));
try {
Assertions.assertFalse(lan.validateExpression(". ^^+ [{\"array\": body()}]"));
fail();
} catch (JsonQueryException e) {
Assertions.assertTrue(e.getMessage().startsWith("Cannot compile query"));
}
}
}
| JqLanguageValidateTest |
java | playframework__playframework | testkit/play-test/src/test/java/play/test/TestServerTest.java | {
"start": 295,
"end": 1599
} | class ____ {
@Test
public void shouldReturnHttpPort() {
int testServerPort = play.api.test.Helpers.testServerPort();
final TestServer testServer = Helpers.testServer(testServerPort);
testServer.start();
assertTrue("No value for http port", testServer.getRunningHttpPort().isPresent());
assertFalse(
"https port value is present, but was not set",
testServer.getRunningHttpsPort().isPresent());
assertTrue(
"The os provided http port is not greater than 0",
testServer.getRunningHttpPort().getAsInt() > 0);
testServer.stop();
}
@Test
public void shouldReturnHttpAndHttpsPorts() {
int port = play.api.test.Helpers.testServerPort();
int httpsPort = 0;
final TestServer testServer = Helpers.testServer(port, httpsPort);
testServer.start();
assertTrue("No value for https port", testServer.getRunningHttpsPort().isPresent());
assertTrue(
"The os provided https port is not greater than 0",
testServer.getRunningHttpsPort().getAsInt() > 0);
assertTrue("No value for http port", testServer.getRunningHttpPort().isPresent());
assertTrue(
"The os provided http port is not greater than 0",
testServer.getRunningHttpPort().getAsInt() > 0);
testServer.stop();
}
}
| TestServerTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/InheritanceToOneSubtypeJoinGroupByTest.java | {
"start": 3540,
"end": 3925
} | class ____ {
@EmbeddedId
private WhitelistEntryPK primaryKey;
private String name;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public WhitelistEntryPK getPrimaryKey() {
return primaryKey;
}
public void setPrimaryKey(WhitelistEntryPK primaryKey) {
this.primaryKey = primaryKey;
}
}
}
| WhitelistEntry |
java | netty__netty | codec-socks/src/main/java/io/netty/handler/codec/socksx/v5/Socks5InitialRequest.java | {
"start": 881,
"end": 1055
} | interface ____ extends Socks5Message {
/**
* Returns the list of desired authentication methods.
*/
List<Socks5AuthMethod> authMethods();
}
| Socks5InitialRequest |
java | apache__flink | flink-streaming-java/src/main/java/org/apache/flink/streaming/api/functions/windowing/RichWindowFunction.java | {
"start": 1686,
"end": 1881
} | class ____<IN, OUT, KEY, W extends Window>
extends AbstractRichFunction implements WindowFunction<IN, OUT, KEY, W> {
private static final long serialVersionUID = 1L;
}
| RichWindowFunction |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-router/src/main/java/org/apache/hadoop/yarn/server/router/webapp/RouterWebServices.java | {
"start": 10085,
"end": 39440
} | class ____ {
private RESTRequestInterceptor rootInterceptor;
/**
* Initializes the wrapper with the specified parameters.
*
* @param interceptor the first interceptor in the pipeline
*/
public synchronized void init(RESTRequestInterceptor interceptor) {
this.rootInterceptor = interceptor;
}
/**
* Gets the root request interceptor.
*
* @return the root request interceptor
*/
public synchronized RESTRequestInterceptor getRootInterceptor() {
return rootInterceptor;
}
/**
* Shutdown the chain of interceptors when the object is destroyed.
*/
@Override
protected void finalize() {
rootInterceptor.shutdown();
}
}
@GET
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public ClusterInfo get() {
return getClusterInfo();
}
@GET
@Path(RMWSConsts.INFO)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public ClusterInfo getClusterInfo() {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(null);
return pipeline.getRootInterceptor().getClusterInfo();
}
@GET
@Path(RMWSConsts.CLUSTER_USER_INFO)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public ClusterUserInfo getClusterUserInfo(@Context HttpServletRequest hsr) {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().getClusterUserInfo(hsr);
}
@GET
@Path(RMWSConsts.METRICS)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public ClusterMetricsInfo getClusterMetricsInfo() {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(null);
return pipeline.getRootInterceptor().getClusterMetricsInfo();
}
@GET
@Path(RMWSConsts.SCHEDULER)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public SchedulerTypeInfo getSchedulerInfo() {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(null);
return pipeline.getRootInterceptor().getSchedulerInfo();
}
@POST
@Path(RMWSConsts.SCHEDULER_LOGS)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public String dumpSchedulerLogs(@FormParam(RMWSConsts.TIME) String time,
@Context HttpServletRequest hsr) throws IOException {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().dumpSchedulerLogs(time, hsr);
}
@GET
@Path(RMWSConsts.NODES)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public NodesInfo getNodes(@QueryParam(RMWSConsts.STATES) String states) {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(null);
return pipeline.getRootInterceptor().getNodes(states);
}
@GET
@Path(RMWSConsts.NODES_NODEID)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public NodeInfo getNode(@PathParam(RMWSConsts.NODEID) String nodeId) {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(null);
return pipeline.getRootInterceptor().getNode(nodeId);
}
@POST
@Path(RMWSConsts.NODE_RESOURCE)
@Consumes({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public ResourceInfo updateNodeResource(
@Context HttpServletRequest hsr,
@PathParam(RMWSConsts.NODEID) String nodeId,
ResourceOptionInfo resourceOption) throws AuthorizationException {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(null);
return pipeline.getRootInterceptor().updateNodeResource(
hsr, nodeId, resourceOption);
}
@GET
@Path(RMWSConsts.APPS)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public AppsInfo getApps(@Context HttpServletRequest hsr,
@QueryParam(RMWSConsts.STATE) String stateQuery,
@QueryParam(RMWSConsts.STATES) Set<String> statesQuery,
@QueryParam(RMWSConsts.FINAL_STATUS) String finalStatusQuery,
@QueryParam(RMWSConsts.USER) String userQuery,
@QueryParam(RMWSConsts.QUEUE) String queueQuery,
@QueryParam(RMWSConsts.LIMIT) String count,
@QueryParam(RMWSConsts.STARTED_TIME_BEGIN) String startedBegin,
@QueryParam(RMWSConsts.STARTED_TIME_END) String startedEnd,
@QueryParam(RMWSConsts.FINISHED_TIME_BEGIN) String finishBegin,
@QueryParam(RMWSConsts.FINISHED_TIME_END) String finishEnd,
@QueryParam(RMWSConsts.APPLICATION_TYPES) Set<String> applicationTypes,
@QueryParam(RMWSConsts.APPLICATION_TAGS) Set<String> applicationTags,
@QueryParam(RMWSConsts.NAME) String name,
@QueryParam(RMWSConsts.DESELECTS) Set<String> unselectedFields) {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().getApps(hsr, stateQuery, statesQuery,
finalStatusQuery, userQuery, queueQuery, count, startedBegin,
startedEnd, finishBegin, finishEnd, applicationTypes, applicationTags,
name, unselectedFields);
}
@GET
@Path(RMWSConsts.SCHEDULER_ACTIVITIES)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public ActivitiesInfo getActivities(@Context HttpServletRequest hsr,
@QueryParam(RMWSConsts.NODEID) String nodeId,
@QueryParam(RMWSConsts.GROUP_BY) String groupBy) {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor()
.getActivities(hsr, nodeId, groupBy);
}
@GET
@Path(RMWSConsts.SCHEDULER_BULK_ACTIVITIES)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public BulkActivitiesInfo getBulkActivities(
@Context HttpServletRequest hsr,
@QueryParam(RMWSConsts.GROUP_BY) String groupBy,
@QueryParam(RMWSConsts.ACTIVITIES_COUNT)
@DefaultValue(DEFAULT_ACTIVITIES_COUNT) int activitiesCount)
throws InterruptedException {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().getBulkActivities(hsr, groupBy,
activitiesCount);
}
@GET
@Path(RMWSConsts.SCHEDULER_APP_ACTIVITIES)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public AppActivitiesInfo getAppActivities(@Context HttpServletRequest hsr,
@PathParam(RMWSConsts.APPID) String appId,
@QueryParam(RMWSConsts.MAX_TIME) String time,
@QueryParam(RMWSConsts.REQUEST_PRIORITIES) Set<String> requestPriorities,
@QueryParam(RMWSConsts.ALLOCATION_REQUEST_IDS)
Set<String> allocationRequestIds,
@QueryParam(RMWSConsts.GROUP_BY) String groupBy,
@QueryParam(RMWSConsts.LIMIT) String limit,
@QueryParam(RMWSConsts.ACTIONS) Set<String> actions,
@QueryParam(RMWSConsts.SUMMARIZE) @DefaultValue(DEFAULT_SUMMARIZE)
boolean summarize) {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().getAppActivities(hsr, appId, time,
requestPriorities, allocationRequestIds, groupBy, limit, actions,
summarize);
}
@GET
@Path(RMWSConsts.APP_STATISTICS)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public ApplicationStatisticsInfo getAppStatistics(
@Context HttpServletRequest hsr,
@QueryParam(RMWSConsts.STATES) Set<String> stateQueries,
@QueryParam(RMWSConsts.APPLICATION_TYPES) Set<String> typeQueries) {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().getAppStatistics(hsr, stateQueries,
typeQueries);
}
@GET
@Path(RMWSConsts.APPS_APPID)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public AppInfo getApp(@Context HttpServletRequest hsr,
@PathParam(RMWSConsts.APPID) String appId,
@QueryParam(RMWSConsts.DESELECTS) Set<String> unselectedFields) {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().getApp(hsr, appId, unselectedFields);
}
@GET
@Path(RMWSConsts.APPS_APPID_STATE)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public AppState getAppState(@Context HttpServletRequest hsr,
@PathParam(RMWSConsts.APPID) String appId) throws AuthorizationException {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().getAppState(hsr, appId);
}
@PUT
@Path(RMWSConsts.APPS_APPID_STATE)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public Response updateAppState(AppState targetState,
@Context HttpServletRequest hsr,
@PathParam(RMWSConsts.APPID) String appId) throws AuthorizationException,
YarnException, InterruptedException, IOException {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().updateAppState(targetState, hsr,
appId);
}
@GET
@Path(RMWSConsts.GET_NODE_TO_LABELS)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public NodeToLabelsInfo getNodeToLabels(@Context HttpServletRequest hsr)
throws IOException {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().getNodeToLabels(hsr);
}
@GET
@Path(RMWSConsts.LABEL_MAPPINGS)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public LabelsToNodesInfo getLabelsToNodes(
@QueryParam(RMWSConsts.LABELS) Set<String> labels) throws IOException {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(null);
return pipeline.getRootInterceptor().getLabelsToNodes(labels);
}
@POST
@Path(RMWSConsts.REPLACE_NODE_TO_LABELS)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public Response replaceLabelsOnNodes(
final NodeToLabelsEntryList newNodeToLabels,
@Context HttpServletRequest hsr) throws Exception {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().replaceLabelsOnNodes(newNodeToLabels,
hsr);
}
@POST
@Path(RMWSConsts.NODES_NODEID_REPLACE_LABELS)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public Response replaceLabelsOnNode(
@QueryParam(RMWSConsts.LABELS) Set<String> newNodeLabelsName,
@Context HttpServletRequest hsr,
@PathParam(RMWSConsts.NODEID) String nodeId) throws Exception {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().replaceLabelsOnNode(newNodeLabelsName,
hsr, nodeId);
}
@GET
@Path(RMWSConsts.GET_NODE_LABELS)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public NodeLabelsInfo getClusterNodeLabels(@Context HttpServletRequest hsr)
throws IOException {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().getClusterNodeLabels(hsr);
}
@POST
@Path(RMWSConsts.ADD_NODE_LABELS)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public Response addToClusterNodeLabels(NodeLabelsInfo newNodeLabels,
@Context HttpServletRequest hsr) throws Exception {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().addToClusterNodeLabels(newNodeLabels,
hsr);
}
@POST
@Path(RMWSConsts.REMOVE_NODE_LABELS)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public Response removeFromClusterNodeLabels(
@QueryParam(RMWSConsts.LABELS) Set<String> oldNodeLabels,
@Context HttpServletRequest hsr) throws Exception {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor()
.removeFromClusterNodeLabels(oldNodeLabels, hsr);
}
@GET
@Path(RMWSConsts.NODES_NODEID_GETLABELS)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public NodeLabelsInfo getLabelsOnNode(@Context HttpServletRequest hsr,
@PathParam(RMWSConsts.NODEID) String nodeId) throws IOException {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().getLabelsOnNode(hsr, nodeId);
}
@GET
@Path(RMWSConsts.APPS_APPID_PRIORITY)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public AppPriority getAppPriority(@Context HttpServletRequest hsr,
@PathParam(RMWSConsts.APPID) String appId) throws AuthorizationException {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().getAppPriority(hsr, appId);
}
@PUT
@Path(RMWSConsts.APPS_APPID_PRIORITY)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public Response updateApplicationPriority(AppPriority targetPriority,
@Context HttpServletRequest hsr,
@PathParam(RMWSConsts.APPID) String appId) throws AuthorizationException,
YarnException, InterruptedException, IOException {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor()
.updateApplicationPriority(targetPriority, hsr, appId);
}
@GET
@Path(RMWSConsts.APPS_APPID_QUEUE)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public AppQueue getAppQueue(@Context HttpServletRequest hsr,
@PathParam(RMWSConsts.APPID) String appId) throws AuthorizationException {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().getAppQueue(hsr, appId);
}
@PUT
@Path(RMWSConsts.APPS_APPID_QUEUE)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public Response updateAppQueue(AppQueue targetQueue,
@Context HttpServletRequest hsr,
@PathParam(RMWSConsts.APPID) String appId) throws AuthorizationException,
YarnException, InterruptedException, IOException {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().updateAppQueue(targetQueue, hsr,
appId);
}
@POST
@Path(RMWSConsts.APPS_NEW_APPLICATION)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public Response createNewApplication(@Context HttpServletRequest hsr)
throws AuthorizationException, IOException, InterruptedException {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().createNewApplication(hsr);
}
@POST
@Path(RMWSConsts.APPS)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public Response submitApplication(ApplicationSubmissionContextInfo newApp,
@Context HttpServletRequest hsr)
throws AuthorizationException, IOException, InterruptedException {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().submitApplication(newApp, hsr);
}
@POST
@Path(RMWSConsts.DELEGATION_TOKEN)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public Response postDelegationToken(DelegationToken tokenData,
@Context HttpServletRequest hsr) throws AuthorizationException,
IOException, InterruptedException, Exception {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().postDelegationToken(tokenData, hsr);
}
@POST
@Path(RMWSConsts.DELEGATION_TOKEN_EXPIRATION)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public Response postDelegationTokenExpiration(@Context HttpServletRequest hsr)
throws AuthorizationException, IOException, Exception {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().postDelegationTokenExpiration(hsr);
}
@DELETE
@Path(RMWSConsts.DELEGATION_TOKEN)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public Response cancelDelegationToken(@Context HttpServletRequest hsr)
throws AuthorizationException, IOException, InterruptedException,
Exception {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().cancelDelegationToken(hsr);
}
@POST
@Path(RMWSConsts.RESERVATION_NEW)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public Response createNewReservation(@Context HttpServletRequest hsr)
throws AuthorizationException, IOException, InterruptedException {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().createNewReservation(hsr);
}
@POST
@Path(RMWSConsts.RESERVATION_SUBMIT)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public Response submitReservation(ReservationSubmissionRequestInfo resContext,
@Context HttpServletRequest hsr)
throws AuthorizationException, IOException, InterruptedException {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().submitReservation(resContext, hsr);
}
@POST
@Path(RMWSConsts.RESERVATION_UPDATE)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public Response updateReservation(ReservationUpdateRequestInfo resContext,
@Context HttpServletRequest hsr)
throws AuthorizationException, IOException, InterruptedException {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().updateReservation(resContext, hsr);
}
@POST
@Path(RMWSConsts.RESERVATION_DELETE)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public Response deleteReservation(ReservationDeleteRequestInfo resContext,
@Context HttpServletRequest hsr)
throws AuthorizationException, IOException, InterruptedException {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().deleteReservation(resContext, hsr);
}
@GET
@Path(RMWSConsts.RESERVATION_LIST)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public Response listReservation(
@QueryParam(RMWSConsts.QUEUE) @DefaultValue(DEFAULT_QUEUE) String queue,
@QueryParam(RMWSConsts.RESERVATION_ID)
@DefaultValue(DEFAULT_RESERVATION_ID) String reservationId,
@QueryParam(RMWSConsts.START_TIME) @DefaultValue(DEFAULT_START_TIME) long startTime,
@QueryParam(RMWSConsts.END_TIME) @DefaultValue(DEFAULT_END_TIME) long endTime,
@QueryParam(RMWSConsts.INCLUDE_RESOURCE)
@DefaultValue(DEFAULT_INCLUDE_RESOURCE) boolean includeResourceAllocations,
@Context HttpServletRequest hsr) throws Exception {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().listReservation(queue, reservationId,
startTime, endTime, includeResourceAllocations, hsr);
}
@GET
@Path(RMWSConsts.APPS_TIMEOUTS_TYPE)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public AppTimeoutInfo getAppTimeout(@Context HttpServletRequest hsr,
@PathParam(RMWSConsts.APPID) String appId,
@PathParam(RMWSConsts.TYPE) String type) throws AuthorizationException {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().getAppTimeout(hsr, appId, type);
}
@GET
@Path(RMWSConsts.APPS_TIMEOUTS)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public AppTimeoutsInfo getAppTimeouts(@Context HttpServletRequest hsr,
@PathParam(RMWSConsts.APPID) String appId) throws AuthorizationException {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().getAppTimeouts(hsr, appId);
}
@PUT
@Path(RMWSConsts.APPS_TIMEOUT)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public Response updateApplicationTimeout(AppTimeoutInfo appTimeout,
@Context HttpServletRequest hsr,
@PathParam(RMWSConsts.APPID) String appId) throws AuthorizationException,
YarnException, InterruptedException, IOException {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().updateApplicationTimeout(appTimeout,
hsr, appId);
}
@GET
@Path(RMWSConsts.APPS_APPID_APPATTEMPTS)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public AppAttemptsInfo getAppAttempts(@Context HttpServletRequest hsr,
@PathParam(RMWSConsts.APPID) String appId) {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().getAppAttempts(hsr, appId);
}
@GET
@Path(RMWSConsts.CHECK_USER_ACCESS_TO_QUEUE)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public RMQueueAclInfo checkUserAccessToQueue(
@PathParam(RMWSConsts.QUEUE) String queue,
@QueryParam(RMWSConsts.USER) String username,
@QueryParam(RMWSConsts.QUEUE_ACL_TYPE)
@DefaultValue("SUBMIT_APPLICATIONS") String queueAclType,
@Context HttpServletRequest hsr) throws AuthorizationException {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().checkUserAccessToQueue(queue,
username, queueAclType, hsr);
}
@GET
@Path(RMWSConsts.APPS_APPID_APPATTEMPTS_APPATTEMPTID)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
public org.apache.hadoop.yarn.server.webapp.dao.AppAttemptInfo getAppAttempt(
@Context HttpServletRequest req, @Context HttpServletResponse res,
@PathParam(RMWSConsts.APPID) String appId,
@PathParam(RMWSConsts.APPATTEMPTID) String appAttemptId) {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(req);
return pipeline.getRootInterceptor().getAppAttempt(req, res, appId,
appAttemptId);
}
@GET
@Path(RMWSConsts.APPS_APPID_APPATTEMPTS_APPATTEMPTID_CONTAINERS)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
public ContainersInfo getContainers(@Context HttpServletRequest req,
@Context HttpServletResponse res,
@PathParam(RMWSConsts.APPID) String appId,
@PathParam(RMWSConsts.APPATTEMPTID) String appAttemptId) {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(req);
return pipeline.getRootInterceptor().getContainers(req, res, appId,
appAttemptId);
}
@GET
@Path(RMWSConsts.GET_CONTAINER)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
public ContainerInfo getContainer(@Context HttpServletRequest req,
@Context HttpServletResponse res,
@PathParam(RMWSConsts.APPID) String appId,
@PathParam(RMWSConsts.APPATTEMPTID) String appAttemptId,
@PathParam(RMWSConsts.CONTAINERID) String containerId) {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(req);
return pipeline.getRootInterceptor().getContainer(req, res, appId,
appAttemptId, containerId);
}
@PUT
@Path(RMWSConsts.SCHEDULER_CONF)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Consumes({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
@Override
public Response updateSchedulerConfiguration(SchedConfUpdateInfo mutationInfo,
@Context HttpServletRequest hsr)
throws AuthorizationException, InterruptedException {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor()
.updateSchedulerConfiguration(mutationInfo, hsr);
}
@GET
@Path(RMWSConsts.SCHEDULER_CONF)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public Response getSchedulerConfiguration(HttpServletRequest hsr)
throws AuthorizationException {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().getSchedulerConfiguration(hsr);
}
@VisibleForTesting
protected void setResponse(HttpServletResponse response) {
this.response = response;
}
@POST
@Path(RMWSConsts.SIGNAL_TO_CONTAINER)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
public Response signalToContainer(
@PathParam(RMWSConsts.CONTAINERID) String containerId,
@PathParam(RMWSConsts.COMMAND) String command,
@Context HttpServletRequest req)
throws AuthorizationException {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(req);
return pipeline.getRootInterceptor()
.signalToContainer(containerId, command, req);
}
@GET
@Path(RMWSConsts.GET_RM_NODE_LABELS)
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
public NodeLabelsInfo getRMNodeLabels(@Context HttpServletRequest hsr)
throws IOException {
init();
RequestInterceptorChainWrapper pipeline = getInterceptorChain(hsr);
return pipeline.getRootInterceptor().getRMNodeLabels(hsr);
}
public Router getRouter() {
return router;
}
}
| RequestInterceptorChainWrapper |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/any/annotations/PropertySet.java | {
"start": 775,
"end": 2458
} | class ____ {
private Integer id;
private String name;
private Property someProperty;
private List<Property> generalProperties = new ArrayList<Property>();
public PropertySet() {
super();
}
public PropertySet(String name) {
this.name = name;
}
@ManyToAny
@Column( name = "property_type" )
@AnyKeyJavaClass( Integer.class )
@AnyDiscriminatorValue( discriminator = "S", entity = StringProperty.class )
@AnyDiscriminatorValue( discriminator = "I", entity = IntegerProperty.class )
@Cascade( { CascadeType.ALL } )
@JoinTable(
name = "obj_properties",
joinColumns = @JoinColumn( name = "obj_id" ),
inverseJoinColumns = @JoinColumn( name = "property_id" ) )
public List<Property> getGeneralProperties() {
return generalProperties;
}
public void setGeneralProperties(List<Property> generalProperties) {
this.generalProperties = generalProperties;
}
@Id
@GeneratedValue
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
@Any
@Column( name = "property_type" )
@AnyKeyJavaClass( Integer.class )
@AnyDiscriminatorValue( discriminator = "S", entity = StringProperty.class )
@AnyDiscriminatorValue( discriminator = "I", entity = IntegerProperty.class )
@Cascade( value = { CascadeType.ALL } )
@JoinColumn( name = "property_id" )
public Property getSomeProperty() {
return someProperty;
}
public void setSomeProperty(Property someProperty) {
this.someProperty = someProperty;
}
public void addGeneralProperty(Property property) {
this.generalProperties.add( property );
}
}
| PropertySet |
java | google__dagger | javatests/dagger/internal/codegen/ScopingValidationTest.java | {
"start": 15581,
"end": 15843
} | class ____ { @Inject B() {} }",
"}");
Source simpleScope =
CompilerTests.javaSource(
"test.SimpleScope",
"package test;",
"",
"import javax.inject.Scope;",
"",
"@Scope @ | B |
java | elastic__elasticsearch | modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java | {
"start": 32764,
"end": 36309
// Test HTTP handler simulating a flaky S3 blob download: while fewer than
// maxRetries "failures without progress" have occurred it randomly answers
// with a retryable 5xx, a truncated body, or nothing at all; once the budget
// is spent it serves the requested range (possibly still short-writing it,
// but with enough bytes to count as meaningful progress).
// NOTE(review): maxRetries, bytes and meaningfulProgressBytes are captured
// from the enclosing test method, which is outside this fragment.
class ____ implements HttpHandler {
// Number of responses so far in which the client made no meaningful progress.
private int failuresWithoutProgress;
@Override
public void handle(HttpExchange exchange) throws IOException {
Streams.readFully(exchange.getRequestBody());
if (failuresWithoutProgress >= maxRetries) {
// Failure budget exhausted: honour the Range request and send the
// remainder of the blob (at least meaningfulProgressBytes of it).
final int rangeStart = getRangeStart(exchange);
assertThat(rangeStart, lessThan(bytes.length));
exchange.getResponseHeaders().add("Content-Type", bytesContentType());
final var remainderLength = bytes.length - rangeStart;
exchange.sendResponseHeaders(HttpStatus.SC_OK, remainderLength);
exchange.getResponseBody()
.write(
bytes,
rangeStart,
remainderLength < meaningfulProgressBytes ? remainderLength : between(meaningfulProgressBytes, remainderLength)
);
} else if (randomBoolean()) {
// Fail with a retryable HTTP status and no body.
failuresWithoutProgress += 1;
exchange.sendResponseHeaders(
randomFrom(
HttpStatus.SC_INTERNAL_SERVER_ERROR,
HttpStatus.SC_BAD_GATEWAY,
HttpStatus.SC_SERVICE_UNAVAILABLE,
HttpStatus.SC_GATEWAY_TIMEOUT
),
-1
);
exchange.getResponseBody().flush();
} else if (randomBoolean()) {
// Send a truncated body; only count it as a failure if the client
// received less than the meaningful-progress threshold.
final var bytesSent = sendIncompleteContent(exchange, bytes);
if (bytesSent < meaningfulProgressBytes) {
failuresWithoutProgress += 1;
}
} else {
// Send nothing at all before closing the exchange.
failuresWithoutProgress += 1;
}
exchange.getResponseBody().flush();
exchange.close();
}
}
httpServer.createContext(downloadStorageEndpoint(blobContainer, "read_blob_max_retries"), new FlakyReadHandler());
try (InputStream inputStream = blobContainer.readBlob(randomRetryingPurpose(), "read_blob_max_retries")) {
final int readLimit;
final InputStream wrappedStream;
if (randomBoolean()) {
// read stream only partly
readLimit = randomIntBetween(0, bytes.length);
wrappedStream = Streams.limitStream(inputStream, readLimit);
} else {
readLimit = bytes.length;
wrappedStream = inputStream;
}
final byte[] bytesRead = BytesReference.toBytes(Streams.readFully(wrappedStream));
assertArrayEquals(Arrays.copyOfRange(bytes, 0, readLimit), bytesRead);
}
}
public void testReadDoesNotRetryForRepositoryAnalysis() {
final int maxRetries = between(0, 5);
final int bufferSizeBytes = scaledRandomIntBetween(
0,
randomFrom(1000, Math.toIntExact(S3Repository.BUFFER_SIZE_SETTING.get(Settings.EMPTY).getBytes()))
);
final BlobContainer blobContainer = blobContainerBuilder().maxRetries(maxRetries)
.disableChunkedEncoding(true)
.bufferSize(ByteSizeValue.ofBytes(bufferSizeBytes))
.build();
final byte[] bytes = randomBlobContent();
@SuppressForbidden(reason = "use a http server")
| FlakyReadHandler |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/codec/tsdb/TSDBSyntheticIdFieldsProducer.java | {
"start": 19367,
"end": 22983
} | enum ____ the first non-matching document
if (tsIdOrd < docValues.getTsIdValueCount()) {
int docID = docValues.findFirstDocWithTsIdOrdinalEqualOrGreaterThan(tsIdOrd);
if (docID != DocIdSetIterator.NO_MORE_DOCS) {
current = new SyntheticTerm(
docID,
tsIdOrd,
docValues.lookupTsIdOrd(tsIdOrd),
docValues.docTimestamp(docID),
docValues.docRoutingHash(docID)
);
return SeekStatus.NOT_FOUND;
}
}
// no docs/terms to iterate on
current = NO_MORE_DOCS;
return SeekStatus.END;
}
// _tsid found, extract the timestamp
final long timestamp = TsidExtractingIdFieldMapper.extractTimestampFromSyntheticId(id);
// Find the first document matching the _tsid
final int startDocID = docValues.findFirstDocWithTsIdOrdinalEqualTo(tsIdOrd);
assert startDocID >= 0 : startDocID;
int docID = startDocID;
int docTsIdOrd = tsIdOrd;
long docTimestamp;
// Iterate over documents to find the first one matching the timestamp
for (; docID < maxDocs; docID++) {
docTimestamp = docValues.docTimestamp(docID);
if (startDocID < docID) {
// After the first doc, we need to check again if _tsid matches
docTsIdOrd = docValues.docTsIdOrdinal(docID);
}
if (docTsIdOrd == tsIdOrd && docTimestamp == timestamp) {
// It's a match!
current = new SyntheticTerm(docID, tsIdOrd, tsId, docTimestamp, docValues.docRoutingHash(docID));
return SeekStatus.FOUND;
}
// Remaining docs don't match, stop here
if (tsIdOrd < docTsIdOrd || docTimestamp < timestamp) {
break;
}
}
current = NO_MORE_DOCS;
return SeekStatus.END;
}
// Returns the synthetic _id term at the current position; ensurePositioned()
// rejects calls made before a successful next()/seek.
@Override
public BytesRef term() {
ensurePositioned();
return current.term();
}
// Always allocates a fresh postings enum for the current synthetic term;
// the reuse and flags arguments are intentionally ignored (each synthetic
// _id maps to the single document captured in `current`).
@Override
public PostingsEnum postings(PostingsEnum reuse, int flags) {
ensurePositioned();
return new SyntheticIdPostingsEnum(current);
}
/**
* Term ordinals are not supported by this synthetic terms enum. This is an optional method as per the
* {@link TermsEnum#ord()} documentation.
* This method always throws an {@link UnsupportedOperationException}.
*/
@Override
public long ord() {
throw unsupportedException();
}
/**
* Seeking by ordinal is not supported by this synthetic terms enum. This is an optional method as per the
* {@link TermsEnum#seekExact(long)} documentation.
* This method always throws an {@link UnsupportedOperationException}.
*/
@Override
public void seekExact(long ord) {
throw unsupportedException();
}
// NOTE(review): a synthetic _id term identifies exactly one document, so a
// docFreq of 1 might be expected; returning 0 presumably means no caller on
// this path consults the value — confirm before relying on it.
@Override
public int docFreq() throws IOException {
return 0;
}
// NOTE(review): returns 0 rather than a real frequency; presumably unused by
// callers of this synthetic enum — confirm.
@Override
public long totalTermFreq() throws IOException {
return 0;
}
// NOTE(review): returning null will NPE in any caller that requests impacts;
// presumably scoring is never performed on the synthetic _id field — confirm.
@Override
public ImpactsEnum impacts(int flags) throws IOException {
return null;
}
}
private | on |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/LoopCopyPropagateVariableAsResultTestTest.java | {
"start": 973,
"end": 2027
// Verifies loop(3).copy() semantics: each iteration runs on a copy of the
// original exchange (so every iteration appends "B" to the original body "A",
// yielding "AB" three times), while the "foo" variable set inside the loop
// still propagates to the final result.
class ____ extends ContextTestSupport {
@Test
public void testLoopCopy() throws Exception {
// All three copied iterations see body "AB" and variable foo == "AB".
getMockEndpoint("mock:loop").expectedBodiesReceived("AB", "AB", "AB");
getMockEndpoint("mock:loop").expectedVariableReceived("foo", "AB");
// The result after the loop also carries body "AB" and foo == "AB".
getMockEndpoint("mock:result").expectedBodiesReceived("AB");
getMockEndpoint("mock:result").expectedVariableReceived("foo", "AB");
template.sendBody("direct:start", "A");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
// loop(3).copy(): iterate three times, each time on a fresh copy
// of the incoming exchange rather than the mutated previous one.
from("direct:start")
.loop(3).copy()
.transform(body().append("B"))
.setVariable("foo", simple("${body}"))
.to("mock:loop")
.end()
.to("mock:result");
}
};
}
}
| LoopCopyPropagateVariableAsResultTestTest |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.