language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
apache__kafka
|
raft/src/test/java/org/apache/kafka/raft/internals/RecordsBatchReaderTest.java
|
{
"start": 2068,
"end": 5833
}
|
class ____ {
private static final int MAX_BATCH_BYTES = 128;
private final StringSerde serde = new StringSerde();
@ParameterizedTest
@EnumSource(CompressionType.class)
public void testReadFromMemoryRecords(CompressionType compressionType) {
long seed = 57;
List<TestBatch<String>> batches = RecordsIteratorTest.createBatches(seed);
long baseOffset = batches.get(0).baseOffset;
MemoryRecords memRecords = RecordsIteratorTest.buildRecords(compressionType, batches);
testBatchReader(baseOffset, memRecords, batches);
}
@ParameterizedTest
@EnumSource(CompressionType.class)
public void testReadFromFileRecords(CompressionType compressionType) throws Exception {
long seed = 57;
List<TestBatch<String>> batches = RecordsIteratorTest.createBatches(seed);
long baseOffset = batches.get(0).baseOffset;
MemoryRecords memRecords = RecordsIteratorTest.buildRecords(compressionType, batches);
FileRecords fileRecords = FileRecords.open(tempFile());
fileRecords.append(memRecords);
testBatchReader(baseOffset, fileRecords, batches);
}
@Test
public void testLeaderChangeControlBatch() {
// Confirm that the RecordsBatchReader is able to iterate over control batches
MemoryRecords records = RecordsIteratorTest.buildControlRecords(ControlRecordType.LEADER_CHANGE);
ControlRecord expectedRecord = ControlRecord.of(new LeaderChangeMessage());
try (RecordsBatchReader<String> reader = RecordsBatchReader.of(
0,
records,
serde,
BufferSupplier.NO_CACHING,
MAX_BATCH_BYTES,
ignore -> { },
true,
new LogContext()
)
) {
assertTrue(reader.hasNext());
assertEquals(List.of(expectedRecord), reader.next().controlRecords());
assertFalse(reader.hasNext());
}
}
private void testBatchReader(
long baseOffset,
Records records,
List<TestBatch<String>> expectedBatches
) {
BufferSupplier bufferSupplier = Mockito.mock(BufferSupplier.class);
Set<ByteBuffer> allocatedBuffers = Collections.newSetFromMap(new IdentityHashMap<>());
Mockito.when(bufferSupplier.get(Mockito.anyInt())).thenAnswer(invocation -> {
int size = invocation.getArgument(0);
ByteBuffer buffer = ByteBuffer.allocate(size);
allocatedBuffers.add(buffer);
return buffer;
});
Mockito.doAnswer(invocation -> {
ByteBuffer released = invocation.getArgument(0);
allocatedBuffers.remove(released);
return null;
}).when(bufferSupplier).release(Mockito.any(ByteBuffer.class));
@SuppressWarnings("unchecked")
CloseListener<BatchReader<String>> closeListener = Mockito.mock(CloseListener.class);
RecordsBatchReader<String> reader = RecordsBatchReader.of(
baseOffset,
records,
serde,
bufferSupplier,
MAX_BATCH_BYTES,
closeListener,
true,
new LogContext()
);
try {
for (TestBatch<String> batch : expectedBatches) {
assertTrue(reader.hasNext());
assertEquals(batch, TestBatch.from(reader.next()));
}
assertFalse(reader.hasNext());
assertThrows(NoSuchElementException.class, reader::next);
} finally {
reader.close();
}
Mockito.verify(closeListener).onClose(reader);
assertEquals(Set.of(), allocatedBuffers);
}
}
|
RecordsBatchReaderTest
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/lookup/StrMatcher.java
|
{
"start": 1226,
"end": 8995
}
|
class ____ {
/**
* Matches the comma character.
*/
private static final StrMatcher COMMA_MATCHER = new CharMatcher(',');
/**
* Matches the tab character.
*/
private static final StrMatcher TAB_MATCHER = new CharMatcher(Chars.TAB);
/**
* Matches the space character.
*/
private static final StrMatcher SPACE_MATCHER = new CharMatcher(Chars.SPACE);
/**
* Matches the same characters as StringTokenizer,
* namely space, tab, newline, formfeed.
*/
private static final StrMatcher SPLIT_MATCHER = new CharSetMatcher(" \t\n\r\f".toCharArray());
/**
* Matches the String trim() whitespace characters.
*/
private static final StrMatcher TRIM_MATCHER = new TrimMatcher();
/**
* Matches the double quote character.
*/
private static final StrMatcher SINGLE_QUOTE_MATCHER = new CharMatcher(Chars.QUOTE);
/**
* Matches the double quote character.
*/
private static final StrMatcher DOUBLE_QUOTE_MATCHER = new CharMatcher(Chars.DQUOTE);
/**
* Matches the single or double quote character.
*/
private static final StrMatcher QUOTE_MATCHER = new CharSetMatcher("'\"".toCharArray());
/**
* Matches no characters.
*/
private static final StrMatcher NONE_MATCHER = new NoMatcher();
/**
* Constructor.
*/
protected StrMatcher() {}
/**
* Returns a matcher which matches the comma character.
*
* @return a matcher for a comma
*/
public static StrMatcher commaMatcher() {
return COMMA_MATCHER;
}
/**
* Returns a matcher which matches the tab character.
*
* @return a matcher for a tab
*/
public static StrMatcher tabMatcher() {
return TAB_MATCHER;
}
/**
* Returns a matcher which matches the space character.
*
* @return a matcher for a space
*/
public static StrMatcher spaceMatcher() {
return SPACE_MATCHER;
}
/**
* Matches the same characters as StringTokenizer,
* namely space, tab, newline and formfeed.
*
* @return the split matcher
*/
public static StrMatcher splitMatcher() {
return SPLIT_MATCHER;
}
/**
* Matches the String trim() whitespace characters.
*
* @return the trim matcher
*/
public static StrMatcher trimMatcher() {
return TRIM_MATCHER;
}
/**
* Returns a matcher which matches the single quote character.
*
* @return a matcher for a single quote
*/
public static StrMatcher singleQuoteMatcher() {
return SINGLE_QUOTE_MATCHER;
}
/**
* Returns a matcher which matches the double quote character.
*
* @return a matcher for a double quote
*/
public static StrMatcher doubleQuoteMatcher() {
return DOUBLE_QUOTE_MATCHER;
}
/**
* Returns a matcher which matches the single or double quote character.
*
* @return a matcher for a single or double quote
*/
public static StrMatcher quoteMatcher() {
return QUOTE_MATCHER;
}
/**
* Matches no characters.
*
* @return a matcher that matches nothing
*/
public static StrMatcher noneMatcher() {
return NONE_MATCHER;
}
/**
* Constructor that creates a matcher from a character.
*
* @param ch the character to match, must not be null
* @return a new Matcher for the given char
*/
public static StrMatcher charMatcher(final char ch) {
return new CharMatcher(ch);
}
/**
* Constructor that creates a matcher from a set of characters.
*
* @param chars the characters to match, null or empty matches nothing
* @return a new matcher for the given char[]
*/
public static StrMatcher charSetMatcher(final char[] chars) {
if (chars == null || chars.length == 0) {
return NONE_MATCHER;
}
if (chars.length == 1) {
return new CharMatcher(chars[0]);
}
return new CharSetMatcher(chars);
}
/**
* Constructor that creates a matcher from a string representing a set of characters.
*
* @param chars the characters to match, null or empty matches nothing
* @return a new Matcher for the given characters
*/
public static StrMatcher charSetMatcher(final String chars) {
if (Strings.isEmpty(chars)) {
return NONE_MATCHER;
}
if (chars.length() == 1) {
return new CharMatcher(chars.charAt(0));
}
return new CharSetMatcher(chars.toCharArray());
}
/**
* Constructor that creates a matcher from a string.
*
* @param str the string to match, null or empty matches nothing
* @return a new Matcher for the given String
*/
public static StrMatcher stringMatcher(final String str) {
if (Strings.isEmpty(str)) {
return NONE_MATCHER;
}
return new StringMatcher(str);
}
/**
* Returns the number of matching characters, zero for no match.
* <p>
* This method is called to check for a match.
* The parameter <code>pos</code> represents the current position to be
* checked in the string <code>buffer</code> (a character array which must
* not be changed).
* The API guarantees that <code>pos</code> is a valid index for <code>buffer</code>.
* <p>
* The character array may be larger than the active area to be matched.
* Only values in the buffer between the specified indices may be accessed.
* <p>
* The matching code may check one character or many.
* It may check characters preceding <code>pos</code> as well as those
* after, so long as no checks exceed the bounds specified.
* <p>
* It must return zero for no match, or a positive number if a match was found.
* The number indicates the number of characters that matched.
*
* @param buffer the text content to match against, do not change
* @param pos the starting position for the match, valid for buffer
* @param bufferStart the first active index in the buffer, valid for buffer
* @param bufferEnd the end index (exclusive) of the active buffer, valid for buffer
* @return the number of matching characters, zero for no match
*/
public abstract int isMatch(char[] buffer, int pos, int bufferStart, int bufferEnd);
/**
* Returns the number of matching characters, zero for no match.
* <p>
* This method is called to check for a match.
* The parameter <code>pos</code> represents the current position to be
* checked in the string <code>buffer</code> (a character array which must
* not be changed).
* The API guarantees that <code>pos</code> is a valid index for <code>buffer</code>.
* <p>
* The matching code may check one character or many.
* It may check characters preceding <code>pos</code> as well as those after.
* <p>
* It must return zero for no match, or a positive number if a match was found.
* The number indicates the number of characters that matched.
*
* @param buffer the text content to match against, do not change
* @param pos the starting position for the match, valid for buffer
* @return the number of matching characters, zero for no match
* @since 2.4
*/
public int isMatch(final char[] buffer, final int pos) {
return isMatch(buffer, pos, 0, buffer.length);
}
// -----------------------------------------------------------------------
/**
* Class used to define a set of characters for matching purposes.
*/
static final
|
StrMatcher
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/core/fs/AutoCloseableRegistry.java
|
{
"start": 1612,
"end": 1727
}
|
class ____ all registered {@link Closeable}s in the reverse registration order.
*/
@ThreadSafe
@Internal
public
|
closes
|
java
|
micronaut-projects__micronaut-core
|
http-server-netty/src/test/groovy/io/micronaut/http/server/netty/http2/Http2PostTest.java
|
{
"start": 1807,
"end": 5514
}
|
class ____ implements TestPropertyProvider {
@Inject
EmbeddedServer embeddedServer;
@Inject
@Client(value = "/", httpVersion = HttpVersion.HTTP_2_0)
HttpClient client;
@Inject
ServerSslBuilder serverSslBuilder;
@Test
void testPost() {
HttpResponse<String> result = Flux.from(client.exchange(HttpRequest.POST("/vertx/demo/testPost", "Request-1")
.contentType(MediaType.TEXT_PLAIN), String.class))
.blockFirst();
Assertions.assertEquals(
"Test succeeded on POST. Received : Request-1",
result.body()
);
result = Flux.from(client.exchange(HttpRequest.POST("/vertx/demo/testPost", "Request-2")
.contentType(MediaType.TEXT_PLAIN), String.class))
.blockFirst();
Assertions.assertEquals(
"Test succeeded on POST. Received : Request-2",
result.body()
);
}
@Test
void testPostVertx() throws ExecutionException, InterruptedException, TimeoutException {
Vertx vertx = Vertx.vertx();
HttpClientOptions options = new HttpClientOptions()
.setProtocolVersion(io.vertx.core.http.HttpVersion.HTTP_2)
.setSsl(true)
// .setLogActivity(true)
.setTrustAll(true).setVerifyHost(false)
.setUseAlpn(true)
.setDefaultHost("localhost")
.setDefaultPort(embeddedServer.getPort());
io.vertx.core.http.HttpClient client = vertx.createHttpClient(options);
// Going to send 2 POST requests. 2nd request will not be succeessful
HttpClientResponse response1 = client.request(HttpMethod.POST, "/vertx/demo/testPost")
.toCompletionStage().toCompletableFuture().get()
.putHeader("content-length", "9")
.send("Request-1")
.onSuccess(resp -> {
// trigger loading body
resp.body();
})
.toCompletionStage().toCompletableFuture().get();
System.out.println("Received response with status code " + response1.statusCode() + " " + response1.version());
Assertions.assertEquals(
"Test succeeded on POST. Received : Request-1",
response1.body().toCompletionStage().toCompletableFuture().get(5, TimeUnit.SECONDS).toString(StandardCharsets.UTF_8)
);
HttpClientResponse response2 = client.request(HttpMethod.POST, "/vertx/demo/testPost")
.toCompletionStage().toCompletableFuture().get()
.putHeader("content-length", "9")
.send("Request-2")
.onSuccess(resp -> {
// trigger loading body
resp.body();
})
.toCompletionStage().toCompletableFuture().get();
System.out.println("Received response with status code " + response2.statusCode() + " " + response2.version());
Assertions.assertEquals(
"Test succeeded on POST. Received : Request-2",
response2.body().toCompletionStage().toCompletableFuture().get(5, TimeUnit.SECONDS).toString(StandardCharsets.UTF_8)
);
}
@NonNull
@Override
public Map<String, String> getProperties() {
return CollectionUtils.mapOf(
"micronaut.ssl.enabled", true,
"micronaut.server.ssl.buildSelfSigned", true,
"micronaut.server.ssl.port", -1,
"micronaut.http.client.ssl.insecure-trust-all-certificates", true
);
}
@Requires(property = "spec.name", value = "Http2PostTest")
@Controller("/vertx/demo")
public static
|
Http2PostTest
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/comparison/configuration/RecursiveComparisonConfiguration_shouldNotEvaluate_Test.java
|
{
"start": 1844,
"end": 16069
}
|
class ____ {
private RecursiveComparisonConfiguration recursiveComparisonConfiguration;
@BeforeEach
void setup() {
recursiveComparisonConfiguration = new RecursiveComparisonConfiguration();
}
@Test
void should_evaluate_all_fields_when_compared_types_are_specified_as_a_value_not_to_compare_could_have_a_field_to_compare() {
// GIVEN
recursiveComparisonConfiguration.compareOnlyFieldsOfTypes(Person.class);
// WHEN
boolean ignored = recursiveComparisonConfiguration.shouldNotEvaluate(dualValue("ceo", new Employee()));
// THEN
then(ignored).isFalse();
}
@ParameterizedTest(name = "{0} should be not be evaluated")
@MethodSource
void should_not_evaluate_actual_null_fields(DualValue dualValue) {
// GIVEN
recursiveComparisonConfiguration.setIgnoreAllActualNullFields(true);
// WHEN
boolean ignored = recursiveComparisonConfiguration.shouldNotEvaluate(dualValue);
// THEN
then(ignored).as("%s should not be evaluated", dualValue).isTrue();
}
private static Stream<Arguments> should_not_evaluate_actual_null_fields() {
return Stream.of(arguments(dualValue(null, "John")),
arguments(dualValue(null, 123)),
arguments(dualValue(null, null)),
arguments(dualValue(null, new Date())));
}
@ParameterizedTest(name = "{0} should not be evaluated")
@MethodSource
void should_not_evaluate_actual_optional_empty_fields(DualValue dualValue) {
// GIVEN
recursiveComparisonConfiguration.setIgnoreAllActualEmptyOptionalFields(true);
// WHEN
boolean ignored = recursiveComparisonConfiguration.shouldNotEvaluate(dualValue);
// THEN
then(ignored).as("%s should not be evaluated", dualValue).isTrue();
}
private static Stream<Arguments> should_not_evaluate_actual_optional_empty_fields() {
return Stream.of(arguments(dualValue(Optional.empty(), "John")),
arguments(dualValue(Optional.empty(), Optional.of("John"))),
arguments(dualValue(OptionalInt.empty(), OptionalInt.of(123))),
arguments(dualValue(OptionalLong.empty(), OptionalLong.of(123L))),
arguments(dualValue(OptionalDouble.empty(), OptionalDouble.of(123.0))));
}
@ParameterizedTest(name = "{0} should not be evaluated")
@MethodSource
void should_not_evaluate_expected_null_fields(DualValue dualValue) {
// GIVEN
recursiveComparisonConfiguration.setIgnoreAllExpectedNullFields(true);
// WHEN
boolean ignored = recursiveComparisonConfiguration.shouldNotEvaluate(dualValue);
// THEN
then(ignored).as("%s should not be evaluated", dualValue).isTrue();
}
private static Stream<Arguments> should_not_evaluate_expected_null_fields() {
return Stream.of(arguments(dualValue("John", null)),
arguments(dualValue(123, null)),
arguments(dualValue(null, null)),
arguments(dualValue(new Date(), null)));
}
@ParameterizedTest(name = "{0} should be ignored with these ignored fields {1}")
@MethodSource
void should_not_evaluate_specified_fields(DualValue dualValue, List<String> ignoredFields) {
// GIVEN
recursiveComparisonConfiguration.ignoreFields(ignoredFields.toArray(new String[0]));
// WHEN
boolean ignored = recursiveComparisonConfiguration.shouldNotEvaluate(dualValue);
// THEN
then(ignored).as("%s should be ignored with these ignored fields %s", dualValue, ignoredFields).isTrue();
}
private static Stream<Arguments> should_not_evaluate_specified_fields() {
return Stream.of(arguments(dualValueWithPath("name"), list("name")),
arguments(dualValueWithPath("name"), list("foo", "name", "foo")),
arguments(dualValueWithPath("name", "first"), list("name.first")),
arguments(dualValueWithPath("name", "[2]", "first"), list("name.first")),
arguments(dualValueWithPath("[0]", "first"), list("first")),
arguments(dualValueWithPath("[1]", "first", "second"), list("first.second")),
arguments(dualValueWithPath("father", "name", "first"), list("father", "name.first", "father.name.first")));
}
@Test
void ignoring_fields_with_regex_does_not_replace_previous_regexes() {
// WHEN
recursiveComparisonConfiguration.ignoreFieldsMatchingRegexes("foo");
recursiveComparisonConfiguration.ignoreFieldsMatchingRegexes("bar", "baz");
// THEN
then(recursiveComparisonConfiguration.getIgnoredFieldsRegexes()).extracting(Pattern::pattern)
.containsExactlyInAnyOrder("foo", "bar", "baz");
}
@ParameterizedTest(name = "{0} should be ignored with these regexes {1}")
@MethodSource
void should_not_evaluate_fields_matching_given_regexes(DualValue dualValue, List<String> regexes) {
// GIVEN
recursiveComparisonConfiguration.ignoreFieldsMatchingRegexes(regexes.toArray(new String[0]));
// WHEN
boolean ignored = recursiveComparisonConfiguration.shouldNotEvaluate(dualValue);
// THEN
then(ignored).as("%s should be ignored with these regexes %s", dualValue, regexes).isTrue();
}
private static Stream<Arguments> should_not_evaluate_fields_matching_given_regexes() {
return Stream.of(arguments(dualValueWithPath("name"), list(".*name")),
arguments(dualValueWithPath("name"), list("foo", "n.m.", "foo")),
arguments(dualValueWithPath("name", "first"), list("name\\.first")),
arguments(dualValueWithPath("name", "first"), list(".*first")),
arguments(dualValueWithPath("name", "first"), list("name.*")),
arguments(dualValueWithPath("name", "[2]", "first"), list("name\\.first")),
arguments(dualValueWithPath("[0]", "first"), list("fir.*")),
arguments(dualValueWithPath("[1]", "first", "second"), list("f..st\\..*nd")),
arguments(dualValueWithPath("father", "name", "first"),
list("father", "name.first", "father\\.name\\.first")));
}
@ParameterizedTest(name = "{0} should not be evaluated")
@MethodSource
void should_not_evaluate_fields(DualValue dualValue) {
// GIVEN
recursiveComparisonConfiguration.ignoreFieldsMatchingRegexes(".*name");
recursiveComparisonConfiguration.ignoreFields("number");
recursiveComparisonConfiguration.ignoreFieldsOfTypes(String.class);
// WHEN
boolean ignored = recursiveComparisonConfiguration.shouldNotEvaluate(dualValue);
// THEN
then(ignored).as("%s should not be evaluated", dualValue).isTrue();
}
private static Stream<Arguments> should_not_evaluate_fields() {
return Stream.of(arguments(dualValueWithPath("name")),
arguments(dualValueWithPath("number")),
arguments(dualValueWithPath("surname")),
arguments(dualValueWithPath("first", "name")),
arguments(new DualValue(randomPath(), "actual", "expected")));
}
@Test
void ignoring_fields_for_types_does_not_replace_previous_ignored_types() {
// WHEN
recursiveComparisonConfiguration.ignoreFieldsOfTypes(UUID.class);
recursiveComparisonConfiguration.ignoreFieldsOfTypes(ZonedDateTime.class, String.class);
// THEN
then(recursiveComparisonConfiguration.getIgnoredTypes()).containsExactlyInAnyOrder(UUID.class, ZonedDateTime.class,
String.class);
}
@ParameterizedTest(name = "{0} should be ignored with these ignored types {1}")
@MethodSource
void should_not_evaluate_fields_of_specified_types(DualValue dualValue, List<Class<?>> ignoredTypes) {
// GIVEN
recursiveComparisonConfiguration.ignoreFieldsOfTypes(ignoredTypes.toArray(new Class<?>[0]));
// WHEN
boolean ignored = recursiveComparisonConfiguration.shouldNotEvaluate(dualValue);
// THEN
then(ignored).as("%s should be ignored with these ignored types %s", dualValue, ignoredTypes)
.isTrue();
}
private static Stream<Arguments> should_not_evaluate_fields_of_specified_types() {
return Stream.of(arguments(new DualValue(randomPath(), "actual", "expected"), list(String.class)),
arguments(new DualValue(randomPath(), randomUUID(), randomUUID()), list(String.class, UUID.class)));
}
@Test
void should_evaluate_field_if_its_type_is_not_ignored() {
// GIVEN
DualValue dualValue = new DualValue(randomPath(), "actual", "expected");
recursiveComparisonConfiguration.ignoreFieldsOfTypes(UUID.class);
// WHEN
boolean ignored = recursiveComparisonConfiguration.shouldNotEvaluate(dualValue);
// THEN
then(ignored).isFalse();
}
@Test
void should_be_able_to_ignore_boolean() {
// GIVEN
DualValue dualValue = new DualValue(randomPath(), true, false);
recursiveComparisonConfiguration.ignoreFieldsOfTypes(boolean.class);
// WHEN
boolean ignored = recursiveComparisonConfiguration.shouldNotEvaluate(dualValue);
// THEN
then(ignored).isTrue();
}
@Test
void should_be_able_to_ignore_byte() {
// GIVEN
DualValue dualValue = new DualValue(randomPath(), (byte) 0, (byte) 1);
recursiveComparisonConfiguration.ignoreFieldsOfTypes(byte.class);
// WHEN
boolean ignored = recursiveComparisonConfiguration.shouldNotEvaluate(dualValue);
// THEN
then(ignored).isTrue();
}
@Test
void should_be_able_to_ignore_char() {
// GIVEN
DualValue dualValue = new DualValue(randomPath(), 'a', 'b');
recursiveComparisonConfiguration.ignoreFieldsOfTypes(char.class);
// WHEN
boolean ignored = recursiveComparisonConfiguration.shouldNotEvaluate(dualValue);
// THEN
then(ignored).isTrue();
}
@Test
void should_be_able_to_ignore_short() {
// GIVEN
DualValue dualValue = new DualValue(randomPath(), (short) 123, (short) 123);
recursiveComparisonConfiguration.ignoreFieldsOfTypes(short.class);
// WHEN
boolean ignored = recursiveComparisonConfiguration.shouldNotEvaluate(dualValue);
// THEN
then(ignored).isTrue();
}
@Test
void should_be_able_to_ignore_int() {
// GIVEN
DualValue dualValue = new DualValue(randomPath(), 123, 123);
recursiveComparisonConfiguration.ignoreFieldsOfTypes(int.class);
// WHEN
boolean ignored = recursiveComparisonConfiguration.shouldNotEvaluate(dualValue);
// THEN
then(ignored).isTrue();
}
@Test
void should_be_able_to_ignore_float() {
// GIVEN
DualValue dualValue = new DualValue(randomPath(), 123.0f, 123.0f);
recursiveComparisonConfiguration.ignoreFieldsOfTypes(float.class);
// WHEN
boolean ignored = recursiveComparisonConfiguration.shouldNotEvaluate(dualValue);
// THEN
then(ignored).isTrue();
}
@Test
void should_be_able_to_ignore_double() {
// GIVEN
DualValue dualValue = new DualValue(randomPath(), 123.0, 123.0);
recursiveComparisonConfiguration.ignoreFieldsOfTypes(double.class);
// WHEN
boolean ignored = recursiveComparisonConfiguration.shouldNotEvaluate(dualValue);
// THEN
then(ignored).isTrue();
}
@ParameterizedTest(name = "{0} should be ignored by specifying to ignore {1}")
@MethodSource
void should_be_able_to_ignore_primitive_field_by_specifying_their_wrapper_type(Object fieldValue, Class<?> wrapperType) {
// GIVEN
DualValue dualValue = new DualValue(randomPath(), fieldValue, fieldValue);
recursiveComparisonConfiguration.ignoreFieldsOfTypes(wrapperType);
// WHEN
boolean ignored = recursiveComparisonConfiguration.shouldNotEvaluate(dualValue);
// THEN
then(ignored).isTrue();
}
private static Stream<Arguments> should_be_able_to_ignore_primitive_field_by_specifying_their_wrapper_type() {
return Stream.of(arguments(false, Boolean.class),
arguments((byte) 0, Byte.class),
arguments('b', Character.class),
arguments(123, Integer.class),
arguments(123.0f, Float.class),
arguments(123.0, Double.class),
arguments((short) 123, Short.class));
}
@Test
void should_return_false_if_the_field_type_is_subtype_of_an_ignored_type() {
// GIVEN
DualValue dualValue = new DualValue(randomPath(), Double.MAX_VALUE, "expected");
recursiveComparisonConfiguration.ignoreFieldsOfTypes(Number.class);
// WHEN
boolean ignored = recursiveComparisonConfiguration.shouldNotEvaluate(dualValue);
// THEN
then(ignored).isFalse();
}
@Test
void should_not_ignore_actual_null_fields_for_specified_types_if_strictTypeChecking_is_disabled() {
// GIVEN
DualValue dualValue = new DualValue(randomPath(), null, "expected");
recursiveComparisonConfiguration.strictTypeChecking(false);
recursiveComparisonConfiguration.ignoreFieldsOfTypes(String.class);
// WHEN
boolean ignored = recursiveComparisonConfiguration.shouldNotEvaluate(dualValue);
// THEN
then(ignored).isFalse();
}
@Test
void should_not_evaluate_actual_null_fields_for_specified_types_if_strictTypeChecking_is_enabled_and_expected_is_not_null() {
// GIVEN
DualValue dualValue = new DualValue(randomPath(), null, "expected");
recursiveComparisonConfiguration.strictTypeChecking(true);
recursiveComparisonConfiguration.ignoreFieldsOfTypes(String.class);
// WHEN
boolean ignored = recursiveComparisonConfiguration.shouldNotEvaluate(dualValue);
// THEN
then(ignored).isTrue();
}
@Test
void should_treat_empty_compared_fields_as_not_restricting_comparison() {
// GIVEN
recursiveComparisonConfiguration.compareOnlyFields();
// WHEN
boolean shouldBeCompared = !recursiveComparisonConfiguration.shouldNotEvaluate(dualValueWithPath("name"));
// THEN
then(shouldBeCompared).isTrue();
}
static DualValue dualValue(Object value1, Object value2) {
return new DualValue(randomPath(), value1, value2);
}
}
|
RecursiveComparisonConfiguration_shouldNotEvaluate_Test
|
java
|
quarkusio__quarkus
|
extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/globals/TemplateGlobalInvalidNameTest.java
|
{
"start": 1763,
"end": 1866
}
|
class ____ {
@TemplateGlobal(name = "-name!")
static String user = "Fu";
}
}
|
Globals
|
java
|
elastic__elasticsearch
|
libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyCheckerImpl.java
|
{
"start": 23192,
"end": 23888
}
|
class ____ for sun.net.www classes as java.base does not export them
private static boolean isFileUrlConnection(java.net.URLConnection urlConnection) {
var connectionClass = urlConnection.getClass();
return "sun.net.www.protocol.file.FileURLConnection".equals(connectionClass.getName());
}
@Override
public void checkURLFileRead(Class<?> callerClass, URL url) {
try {
checkFileRead(callerClass, Paths.get(url.toURI()));
} catch (URISyntaxException e) {
// We expect this method to be called only on File URLs; otherwise the underlying method would fail anyway
throw new RuntimeException(e);
}
}
}
|
names
|
java
|
spring-projects__spring-boot
|
module/spring-boot-actuator/src/main/java/org/springframework/boot/actuate/endpoint/invoke/convert/IsoOffsetDateTimeConverter.java
|
{
"start": 1220,
"end": 1645
}
|
class ____ implements Converter<String, OffsetDateTime> {
@Override
public @Nullable OffsetDateTime convert(String source) {
if (StringUtils.hasLength(source)) {
return OffsetDateTime.parse(source, DateTimeFormatter.ISO_OFFSET_DATE_TIME);
}
return null;
}
public static void registerConverter(ConverterRegistry registry) {
registry.addConverter(new IsoOffsetDateTimeConverter());
}
}
|
IsoOffsetDateTimeConverter
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/htmlunit/server/WebTestClientHtmlUnitDriverBuilderTests.java
|
{
"start": 2837,
"end": 3897
}
|
class ____ {
@GetMapping(path = "/", produces = MediaType.TEXT_HTML_VALUE)
String view(@CookieValue(required = false) String cookieName) {
// @formatter:off
return "<html>\n"
+ "<head>\n"
+ "<title>Hello World</title>\n"
+ "</head>\n"
+ "<body>\n"
+ "<h1>"
+ TextEscapeUtils.escapeEntities(cookieName)
+ "</h1>\n"
+ "</body>\n"
+ "</html>";
// @formatter:on
}
@GetMapping("/cookie")
Mono<Void> setCookie(ServerHttpResponse response) {
response.addCookie(ResponseCookie.from("cookieName", "theCookie").build());
return redirect(response);
}
private Mono<Void> redirect(ServerHttpResponse response) {
response.setStatusCode(HttpStatus.MOVED_PERMANENTLY);
response.getHeaders().setLocation(URI.create("/"));
return response.setComplete();
}
@GetMapping("/cookie/delete")
Mono<Void> deleteCookie(ServerHttpResponse response) {
response.addCookie(ResponseCookie.from("cookieName", "").maxAge(Duration.ofSeconds(0)).build());
return redirect(response);
}
}
}
|
CookieController
|
java
|
netty__netty
|
codec-classes-quic/src/main/java/io/netty/handler/codec/quic/QuicheQuicChannel.java
|
{
"start": 3515,
"end": 3599
}
|
enum ____ {
OPEN,
ACTIVE,
CLOSED
}
private
|
ChannelState
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/objectarrays/ObjectArrays_assertHasOnlyElementsOfType_Test.java
|
{
"start": 1133,
"end": 2595
}
|
class ____ extends ObjectArraysBaseTest {
private static final Object[] arrayOfNumbers = { 6, 7.0, 8L };
@Test
void should_pass_if_actual_has_only_elements_of_the_expected_type() {
arrays.assertHasOnlyElementsOfType(INFO, arrayOfNumbers, Number.class);
}
@Test
void should_fail_if_actual_is_null() {
// WHEN
var error = expectAssertionError(() -> arrays.assertHasOnlyElementsOfType(INFO, null, String.class));
// THEN
then(error).hasMessage(actualIsNull());
}
@Test
void should_throw_exception_if_expected_type_is_null() {
assertThatNullPointerException().isThrownBy(() -> arrays.assertHasOnlyElementsOfType(INFO, arrayOfNumbers, null));
}
@Test
void should_fail_if_one_element_in_actual_does_not_belong_to_the_expected_type() {
// WHEN
var error = expectAssertionError(() -> arrays.assertHasOnlyElementsOfType(INFO, arrayOfNumbers, Long.class));
// THEN
then(error).hasMessage(shouldHaveOnlyElementsOfType(arrayOfNumbers, Long.class, Integer.class).create());
}
@Test
void should_throw_assertion_error_and_not_null_pointer_exception_on_null_elements() {
// GIVEN
Object[] array = array(null, "notNull");
// WHEN
var error = expectAssertionError(() -> arrays.assertHasOnlyElementsOfType(INFO, array, String.class));
// THEN
then(error).hasMessage(shouldHaveOnlyElementsOfType(array, String.class, null).create());
}
}
|
ObjectArrays_assertHasOnlyElementsOfType_Test
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java
|
{
"start": 88325,
"end": 89465
}
|
class ____ implements
SingleArcTransition<JobImpl, JobEvent> {
JobStateInternal terminationState = null;
String jobHistoryString = null;
public InternalTerminationTransition(JobStateInternal stateInternal,
String jobHistoryString) {
this.terminationState = stateInternal;
//mostly a hack for jbhistoryserver
this.jobHistoryString = jobHistoryString;
}
@Override
public void transition(JobImpl job, JobEvent event) {
//TODO Is this JH event required.
job.setFinishTime();
JobUnsuccessfulCompletionEvent failedEvent =
new JobUnsuccessfulCompletionEvent(job.oldJobId,
job.finishTime,
job.succeededMapTaskCount,
job.succeededReduceTaskCount,
job.failedMapTaskCount,
job.failedReduceTaskCount,
job.killedMapTaskCount,
job.killedReduceTaskCount,
jobHistoryString, job.diagnostics);
job.eventHandler.handle(new JobHistoryEvent(job.jobId, failedEvent));
job.finished(terminationState);
}
}
private static
|
InternalTerminationTransition
|
java
|
apache__flink
|
flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/LogicalTypesTest.java
|
{
"start": 40268,
"end": 41497
}
|
enum ____ {
YES,
NO
}
private StructuredType createUserType(
        StructuredRegistered registered,
        StructuredFinal isFinal,
        StructuredClassResolved resolvedClass) {
    // Registered types are keyed by a catalog identifier; unregistered ones by
    // the implementation class (when resolved) or by its fully qualified name.
    final StructuredType.Builder builder;
    if (registered == StructuredRegistered.YES) {
        builder =
                StructuredType.newBuilder(ObjectIdentifier.of("cat", "db", "User"), User.class);
    } else if (resolvedClass == StructuredClassResolved.YES) {
        builder = StructuredType.newBuilder(User.class);
    } else {
        builder = StructuredType.newBuilder(User.class.getName());
    }
    // Fixed attributes/description; finality is the only caller-controlled flag.
    return builder.attributes(
                    Arrays.asList(
                            new StructuredAttribute("setting", UDT_SETTING_TYPE),
                            new StructuredAttribute("timestamp", UDT_TIMESTAMP_TYPE)))
            .description("User type desc.")
            .setFinal(isFinal == StructuredFinal.YES)
            .setInstantiable(true)
            .superType(createHumanType(false))
            .build();
}
@SuppressWarnings("unused")
private abstract static
|
StructuredClassResolved
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/hql/CollectionMapWithComponentValueTest.java
|
{
"start": 9397,
"end": 9644
}
|
class ____ {
	@Id
	@GeneratedValue
	Long id;
	String name;
	// Lazy back-reference to the owning entity.
	@ManyToOne(fetch = FetchType.LAZY)
	BaseTestEntity base;
	// No-arg constructor required by JPA/Hibernate.
	public KeyValue() {
	}
	public KeyValue(String name) {
		this.name = name;
	}
}
@Embeddable
public static
|
KeyValue
|
java
|
processing__processing4
|
app/src/processing/app/syntax/InputHandler.java
|
{
"start": 16286,
"end": 17341
}
|
class ____ implements ActionListener {
  /**
   * Deletes from the caret to the end of the current word; at end of line it
   * deletes the line break instead. Any active selection is removed first.
   */
  public void actionPerformed(ActionEvent evt) {
    JEditTextArea area = getTextArea(evt);
    int selStart = area.getSelectionStart();
    // Collapse an active selection before computing word boundaries.
    if (selStart != area.getSelectionStop()) {
      area.setSelectedText("");
    }
    int caretLine = area.getCaretLine();
    int lineOffset = area.getLineStartOffset(caretLine);
    int column = selStart - lineOffset;
    String text = area.getLineText(area.getCaretLine());
    if (column == text.length()) {
      // At the end of the line: remove the newline, unless we are already at
      // the very end of the document (then just beep).
      if (lineOffset + column == area.getDocumentLength()) {
        area.getToolkit().beep();
        return;
      }
      column++;
    } else {
      // Advance to the end of the current word, honouring the configured
      // non-word-separator characters.
      String noWordSep = (String) area.getDocument().getProperty("noWordSep");
      column = findWordEnd(text, column, noWordSep);
    }
    try {
      area.getDocument().remove(selStart, (column + lineOffset) - selStart);
    } catch (BadLocationException ex) {
      ex.printStackTrace();
    }
  }
}
public static
|
delete_word
|
java
|
dropwizard__dropwizard
|
dropwizard-request-logging/src/main/java/io/dropwizard/request/logging/layout/LogbackAccessRequestLayoutFactory.java
|
{
"start": 363,
"end": 625
}
|
class ____ implements LayoutFactory<IAccessEvent> {
    /** Builds the access-request log layout for the given context and time zone. */
    @Override
    public PatternLayoutBase<IAccessEvent> build(LoggerContext context, TimeZone timeZone) {
        return new LogbackAccessRequestLayout(context, timeZone);
    }
}
|
LogbackAccessRequestLayoutFactory
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/main/java/io/vertx/core/internal/VertxBootstrap.java
|
{
"start": 1129,
"end": 4386
}
|
interface ____ {
  /**
   * @return a fresh bootstrap instance ready for configuration
   */
  static VertxBootstrap create() {
    return new VertxBootstrapImpl();
  }
  /**
   * @return the vertx options
   */
  VertxOptions options();
  /**
   * Set the {@code options} to use.
   *
   * @param options the options instance
   * @return this builder instance
   */
  VertxBootstrap options(VertxOptions options);
  /**
   * Set whether to enable shadow contexts.
   *
   * @param option whether shadow contexts are enabled
   * @return this builder instance
   */
  VertxBootstrap enableShadowContext(boolean option);
  /**
   * Set an event executor {@code provider} to use.
   *
   * @param provider a provider to use
   * @return this builder instance
   */
  VertxBootstrap eventExecutorProvider(EventExecutorProvider provider);
  /**
   * @return the event executor provider to use
   */
  EventExecutorProvider eventExecutorProvider();
  /**
   * @return the {@code FileResolver} instance to use
   */
  FileResolver fileResolver();
  /**
   * Set the {@code FileResolver} instance to use.
   *
   * @param resolver the file resolver
   * @return this builder instance
   */
  VertxBootstrap fileResolver(FileResolver resolver);
  /**
   * @return the tracer factory instance to use
   */
  VertxTracerFactory tracerFactory();
  /**
   * Set the tracer factory to use.
   *
   * @param factory the factory
   * @return this builder instance
   */
  VertxBootstrap tracerFactory(VertxTracerFactory factory);
  /**
   * @return the metrics factory instance to use
   */
  VertxMetricsFactory metricsFactory();
  /**
   * Set the metrics factory instance to use.
   *
   * @param factory the factory
   * @return this builder instance
   */
  VertxBootstrap metricsFactory(VertxMetricsFactory factory);
  /**
   * @return the {@code ExecutorServiceFactory} to use
   */
  ExecutorServiceFactory executorServiceFactory();
  /**
   * Set the {@code ExecutorServiceFactory} instance to use.
   *
   * @param factory the factory
   * @return this builder instance
   */
  VertxBootstrap executorServiceFactory(ExecutorServiceFactory factory);
  /**
   * @return the {@code VertxThreadFactory} to use
   */
  VertxThreadFactory threadFactory();
  /**
   * Set the {@code VertxThreadFactory} instance to use.
   *
   * @param factory the thread factory
   * @return this builder instance
   */
  VertxBootstrap threadFactory(VertxThreadFactory factory);
  /**
   * @return the transport to use
   */
  Transport transport();
  /**
   * Set the transport to use for building Vertx.
   * @param transport the transport
   * @return this builder instance
   */
  // NOTE(review): declares the concrete VertxBootstrapImpl return type while
  // every other setter returns the VertxBootstrap interface — looks
  // unintentional; confirm before narrowing it to the interface.
  VertxBootstrapImpl transport(Transport transport);
  /**
   * @return the cluster manager to use
   */
  ClusterManager clusterManager();
  /**
   * Set the cluster manager to use.
   *
   * @param clusterManager the cluster manager
   * @return this builder instance
   */
  VertxBootstrap clusterManager(ClusterManager clusterManager);
  /**
   * Initialize the service providers.
   *
   * @return this builder instance
   */
  VertxBootstrap init();
  /**
   * Build and return the vertx instance
   */
  Vertx vertx();
  /**
   * Build and return the clustered vertx instance
   */
  Future<Vertx> clusteredVertx();
}
|
VertxBootstrap
|
java
|
grpc__grpc-java
|
okhttp/third_party/okhttp/main/java/io/grpc/okhttp/internal/Platform.java
|
{
"start": 15884,
"end": 18372
}
|
class ____ extends Platform {
private final Method putMethod;
private final Method getMethod;
private final Method removeMethod;
private final Class<?> clientProviderClass;
private final Class<?> serverProviderClass;
  public JdkWithJettyBootPlatform(Method putMethod, Method getMethod, Method removeMethod,
      Class<?> clientProviderClass, Class<?> serverProviderClass, Provider provider) {
    super(provider);
    // Reflective handles into the Jetty ALPN boot API (put/get/remove) plus the
    // client/server provider interfaces used to build the negotiation proxy.
    this.putMethod = putMethod;
    this.getMethod = getMethod;
    this.removeMethod = removeMethod;
    this.clientProviderClass = clientProviderClass;
    this.serverProviderClass = serverProviderClass;
  }
  @Override
  public TlsExtensionType getTlsExtensionType() {
    // Per the constant's name, this platform negotiates via both ALPN and NPN.
    return TlsExtensionType.ALPN_AND_NPN;
  }
@Override public void configureTlsExtensions(
SSLSocket sslSocket, String hostname, List<Protocol> protocols) {
List<String> names = new ArrayList<>(protocols.size());
for (int i = 0, size = protocols.size(); i < size; i++) {
Protocol protocol = protocols.get(i);
if (protocol == Protocol.HTTP_1_0) continue; // No HTTP/1.0 for ALPN.
names.add(protocol.toString());
}
try {
Object provider = Proxy.newProxyInstance(Platform.class.getClassLoader(),
new Class<?>[] { clientProviderClass, serverProviderClass }, new JettyNegoProvider(names));
putMethod.invoke(null, sslSocket, provider);
} catch (InvocationTargetException e) {
throw new AssertionError(e);
} catch (IllegalAccessException e) {
throw new AssertionError(e);
}
}
@Override public void afterHandshake(SSLSocket sslSocket) {
try {
removeMethod.invoke(null, sslSocket);
} catch (IllegalAccessException ignored) {
throw new AssertionError();
} catch (InvocationTargetException ex) {
// This would be very surprising and there's not much to do about it
logger.log(Level.FINE, "Failed to remove SSLSocket from Jetty ALPN", ex);
}
}
@Override public String getSelectedProtocol(SSLSocket socket) {
try {
JettyNegoProvider provider =
(JettyNegoProvider) Proxy.getInvocationHandler(getMethod.invoke(null, socket));
if (!provider.unsupported && provider.selected == null) {
logger.log(Level.INFO, "ALPN callback dropped: SPDY and HTTP/2 are disabled. "
+ "Is alpn-boot on the boot
|
JdkWithJettyBootPlatform
|
java
|
reactor__reactor-core
|
reactor-core/src/test/java/reactor/test/MemoryUtils.java
|
{
"start": 2883,
"end": 3016
}
|
class ____ check that some {@link Tracked} objects (emulating off heap objects)
* are explicitly released.
*/
public static final
|
to
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/derivedidentities/Person.java
|
{
"start": 275,
"end": 437
}
|
class ____ {
	// Social security number doubles as the entity identifier.
	@Id
	private String ssn;
	// Private no-arg constructor required by JPA; not for application use.
	private Person() {
	}
	public Person(String ssn) {
		this.ssn = ssn;
	}
	public String getSsn() {
		return ssn;
	}
}
|
Person
|
java
|
apache__camel
|
core/camel-management/src/test/java/org/apache/camel/management/ManagedRouteStopUsingMBeanAPITest.java
|
{
"start": 1310,
"end": 2686
}
|
class ____ extends ManagementTestSupport {
    /**
     * Verifies that a running route can be stopped through its JMX MBean proxy
     * and that the reported state flips from Started to Stopped.
     */
    @Test
    public void testStopRoute() throws Exception {
        // fire a message to get it running
        getMockEndpoint("mock:result").expectedMessageCount(1);
        template.sendBody("direct:start", "Hello World");
        assertMockEndpointsSatisfied();
        MBeanServer mbeanServer = getMBeanServer();
        // Exactly one route is registered by the route builder below.
        Set<ObjectName> set = mbeanServer.queryNames(new ObjectName("*:type=routes,*"), null);
        assertEquals(1, set.size());
        ObjectName on = set.iterator().next();
        ManagedRouteMBean mbean
                = context.getManagementStrategy().getManagementAgent().newProxyClient(on, ManagedRouteMBean.class);
        // the route has this starting endpoint uri
        assertEquals("direct://start", mbean.getEndpointUri());
        // should be started
        assertEquals(ServiceStatus.Started.name(), mbean.getState(), "Should be started");
        mbean.stop();
        // should be stopped
        assertEquals(ServiceStatus.Stopped.name(), mbean.getState(), "Should be stopped");
    }
    @Override
    protected RouteBuilder createRouteBuilder() {
        // Single route under test: direct:start -> log -> mock:result.
        return new RouteBuilder() {
            @Override
            public void configure() {
                from("direct:start").to("log:foo").to("mock:result");
            }
        };
    }
}
|
ManagedRouteStopUsingMBeanAPITest
|
java
|
quarkusio__quarkus
|
test-framework/common/src/main/java/io/quarkus/test/common/DefaultDockerContainerLauncher.java
|
{
"start": 1443,
"end": 15390
}
|
// Launches the integration-test application inside a container using the
// detected container runtime (docker/podman), wiring ports, env vars, volumes
// and labels from the test framework's init context.
class ____ implements DockerContainerArtifactLauncher {
    private static final Logger log = Logger.getLogger(DefaultDockerContainerLauncher.class);
    private int httpPort;
    private int httpsPort;
    private long waitTimeSeconds;
    private String testProfile;
    private List<String> argLine;
    private Map<String, String> env;
    private ArtifactLauncher.InitContext.DevServicesLaunchResult devServicesLaunchResult;
    private String containerImage;
    private boolean pullRequired;
    private Map<Integer, Integer> additionalExposedPorts;
    private Map<String, String> volumeMounts;
    private Map<String, String> labels;
    private final Map<String, String> systemProps = new HashMap<>();
    private boolean isSsl;
    // Random suffix keeps concurrent test runs from colliding on the name.
    private final String containerName = "quarkus-integration-test-" + RandomStringUtils.insecure().next(5, true, false);
    private String containerRuntimeBinaryName;
    private final ExecutorService executorService = Executors.newSingleThreadExecutor();
    private Optional<String> entryPoint;
    private List<String> programArgs;
    @Override
    public void init(DockerContainerArtifactLauncher.DockerInitContext initContext) {
        // Copy everything we need out of the init context up front.
        this.httpPort = initContext.httpPort();
        this.httpsPort = initContext.httpsPort();
        this.waitTimeSeconds = initContext.waitTime().getSeconds();
        this.testProfile = initContext.testProfile();
        this.argLine = initContext.argLine();
        this.env = initContext.env();
        this.devServicesLaunchResult = initContext.getDevServicesLaunchResult();
        this.containerImage = initContext.containerImage();
        this.pullRequired = initContext.pullRequired();
        this.additionalExposedPorts = initContext.additionalExposedPorts();
        this.volumeMounts = initContext.volumeMounts();
        this.labels = initContext.labels();
        this.entryPoint = initContext.entryPoint();
        this.programArgs = initContext.programArgs();
    }
    // Runs the container to completion and captures its exit code and streams.
    // NOTE(review): the argument-building below largely duplicates start();
    // consider extracting a shared helper to keep the two in sync.
    @Override
    public LaunchResult runToCompletion(String[] argz) {
        try {
            final ContainerRuntimeUtil.ContainerRuntime containerRuntime = ContainerRuntimeUtil.detectContainerRuntime();
            containerRuntimeBinaryName = containerRuntime.getExecutableName();
            if (pullRequired) {
                log.infof("Pulling container image '%s'", containerImage);
                try {
                    int pullResult = new ProcessBuilder().redirectError(DISCARD).redirectOutput(DISCARD)
                            .command(containerRuntimeBinaryName, "pull", containerImage).start().waitFor();
                    if (pullResult > 0) {
                        throw new RuntimeException("Pulling container image '" + containerImage + "' completed unsuccessfully");
                    }
                } catch (InterruptedException e) {
                    // NOTE(review): interrupt status is not restored here
                    // (Thread.currentThread().interrupt()); confirm intentional.
                    throw new RuntimeException("Unable to pull container image '" + containerImage + "'", e);
                }
            }
            System.setProperty("test.url", TestHTTPResourceManager.getUri());
            final List<String> args = new ArrayList<>();
            args.add(containerRuntimeBinaryName);
            args.add("run");
            if (!argLine.isEmpty()) {
                args.addAll(argLine);
            }
            args.add("--name");
            args.add(containerName);
            args.add("-i"); // Interactive, write logs to stdout
            args.add("--rm");
            if (!volumeMounts.isEmpty()) {
                args.addAll(NativeImageBuildLocalContainerRunner.getVolumeAccessArguments(containerRuntime));
            }
            // Unlike start(), ports are only published when explicitly set.
            if (httpPort != 0) {
                args.add("-p");
                args.add(httpPort + ":" + httpPort);
            }
            if (httpsPort != 0) {
                args.add("-p");
                args.add(httpsPort + ":" + httpsPort);
            }
            if (entryPoint.isPresent()) {
                args.add("--entrypoint");
                args.add(entryPoint.get());
            }
            for (Map.Entry<Integer, Integer> entry : additionalExposedPorts.entrySet()) {
                args.add("-p");
                args.add(entry.getKey() + ":" + entry.getValue());
            }
            for (Map.Entry<String, String> entry : volumeMounts.entrySet()) {
                NativeImageBuildLocalContainerRunner.addVolumeParameter(entry.getKey(), entry.getValue(), args,
                        containerRuntime);
            }
            // if the dev services resulted in creating a dedicated network, then use it
            if (devServicesLaunchResult.networkId() != null) {
                args.add("--net=" + devServicesLaunchResult.networkId());
            }
            args.addAll(toEnvVar("quarkus.log.category.\"io.quarkus\".level", "INFO"));
            if (DefaultJarLauncher.HTTP_PRESENT) {
                args.addAll(toEnvVar("quarkus.http.port", "" + httpPort));
                args.addAll(toEnvVar("quarkus.http.ssl-port", "" + httpsPort));
                // This won't be correct when using the random port, but it's really only used by us for the rest client tests
                // in the main module, since those tests hit the application itself
                args.addAll(toEnvVar("test.url", TestHTTPResourceManager.getUri()));
            }
            if (testProfile != null) {
                args.addAll(toEnvVar("quarkus.profile", testProfile));
            }
            for (var e : systemProps.entrySet()) {
                args.addAll(toEnvVar(e.getKey(), e.getValue()));
            }
            for (var e : env.entrySet()) {
                args.addAll(envAsLaunchArg(e.getKey(), e.getValue()));
            }
            for (var e : labels.entrySet()) {
                args.add("--label");
                args.add(e.getKey() + "=" + e.getValue());
            }
            args.add(containerImage);
            args.addAll(programArgs);
            args.addAll(Arrays.asList(argz));
            log.infof("Executing \"%s\"", String.join(" ", args));
            // Drain stderr/stdout on separate threads to avoid pipe deadlock.
            final Process containerProcess = new ProcessBuilder(args).start();
            ProcessReader error = new ProcessReader(containerProcess.getErrorStream());
            ProcessReader stdout = new ProcessReader(containerProcess.getInputStream());
            Thread t1 = new Thread(error, "Error stream reader");
            t1.start();
            Thread t2 = new Thread(stdout, "Stdout stream reader");
            t2.start();
            t1.join();
            t2.join();
            byte[] s = stdout.get();
            byte[] e = error.get();
            return new LaunchResult(containerProcess.waitFor(), s, e);
        } catch (IOException | InterruptedException ex) {
            throw new RuntimeException("Running to completion failed.", ex);
        }
    }
    // Starts the container in the background and waits until the application
    // reports it is listening (via notifier or captured log output).
    @Override
    public void start() throws IOException {
        SmallRyeConfig config = ConfigProvider.getConfig().unwrap(SmallRyeConfig.class);
        LogRuntimeConfig logRuntimeConfig = config.getConfigMapping(LogRuntimeConfig.class);
        final ContainerRuntime containerRuntime = ContainerRuntimeUtil.detectContainerRuntime();
        containerRuntimeBinaryName = containerRuntime.getExecutableName();
        if (pullRequired) {
            log.infof("Pulling container image '%s'", containerImage);
            try {
                int pullResult = new ProcessBuilder().redirectError(DISCARD).redirectOutput(DISCARD)
                        .command(containerRuntimeBinaryName, "pull", containerImage).start().waitFor();
                if (pullResult > 0) {
                    throw new RuntimeException("Pulling container image '" + containerImage + "' completed unsuccessfully");
                }
            } catch (InterruptedException e) {
                // NOTE(review): interrupt status is not restored here either.
                throw new RuntimeException("Unable to pull container image '" + containerImage + "'", e);
            }
        }
        System.setProperty("test.url", TestHTTPResourceManager.getUri());
        // Unlike runToCompletion(), unset ports are replaced with random free ones.
        if (httpPort == 0) {
            httpPort = getRandomPort();
        }
        if (httpsPort == 0) {
            httpsPort = getRandomPort();
        }
        final List<String> args = new ArrayList<>();
        args.add(containerRuntimeBinaryName);
        args.add("run");
        if (!argLine.isEmpty()) {
            args.addAll(argLine);
        }
        args.add("--name");
        args.add(containerName);
        args.add("-i"); // Interactive, write logs to stdout
        args.add("--rm");
        if (!volumeMounts.isEmpty()) {
            args.addAll(NativeImageBuildLocalContainerRunner.getVolumeAccessArguments(containerRuntime));
        }
        args.add("-p");
        args.add(httpPort + ":" + httpPort);
        args.add("-p");
        args.add(httpsPort + ":" + httpsPort);
        if (entryPoint.isPresent()) {
            args.add("--entrypoint");
            args.add(entryPoint.get());
        }
        for (Map.Entry<Integer, Integer> entry : additionalExposedPorts.entrySet()) {
            args.add("-p");
            args.add(entry.getKey() + ":" + entry.getValue());
        }
        for (Map.Entry<String, String> entry : volumeMounts.entrySet()) {
            NativeImageBuildLocalContainerRunner.addVolumeParameter(entry.getKey(), entry.getValue(), args, containerRuntime);
        }
        // if the dev services resulted in creating a dedicated network, then use it
        if (devServicesLaunchResult.networkId() != null) {
            args.add("--net=" + devServicesLaunchResult.networkId());
        }
        args.addAll(toEnvVar("quarkus.log.category.\"io.quarkus\".level", "INFO"));
        if (DefaultJarLauncher.HTTP_PRESENT) {
            args.addAll(toEnvVar("quarkus.http.port", "" + httpPort));
            args.addAll(toEnvVar("quarkus.http.ssl-port", "" + httpsPort));
            // This won't be correct when using the random port, but it's really only used by us for the rest client tests
            // in the main module, since those tests hit the application itself
            args.addAll(toEnvVar("test.url", TestHTTPResourceManager.getUri()));
        }
        if (testProfile != null) {
            args.addAll(toEnvVar("quarkus.profile", testProfile));
        }
        for (var e : systemProps.entrySet()) {
            args.addAll(toEnvVar(e.getKey(), e.getValue()));
        }
        for (var e : env.entrySet()) {
            args.addAll(envAsLaunchArg(e.getKey(), e.getValue()));
        }
        for (var e : labels.entrySet()) {
            args.add("--label");
            args.add(e.getKey() + "=" + e.getValue());
        }
        args.add(containerImage);
        args.addAll(programArgs);
        final Path logPath = logRuntimeConfig.file().path().toPath();
        try {
            Files.deleteIfExists(logPath);
            if (logPath.getParent() != null) {
                Files.createDirectories(logPath.getParent());
            }
        } catch (FileSystemException e) {
            log.warnf("Log file %s deletion failed, could happen on Windows, we can carry on.", logPath);
        }
        log.infof("Executing \"%s\"", String.join(" ", args));
        final Function<IntegrationTestStartedNotifier.Context, IntegrationTestStartedNotifier.Result> startedFunction = createStartedFunction();
        // We rely on the container writing log to stdout. If it just writes to a logfile inside itself, we would have
        // to mount /work/ directory to get quarkus.log.
        final Process containerProcess = new ProcessBuilder(args)
                .redirectErrorStream(true)
                .redirectOutput(ProcessBuilder.Redirect.appendTo(logPath.toFile()))
                .start();
        if (startedFunction != null) {
            final IntegrationTestStartedNotifier.Result result = waitForStartedFunction(startedFunction, containerProcess,
                    waitTimeSeconds, logPath);
            isSsl = result.isSsl();
        } else {
            log.info("Wait for server to start by capturing listening data...");
            final ListeningAddress result = waitForCapturedListeningData(containerProcess, logPath, waitTimeSeconds);
            log.infof("Server started on port %s", result.getPort());
            updateConfigForPort(result.getPort());
            isSsl = result.isSsl();
        }
    }
    // Asks the OS for a free ephemeral port by binding and immediately closing.
    private int getRandomPort() throws IOException {
        try (ServerSocket socket = new ServerSocket(0)) {
            return socket.getLocalPort();
        }
    }
    public boolean listensOnSsl() {
        return isSsl;
    }
    // Extra system properties forwarded to the container as env vars at launch.
    public void includeAsSysProps(Map<String, String> systemProps) {
        this.systemProps.putAll(systemProps);
    }
    private static List<String> envAsLaunchArg(String name, String value) {
        return List.of("--env", String.format("%s=%s", name, value));
    }
    // Converts a config property into a "--env NAME=value" pair; empty for blanks.
    private List<String> toEnvVar(String property, String value) {
        if ((property != null) && (!property.isEmpty())) {
            return envAsLaunchArg(convertPropertyToEnvVar(property), value);
        }
        return Collections.emptyList();
    }
    private String convertPropertyToEnvVar(String property) {
        return StringUtil.replaceNonAlphanumericByUnderscores(property).toUpperCase();
    }
    @Override
    public void close() {
        log.info("Close the container");
        try {
            final Process dockerStopProcess = new ProcessBuilder(containerRuntimeBinaryName, "stop", containerName)
                    .redirectError(DISCARD)
                    .redirectOutput(DISCARD).start();
            log.debug("Wait for container to stop");
            dockerStopProcess.waitFor(10, TimeUnit.SECONDS);
        } catch (IOException | InterruptedException e) {
            log.errorf("Unable to stop container '%s'", containerName);
        }
        log.debug("Container stopped");
        executorService.shutdown();
    }
}
|
DefaultDockerContainerLauncher
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/requests/SaslAuthenticateRequest.java
|
{
"start": 1618,
"end": 3375
}
|
class ____ extends AbstractRequest.Builder<SaslAuthenticateRequest> {
    // Request payload carried through to build().
    private final SaslAuthenticateRequestData data;
    public Builder(SaslAuthenticateRequestData data) {
        super(ApiKeys.SASL_AUTHENTICATE);
        this.data = data;
    }
    @Override
    public SaslAuthenticateRequest build(short version) {
        return new SaslAuthenticateRequest(data, version);
    }
    // Deliberately omits the data: the auth bytes must never be logged
    // (see SaslAuthenticateRequest#toString).
    @Override
    public String toString() {
        return "(type=SaslAuthenticateRequest)";
    }
}
private final SaslAuthenticateRequestData data;
    // Builds a SASL_AUTHENTICATE request for the given protocol version.
    public SaslAuthenticateRequest(SaslAuthenticateRequestData data, short version) {
        super(ApiKeys.SASL_AUTHENTICATE, version);
        this.data = data;
    }
    /** Returns the raw request data (includes the SASL auth bytes). */
    @Override
    public SaslAuthenticateRequestData data() {
        return data;
    }
@Override
public AbstractResponse getErrorResponse(int throttleTimeMs, Throwable e) {
ApiError apiError = ApiError.fromThrowable(e);
SaslAuthenticateResponseData response = new SaslAuthenticateResponseData()
.setErrorCode(apiError.error().code())
.setErrorMessage(apiError.message());
return new SaslAuthenticateResponse(response);
}
    // Deserializes a request of the given version from the readable buffer.
    public static SaslAuthenticateRequest parse(Readable readable, short version) {
        return new SaslAuthenticateRequest(new SaslAuthenticateRequestData(readable, version),
            version);
    }
    // Do not print authBytes, overwrite a temp copy of the data with empty bytes
    // (the SASL payload may carry credentials, so it must never reach logs).
    @Override
    public String toString() {
        SaslAuthenticateRequestData tempData = data.duplicate();
        tempData.setAuthBytes(new byte[0]);
        return tempData.toString();
    }
}
|
Builder
|
java
|
hibernate__hibernate-orm
|
tooling/metamodel-generator/src/main/java/org/hibernate/processor/annotation/AnnotationMetaMap.java
|
{
"start": 277,
"end": 1260
}
|
class ____ extends AnnotationMetaCollection {
	// Fully qualified type of the map key; value/element type lives in the superclass.
	private final String keyType;
	public AnnotationMetaMap(AnnotationMetaEntity parent, Element element, String collectionType,
			String keyType, String elementType) {
		super( parent, element, collectionType, elementType );
		this.keyType = keyType;
	}
	/**
	 * Renders the static metamodel field for this map attribute: a javadoc
	 * header followed by
	 * {@code public static volatile MetaType<Owner, Key, Element> name;}.
	 */
	@Override
	public String getAttributeDeclarationString() {
		final String header = "\n/**\n * Static metamodel for attribute {@link "
				+ parent.getQualifiedName() + "#" + element.getSimpleName() + "}\n **/\n";
		final String declaration = "public static volatile "
				+ parent.importType( getMetaType() )
				+ "<" + parent.importType( parent.getQualifiedName() )
				+ ", " + parent.importType( keyType )
				+ ", " + parent.importType( getTypeDeclaration() )
				+ "> " + getPropertyName() + ";";
		return header + declaration;
	}
}
|
AnnotationMetaMap
|
java
|
apache__flink
|
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/RowType.java
|
{
"start": 2731,
"end": 10119
}
|
class ____ implements Serializable {
    private static final long serialVersionUID = 1L;
    // Serialized field formats: "`name` TYPE ['description']" — part of the
    // persisted string representation, do not change.
    public static final String FIELD_FORMAT_WITH_DESCRIPTION = "%s %s '%s'";
    public static final String FIELD_FORMAT_NO_DESCRIPTION = "%s %s";
    private final String name;
    private final LogicalType type;
    private final @Nullable String description;
    public RowField(String name, LogicalType type, @Nullable String description) {
        this.name = Preconditions.checkNotNull(name, "Field name must not be null.");
        this.type = Preconditions.checkNotNull(type, "Field type must not be null.");
        this.description = description;
    }
    public RowField(String name, LogicalType type) {
        this(name, type, null);
    }
    public String getName() {
        return name;
    }
    public LogicalType getType() {
        return type;
    }
    public Optional<String> getDescription() {
        return Optional.ofNullable(description);
    }
    // Deep-copies the type; name/description are immutable and shared.
    public RowField copy() {
        return new RowField(name, type.copy(), description);
    }
    // Human-readable form; the description (if any) is elided to "...".
    public String asSummaryString() {
        return formatString(type.asSummaryString(), true);
    }
    // Round-trippable form; the description is included, single-quote escaped.
    public String asSerializableString() {
        return formatString(type.asSerializableString(), false);
    }
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        RowField rowField = (RowField) o;
        return name.equals(rowField.name)
                && type.equals(rowField.type)
                && Objects.equals(description, rowField.description);
    }
    @Override
    public int hashCode() {
        return Objects.hash(name, type, description);
    }
    // Shared renderer for both string forms; escapes the identifier and,
    // depending on excludeDescription, elides or escapes the description.
    private String formatString(String typeString, boolean excludeDescription) {
        if (description == null) {
            return String.format(
                    FIELD_FORMAT_NO_DESCRIPTION, escapeIdentifier(name), typeString);
        } else if (excludeDescription) {
            return String.format(
                    FIELD_FORMAT_WITH_DESCRIPTION, escapeIdentifier(name), typeString, "...");
        } else {
            return String.format(
                    FIELD_FORMAT_WITH_DESCRIPTION,
                    escapeIdentifier(name),
                    typeString,
                    escapeSingleQuotes(description));
        }
    }
}
private final List<RowField> fields;
    // Defensively copies and freezes the field list, then validates names
    // (non-blank, unique).
    public RowType(boolean isNullable, List<RowField> fields) {
        super(isNullable, LogicalTypeRoot.ROW);
        this.fields =
                Collections.unmodifiableList(
                        new ArrayList<>(
                                Preconditions.checkNotNull(fields, "Fields must not be null.")));
        validateFields(fields);
    }
    // Convenience constructor; row types are nullable by default.
    public RowType(List<RowField> fields) {
        this(true, fields);
    }
    /** Returns the (unmodifiable) fields in declaration order. */
    public List<RowField> getFields() {
        return fields;
    }
    /** Returns the field names in declaration order. */
    public List<String> getFieldNames() {
        return fields.stream().map(RowField::getName).collect(Collectors.toList());
    }
    /** Returns the type of the field at position {@code i}. */
    public LogicalType getTypeAt(int i) {
        return fields.get(i).getType();
    }
    /** Returns the number of fields. */
    public int getFieldCount() {
        return fields.size();
    }
    /**
     * Returns the index of the field with the given name, or -1 if no such
     * field exists. Linear scan over the field list.
     */
    public int getFieldIndex(String fieldName) {
        for (int i = 0; i < fields.size(); i++) {
            if (fields.get(i).getName().equals(fieldName)) {
                return i;
            }
        }
        return -1;
    }
    // Deep-copies each field while applying the requested nullability.
    @Override
    public LogicalType copy(boolean isNullable) {
        return new RowType(
                isNullable, fields.stream().map(RowField::copy).collect(Collectors.toList()));
    }
    // Human-readable form: field descriptions are elided by RowField.
    @Override
    public String asSummaryString() {
        return withNullability(
                FORMAT,
                fields.stream().map(RowField::asSummaryString).collect(Collectors.joining(", ")));
    }
    // Round-trippable form: field descriptions are fully serialized.
    @Override
    public String asSerializableString() {
        return withNullability(
                FORMAT,
                fields.stream()
                        .map(RowField::asSerializableString)
                        .collect(Collectors.joining(", ")));
    }
    // Input and output conversions share the same class whitelist.
    @Override
    public boolean supportsInputConversion(Class<?> clazz) {
        return INPUT_OUTPUT_CONVERSION.contains(clazz.getName());
    }
    // Mirrors supportsInputConversion: one whitelist for both directions.
    @Override
    public boolean supportsOutputConversion(Class<?> clazz) {
        return INPUT_OUTPUT_CONVERSION.contains(clazz.getName());
    }
    /** Returns the default external class for this logical type. */
    @Override
    public Class<?> getDefaultConversion() {
        return DEFAULT_CONVERSION;
    }
    // Children of a row type are its field types, in declaration order.
    @Override
    public List<LogicalType> getChildren() {
        return Collections.unmodifiableList(
                fields.stream().map(RowField::getType).collect(Collectors.toList()));
    }
    // Visitor entry point (double dispatch).
    @Override
    public <R> R accept(LogicalTypeVisitor<R> visitor) {
        return visitor.visit(this);
    }
    // Equality requires the superclass parts (nullability/root) and an
    // identical field list.
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        if (!super.equals(o)) {
            return false;
        }
        RowType rowType = (RowType) o;
        return fields.equals(rowType.fields);
    }
    // Consistent with equals: combines superclass hash with the field list.
    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), fields);
    }
// --------------------------------------------------------------------------------------------
    // Rejects blank field names and duplicates; called from the constructor.
    private static void validateFields(List<RowField> fields) {
        final List<String> fieldNames =
                fields.stream().map(f -> f.name).collect(Collectors.toList());
        if (fieldNames.stream().anyMatch(StringUtils::isNullOrWhitespaceOnly)) {
            throw new ValidationException(
                    "Field names must contain at least one non-whitespace character.");
        }
        // A name occurring more than once is reported (each duplicate once).
        final Set<String> duplicates =
                fieldNames.stream()
                        .filter(n -> Collections.frequency(fieldNames, n) > 1)
                        .collect(Collectors.toSet());
        if (!duplicates.isEmpty()) {
            throw new ValidationException(
                    String.format("Field names must be unique. Found duplicates: %s", duplicates));
        }
    }
    /** Creates a nullable row type with generated field names (f0, f1, ...). */
    public static RowType of(LogicalType... types) {
        return of(true, types);
    }
    /** Creates a row type with generated field names (f0, f1, ...). */
    public static RowType of(boolean isNullable, LogicalType... types) {
        final List<RowField> fields = new ArrayList<>();
        for (int i = 0; i < types.length; i++) {
            fields.add(new RowField("f" + i, types[i]));
        }
        return new RowType(isNullable, fields);
    }
    /** Creates a nullable row type from positionally matched types and names. */
    public static RowType of(LogicalType[] types, String[] names) {
        return of(true, types, names);
    }
public static RowType of(boolean nullable, LogicalType[] types, String[] names) {
List<RowField> fields = new ArrayList<>();
for (int i = 0; i < types.length; i++) {
fields.add(new RowField(names[i], types[i]));
}
return new RowType(nullable, fields);
}
}
|
RowField
|
java
|
google__guava
|
android/guava/src/com/google/common/primitives/Shorts.java
|
{
"start": 22985,
"end": 26322
}
|
// List view over a sub-range [start, end) of a short[]; writes through to the
// backing array. Overrides below avoid boxing where the AbstractList defaults
// would box heavily.
class ____ extends AbstractList<Short>
    implements RandomAccess, Serializable {
  final short[] array;
  final int start;
  final int end;
  ShortArrayAsList(short[] array) {
    this(array, 0, array.length);
  }
  ShortArrayAsList(short[] array, int start, int end) {
    this.array = array;
    this.start = start;
    this.end = end;
  }
  @Override
  public int size() {
    return end - start;
  }
  @Override
  public boolean isEmpty() {
    // NOTE(review): assumes instances are never created for an empty range
    // (toString() below would also throw for one) — confirm with the factory.
    return false;
  }
  @Override
  public Short get(int index) {
    checkElementIndex(index, size());
    return array[start + index];
  }
  @Override
  public boolean contains(@Nullable Object target) {
    // Overridden to prevent a ton of boxing
    return (target instanceof Short) && Shorts.indexOf(array, (Short) target, start, end) != -1;
  }
  @Override
  public int indexOf(@Nullable Object target) {
    // Overridden to prevent a ton of boxing
    if (target instanceof Short) {
      int i = Shorts.indexOf(array, (Short) target, start, end);
      if (i >= 0) {
        // Translate from backing-array offset to list index.
        return i - start;
      }
    }
    return -1;
  }
  @Override
  public int lastIndexOf(@Nullable Object target) {
    // Overridden to prevent a ton of boxing
    if (target instanceof Short) {
      int i = Shorts.lastIndexOf(array, (Short) target, start, end);
      if (i >= 0) {
        return i - start;
      }
    }
    return -1;
  }
  @Override
  public Short set(int index, Short element) {
    checkElementIndex(index, size());
    short oldValue = array[start + index];
    // checkNotNull for GWT (do not optimize)
    array[start + index] = checkNotNull(element);
    return oldValue;
  }
  @Override
  public List<Short> subList(int fromIndex, int toIndex) {
    int size = size();
    checkPositionIndexes(fromIndex, toIndex, size);
    if (fromIndex == toIndex) {
      return Collections.emptyList();
    }
    // Sub-lists share the backing array (no copy).
    return new ShortArrayAsList(array, start + fromIndex, start + toIndex);
  }
  @Override
  public boolean equals(@Nullable Object object) {
    if (object == this) {
      return true;
    }
    if (object instanceof ShortArrayAsList) {
      // Fast path: compare primitives directly, no boxing or iterators.
      ShortArrayAsList that = (ShortArrayAsList) object;
      int size = size();
      if (that.size() != size) {
        return false;
      }
      for (int i = 0; i < size; i++) {
        if (array[start + i] != that.array[that.start + i]) {
          return false;
        }
      }
      return true;
    }
    // Fall back to the List contract for other list implementations.
    return super.equals(object);
  }
  @Override
  public int hashCode() {
    // Matches List.hashCode() with Short.hashCode per element.
    int result = 1;
    for (int i = start; i < end; i++) {
      result = 31 * result + Short.hashCode(array[i]);
    }
    return result;
  }
  @Override
  public String toString() {
    StringBuilder builder = new StringBuilder(size() * 6);
    builder.append('[').append(array[start]);
    for (int i = start + 1; i < end; i++) {
      builder.append(", ").append(array[i]);
    }
    return builder.append(']').toString();
  }
  short[] toShortArray() {
    return Arrays.copyOfRange(array, start, end);
  }
  @GwtIncompatible @J2ktIncompatible private static final long serialVersionUID = 0;
}
}
|
ShortArrayAsList
|
java
|
spring-projects__spring-framework
|
spring-web/src/main/java/org/springframework/http/codec/JacksonCodecSupport.java
|
{
"start": 3199,
"end": 6175
}
|
class ____: ";
protected final Log logger = HttpLogging.forLogName(getClass());
private final T defaultMapper;
protected @Nullable Map<Class<?>, Map<MimeType, T>> mapperRegistrations;
private final List<MimeType> mimeTypes;
private static volatile @Nullable List<JacksonModule> modules = null;
/**
* Construct a new instance with the provided {@link MapperBuilder builder}
* customized with the {@link tools.jackson.databind.JacksonModule}s found
* by {@link MapperBuilder#findModules(ClassLoader)} and {@link MimeType}s.
*/
protected JacksonCodecSupport(MapperBuilder<T, ?> builder, MimeType... mimeTypes) {
Assert.notNull(builder, "MapperBuilder must not be null");
Assert.notEmpty(mimeTypes, "MimeTypes must not be empty");
this.defaultMapper = builder.addModules(initModules()).build();
this.mimeTypes = List.of(mimeTypes);
}
/**
* Construct a new instance with the provided {@link ObjectMapper}
* customized with the {@link tools.jackson.databind.JacksonModule}s found
* by {@link MapperBuilder#findModules(ClassLoader)} and {@link MimeType}s.
*/
protected JacksonCodecSupport(T mapper, MimeType... mimeTypes) {
Assert.notNull(mapper, "ObjectMapper must not be null");
Assert.notEmpty(mimeTypes, "MimeTypes must not be empty");
this.defaultMapper = mapper;
this.mimeTypes = List.of(mimeTypes);
}
private List<JacksonModule> initModules() {
if (modules == null) {
modules = MapperBuilder.findModules(JacksonCodecSupport.class.getClassLoader());
}
return Objects.requireNonNull(modules);
}
/**
* Return the {@link ObjectMapper configured} default mapper.
*/
public T getMapper() {
return this.defaultMapper;
}
/**
* Configure the {@link ObjectMapper} instances to use for the given
* {@link Class}. This is useful when you want to deviate from the
* {@link #getMapper() default} ObjectMapper or have the
* {@code ObjectMapper} vary by {@code MediaType}.
* <p><strong>Note:</strong> Use of this method effectively turns off use of
* the default {@link #getMapper() ObjectMapper} and supported
* {@link #getMimeTypes() MimeTypes} for the given class. Therefore it is
* important for the mappings configured here to
* {@link MediaType#includes(MediaType) include} every MediaType that must
* be supported for the given class.
* @param clazz the type of Object to register ObjectMapper instances for
* @param registrar a consumer to populate or otherwise update the
* MediaType-to-ObjectMapper associations for the given Class
*/
public void registerMappersForType(Class<?> clazz, Consumer<Map<MimeType, T>> registrar) {
if (this.mapperRegistrations == null) {
this.mapperRegistrations = new LinkedHashMap<>();
}
Map<MimeType, T> registrations =
this.mapperRegistrations.computeIfAbsent(clazz, c -> new LinkedHashMap<>());
registrar.accept(registrations);
}
/**
* Return ObjectMapper registrations for the given class, if any.
* @param clazz the
|
argument
|
java
|
apache__maven
|
api/maven-api-core/src/main/java/org/apache/maven/api/services/Prompter.java
|
{
"start": 1158,
"end": 3589
}
|
interface ____ extends Service {
/**
* Prompts the user for a string.
*
* @param message the message to display to the user
* @return the string entered by the user
* @throws PrompterException if an exception occurs
*/
@Nonnull
default String prompt(@Nullable String message) throws PrompterException {
return prompt(message, null, null);
}
/**
* Prompts the user for a string using a default value.
*
* @param message the message to display
* @param defaultReply the default reply value
* @return the string entered by the user
* @throws PrompterException if an exception occurs
*/
@Nonnull
default String prompt(@Nullable String message, @Nullable String defaultReply) throws PrompterException {
return prompt(message, null, defaultReply);
}
/**
* Prompts the user for a string using a list of possible values.
*
* @param message the message to display
* @param possibleValues the list of possible values
* @return the string entered by the user
* @throws PrompterException if an exception occurs
*/
@Nonnull
default String prompt(@Nullable String message, @Nullable List<String> possibleValues) throws PrompterException {
return prompt(message, possibleValues, null);
}
/**
* Prompts the user for a string using a list of possible values and a default reply.
*
* @param message the message to display
* @param possibleValues the list of possible values
* @param defaultReply the default reply value
* @return the string entered by the user
* @throws PrompterException if an exception occurs
*/
@Nonnull
String prompt(@Nullable String message, @Nullable List<String> possibleValues, @Nullable String defaultReply)
throws PrompterException;
/**
* Prompts the user for a password.
*
* @param message the message to display
* @return the password entered by the user
* @throws PrompterException if an exception occurs
*/
@Nonnull
String promptForPassword(@Nullable String message) throws PrompterException;
/**
* Displays a message to the user.
*
* @param message the message to display
* @throws PrompterException if an exception occurs
*/
void showMessage(@Nullable String message) throws PrompterException;
}
|
Prompter
|
java
|
spring-projects__spring-framework
|
spring-webflux/src/test/java/org/springframework/web/reactive/function/client/WebClientDataBufferAllocatingTests.java
|
{
"start": 2010,
"end": 8131
}
|
class ____ extends AbstractDataBufferAllocatingTests {
private static final Duration DELAY = Duration.ofSeconds(5);
private final ReactorResourceFactory factory = new ReactorResourceFactory();
private MockWebServer server;
private WebClient webClient;
@BeforeAll
void setUpReactorResourceFactory() {
this.factory.setShutdownQuietPeriod(Duration.ofMillis(100));
this.factory.afterPropertiesSet();
}
@AfterAll
void destroyReactorResourceFactory() {
this.factory.destroy();
}
private void setUp(DataBufferFactory bufferFactory) throws IOException {
super.bufferFactory = bufferFactory;
this.server = new MockWebServer();
this.server.start();
this.webClient = WebClient
.builder()
.clientConnector(initConnector())
.baseUrl(this.server.url("/").toString())
.build();
}
private ReactorClientHttpConnector initConnector() {
assertThat(super.bufferFactory).isNotNull();
if (super.bufferFactory instanceof NettyDataBufferFactory nettyDataBufferFactory) {
ByteBufAllocator allocator = nettyDataBufferFactory.getByteBufAllocator();
return new ReactorClientHttpConnector(this.factory,
client -> client.option(ChannelOption.ALLOCATOR, allocator));
}
else {
return new ReactorClientHttpConnector();
}
}
@ParameterizedDataBufferAllocatingTest
void bodyToMonoVoid(DataBufferFactory bufferFactory) throws IOException {
setUp(bufferFactory);
this.server.enqueue(new MockResponse.Builder().
code(201)
.setHeader("Content-Type", "application/json")
.chunkedBody("{\"foo\" : {\"bar\" : \"123\", \"baz\" : \"456\"}}", 5)
.build());
Mono<Void> mono = this.webClient.get()
.uri("/json").accept(MediaType.APPLICATION_JSON)
.retrieve()
.bodyToMono(Void.class);
StepVerifier.create(mono).expectComplete().verify(Duration.ofSeconds(3));
assertThat(this.server.getRequestCount()).isEqualTo(1);
}
@ParameterizedDataBufferAllocatingTest // SPR-17482
void bodyToMonoVoidWithoutContentType(DataBufferFactory bufferFactory) throws IOException {
setUp(bufferFactory);
this.server.enqueue(new MockResponse.Builder()
.code(HttpStatus.ACCEPTED.value())
.chunkedBody("{\"foo\" : \"123\", \"baz\" : \"456\", \"baz\" : \"456\"}", 5)
.build());
Mono<Map<String, String>> mono = this.webClient.get()
.uri("/sample").accept(MediaType.APPLICATION_JSON)
.retrieve()
.bodyToMono(new ParameterizedTypeReference<>() {});
StepVerifier.create(mono).expectError(WebClientResponseException.class).verify(Duration.ofSeconds(3));
assertThat(this.server.getRequestCount()).isEqualTo(1);
}
@ParameterizedDataBufferAllocatingTest
void onStatusWithBodyNotConsumed(DataBufferFactory bufferFactory) throws IOException {
setUp(bufferFactory);
RuntimeException ex = new RuntimeException("response error");
testOnStatus(ex, response -> Mono.just(ex));
}
@ParameterizedDataBufferAllocatingTest
void onStatusWithBodyConsumed(DataBufferFactory bufferFactory) throws IOException {
setUp(bufferFactory);
RuntimeException ex = new RuntimeException("response error");
testOnStatus(ex, response -> response.bodyToMono(Void.class).thenReturn(ex));
}
@ParameterizedDataBufferAllocatingTest // SPR-17473
void onStatusWithMonoErrorAndBodyNotConsumed(DataBufferFactory bufferFactory) throws IOException {
setUp(bufferFactory);
RuntimeException ex = new RuntimeException("response error");
testOnStatus(ex, response -> Mono.error(ex));
}
@ParameterizedDataBufferAllocatingTest
void onStatusWithMonoErrorAndBodyConsumed(DataBufferFactory bufferFactory) throws IOException {
setUp(bufferFactory);
RuntimeException ex = new RuntimeException("response error");
testOnStatus(ex, response -> response.bodyToMono(Void.class).then(Mono.error(ex)));
}
@ParameterizedDataBufferAllocatingTest // gh-23230
void onStatusWithImmediateErrorAndBodyNotConsumed(DataBufferFactory bufferFactory) throws IOException {
setUp(bufferFactory);
RuntimeException ex = new RuntimeException("response error");
testOnStatus(ex, response -> {
throw ex;
});
}
@ParameterizedDataBufferAllocatingTest
void releaseBody(DataBufferFactory bufferFactory) throws IOException {
setUp(bufferFactory);
this.server.enqueue(new MockResponse.Builder()
.code(200)
.setHeader("Content-Type", "text/plain")
.body("foo bar")
.build());
Mono<Void> result = this.webClient.get()
.exchangeToMono(ClientResponse::releaseBody);
StepVerifier.create(result)
.expectComplete()
.verify(Duration.ofSeconds(3));
}
@ParameterizedDataBufferAllocatingTest
void exchangeToBodilessEntity(DataBufferFactory bufferFactory) throws IOException {
setUp(bufferFactory);
this.server.enqueue(new MockResponse.Builder()
.code(201)
.setHeader("Foo", "bar")
.body("foo bar")
.build());
Mono<ResponseEntity<Void>> result = this.webClient.get()
.exchangeToMono(ClientResponse::toBodilessEntity);
StepVerifier.create(result)
.assertNext(entity -> {
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.CREATED);
assertThat(entity.getHeaders().hasHeaderValues("Foo", Collections.singletonList("bar"))).isTrue();
assertThat(entity.getBody()).isNull();
})
.expectComplete()
.verify(Duration.ofSeconds(3));
}
private void testOnStatus(Throwable expected,
Function<ClientResponse, Mono<? extends Throwable>> exceptionFunction) {
HttpStatus errorStatus = HttpStatus.BAD_GATEWAY;
this.server.enqueue(new MockResponse.Builder()
.code(errorStatus.value())
.setHeader("Content-Type", "application/json")
.chunkedBody("{\"error\" : {\"status\" : 502, \"message\" : \"Bad gateway.\"}}", 5)
.build());
Mono<String> mono = this.webClient.get()
.uri("/json").accept(MediaType.APPLICATION_JSON)
.retrieve()
.onStatus(status -> status.equals(errorStatus), exceptionFunction)
.bodyToMono(String.class);
StepVerifier.create(mono).expectErrorSatisfies(actual -> assertThat(actual).isSameAs(expected)).verify(DELAY);
assertThat(this.server.getRequestCount()).isEqualTo(1);
}
}
|
WebClientDataBufferAllocatingTests
|
java
|
apache__kafka
|
storage/src/test/java/org/apache/kafka/tiered/storage/utils/DumpLocalTieredStorage.java
|
{
"start": 990,
"end": 1310
}
|
class ____ {
public static <K, V> String dump(LocalTieredStorage storage, Deserializer<K> keyDe, Deserializer<V> valueDe) {
LocalTieredStorageOutput<K, V> output = new LocalTieredStorageOutput<>(keyDe, valueDe);
storage.traverse(output);
return output.getOutput();
}
}
|
DumpLocalTieredStorage
|
java
|
apache__camel
|
components/camel-bindy/src/test/java/org/apache/camel/dataformat/bindy/fix/BindySimpleKeyValuePairTabUnmarshallTest.java
|
{
"start": 2037,
"end": 2340
}
|
class ____ extends RouteBuilder {
BindyKeyValuePairDataFormat kvpBindyDataFormat = new BindyKeyValuePairDataFormat(Order.class);
@Override
public void configure() {
from(URI_FILE_FIX_TAB).unmarshal(kvpBindyDataFormat).to(URI_MOCK_RESULT);
}
}
}
|
ContextConfig
|
java
|
netty__netty
|
example/src/main/java/io/netty/example/udt/echo/message/MsgEchoClientHandler.java
|
{
"start": 1130,
"end": 2148
}
|
class ____ extends SimpleChannelInboundHandler<UdtMessage> {
private final UdtMessage message;
public MsgEchoClientHandler() {
super(false);
final ByteBuf byteBuf = Unpooled.buffer(MsgEchoClient.SIZE);
for (int i = 0; i < byteBuf.capacity(); i++) {
byteBuf.writeByte((byte) i);
}
message = new UdtMessage(byteBuf);
}
@Override
public void channelActive(final ChannelHandlerContext ctx) {
System.err.println("ECHO active " + NioUdtProvider.socketUDT(ctx.channel()).toStringOptions());
ctx.writeAndFlush(message);
}
@Override
public void channelRead0(ChannelHandlerContext ctx, UdtMessage msg) {
ctx.write(msg);
}
@Override
public void channelReadComplete(ChannelHandlerContext ctx) {
ctx.flush();
}
@Override
public void exceptionCaught(final ChannelHandlerContext ctx, final Throwable cause) {
cause.printStackTrace();
ctx.close();
}
}
|
MsgEchoClientHandler
|
java
|
apache__camel
|
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/ClasComponentBuilderFactory.java
|
{
"start": 1507,
"end": 1830
}
|
class ____.
*
* Category: core,script
* Since: 2.4
* Maven coordinates: org.apache.camel:camel-bean
*
* @return the dsl builder
*/
static ClasComponentBuilder clas() {
return new ClasComponentBuilderImpl();
}
/**
* Builder for the Class component.
*/
|
name
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/context/support/ContextTypeMatchClassLoader.java
|
{
"start": 1567,
"end": 2678
}
|
class ____ extends DecoratingClassLoader implements SmartClassLoader {
static {
ClassLoader.registerAsParallelCapable();
}
private static final @Nullable Method findLoadedClassMethod;
static {
// Try to enable findLoadedClass optimization which allows us to selectively
// override classes that have not been loaded yet. If not accessible, we will
// always override requested classes, even when the classes have been loaded
// by the parent ClassLoader already and cannot be transformed anymore anyway.
Method method;
try {
method = ClassLoader.class.getDeclaredMethod("findLoadedClass", String.class);
ReflectionUtils.makeAccessible(method);
}
catch (Throwable ex) {
// Typically a JDK 9+ InaccessibleObjectException...
// Avoid through JVM startup with --add-opens=java.base/java.lang=ALL-UNNAMED
method = null;
LogFactory.getLog(ContextTypeMatchClassLoader.class).debug(
"ClassLoader.findLoadedClass not accessible -> will always override requested class", ex);
}
findLoadedClassMethod = method;
}
/** Cache for byte array per
|
ContextTypeMatchClassLoader
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankRequestTaskSettingsTests.java
|
{
"start": 469,
"end": 1464
}
|
class ____ extends ESTestCase {
public void testFromMap_ReturnsEmptySettings_IfMapEmpty() {
var requestTaskSettings = GoogleVertexAiRerankRequestTaskSettings.fromMap(new HashMap<>());
assertThat(requestTaskSettings, is(GoogleVertexAiRerankRequestTaskSettings.EMPTY_SETTINGS));
}
public void testFromMap_DoesNotThrowValidationException_IfTopNIsMissing() {
var requestTaskSettings = GoogleVertexAiRerankRequestTaskSettings.fromMap(new HashMap<>(Map.of("unrelated", 1)));
assertThat(requestTaskSettings, is(new GoogleVertexAiRerankRequestTaskSettings(null)));
}
public void testFromMap_ExtractsTopN() {
var topN = 1;
var requestTaskSettings = GoogleVertexAiRerankRequestTaskSettings.fromMap(
new HashMap<>(Map.of(GoogleVertexAiRerankTaskSettings.TOP_N, topN))
);
assertThat(requestTaskSettings, is(new GoogleVertexAiRerankRequestTaskSettings(topN)));
}
}
|
GoogleVertexAiRerankRequestTaskSettingsTests
|
java
|
apache__flink
|
flink-core/src/test/java/org/apache/flink/api/java/typeutils/PojoTypeExtractionTest.java
|
{
"start": 6379,
"end": 6587
}
|
class ____ extends GenericPojoGetterSetterCheck<String> {
public void setPackageProtected(String in) {
this.packageProtected = in;
}
}
public static
|
TypedPojoGetterSetterCheck
|
java
|
apache__kafka
|
connect/runtime/src/test/java/org/apache/kafka/connect/converters/NumberConverterTest.java
|
{
"start": 1448,
"end": 4178
}
|
class ____<T extends Number> {
private static final String TOPIC = "topic";
private static final String HEADER_NAME = "header";
private T[] samples;
private Schema schema;
private NumberConverter<T> converter;
private Serializer<T> serializer;
protected abstract T[] samples();
protected abstract NumberConverter<T> createConverter();
protected abstract Serializer<T> createSerializer();
protected abstract Schema schema();
@BeforeEach
public void setup() {
converter = createConverter();
serializer = createSerializer();
schema = schema();
samples = samples();
}
@Test
public void testConvertingSamplesToAndFromBytes() throws UnsupportedOperationException {
for (T sample : samples) {
byte[] expected = serializer.serialize(TOPIC, sample);
// Data conversion
assertArrayEquals(expected, converter.fromConnectData(TOPIC, schema, sample));
SchemaAndValue data = converter.toConnectData(TOPIC, expected);
assertEquals(schema, data.schema());
assertEquals(sample, data.value());
// Header conversion
assertArrayEquals(expected, converter.fromConnectHeader(TOPIC, HEADER_NAME, schema, sample));
data = converter.toConnectHeader(TOPIC, HEADER_NAME, expected);
assertEquals(schema, data.schema());
assertEquals(sample, data.value());
}
}
@Test
public void testDeserializingDataWithTooManyBytes() {
assertThrows(DataException.class, () -> converter.toConnectData(TOPIC, new byte[10]));
}
@Test
public void testDeserializingHeaderWithTooManyBytes() {
assertThrows(DataException.class, () -> converter.toConnectHeader(TOPIC, HEADER_NAME, new byte[10]));
}
@Test
public void testSerializingIncorrectType() {
assertThrows(DataException.class, () -> converter.fromConnectData(TOPIC, schema, "not a valid number"));
}
@Test
public void testSerializingIncorrectHeader() {
assertThrows(DataException.class,
() -> converter.fromConnectHeader(TOPIC, HEADER_NAME, schema, "not a valid number"));
}
@Test
public void testNullToBytes() {
assertNull(converter.fromConnectData(TOPIC, schema, null));
}
@Test
public void testBytesNullToNumber() {
SchemaAndValue data = converter.toConnectData(TOPIC, null);
assertEquals(schema(), data.schema());
assertNull(data.value());
}
@Test
public void testInheritedVersionRetrievedFromAppInfoParser() {
assertEquals(AppInfoParser.getVersion(), converter.version());
}
}
|
NumberConverterTest
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesAction.java
|
{
"start": 583,
"end": 894
}
|
class ____ extends ActionType<UpdateDesiredNodesResponse> {
public static final UpdateDesiredNodesAction INSTANCE = new UpdateDesiredNodesAction();
public static final String NAME = "cluster:admin/desired_nodes/update";
UpdateDesiredNodesAction() {
super(NAME);
}
}
|
UpdateDesiredNodesAction
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/ChainMapper.java
|
{
"start": 4733,
"end": 7176
}
|
class ____ add.
* @param inputKeyClass mapper input key class.
* @param inputValueClass mapper input value class.
* @param outputKeyClass mapper output key class.
* @param outputValueClass mapper output value class.
* @param byValue indicates if key/values should be passed by value
* to the next Mapper in the chain, if any.
* @param mapperConf a JobConf with the configuration for the Mapper
* class. It is recommended to use a JobConf without default values using the
* <code>JobConf(boolean loadDefaults)</code> constructor with FALSE.
*/
public static <K1, V1, K2, V2> void addMapper(JobConf job,
Class<? extends Mapper<K1, V1, K2, V2>> klass,
Class<? extends K1> inputKeyClass,
Class<? extends V1> inputValueClass,
Class<? extends K2> outputKeyClass,
Class<? extends V2> outputValueClass,
boolean byValue, JobConf mapperConf) {
job.setMapperClass(ChainMapper.class);
job.setMapOutputKeyClass(outputKeyClass);
job.setMapOutputValueClass(outputValueClass);
Chain.addMapper(true, job, klass, inputKeyClass, inputValueClass,
outputKeyClass, outputValueClass, byValue, mapperConf);
}
private Chain chain;
/**
* Constructor.
*/
public ChainMapper() {
chain = new Chain(true);
}
/**
* Configures the ChainMapper and all the Mappers in the chain.
* <p>
* If this method is overriden <code>super.configure(...)</code> should be
* invoked at the beginning of the overwriter method.
*/
public void configure(JobConf job) {
chain.configure(job);
}
/**
* Chains the <code>map(...)</code> methods of the Mappers in the chain.
*/
@SuppressWarnings({"unchecked"})
public void map(Object key, Object value, OutputCollector output,
Reporter reporter) throws IOException {
Mapper mapper = chain.getFirstMap();
if (mapper != null) {
mapper.map(key, value, chain.getMapperCollector(0, output, reporter),
reporter);
}
}
/**
* Closes the ChainMapper and all the Mappers in the chain.
* <p>
* If this method is overriden <code>super.close()</code> should be
* invoked at the end of the overwriter method.
*/
public void close() throws IOException {
chain.close();
}
}
|
to
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/node/Node.java
|
{
"start": 34394,
"end": 35215
}
|
class ____ implements Function<BoundTransportAddress, DiscoveryNode> {
private final SetOnce<DiscoveryNode> localNode = new SetOnce<>();
private final String persistentNodeId;
private final Settings settings;
LocalNodeFactory(Settings settings, String persistentNodeId) {
this.persistentNodeId = persistentNodeId;
this.settings = settings;
}
@Override
public DiscoveryNode apply(BoundTransportAddress boundTransportAddress) {
localNode.set(DiscoveryNode.createLocal(settings, boundTransportAddress.publishAddress(), persistentNodeId));
return localNode.get();
}
DiscoveryNode getNode() {
assert localNode.get() != null;
return localNode.get();
}
}
}
|
LocalNodeFactory
|
java
|
apache__spark
|
common/unsafe/src/main/java/org/apache/spark/sql/catalyst/util/CollationSupport.java
|
{
"start": 15441,
"end": 17326
}
|
class ____ {
public static UTF8String exec(final UTF8String src, final UTF8String search,
final UTF8String replace, final int collationId) {
CollationFactory.Collation collation = CollationFactory.fetchCollation(collationId);
// Space trimming does not affect the output of this expression.
if (collation.isUtf8BinaryType) {
return execBinary(src, search, replace);
} else if (collation.isUtf8LcaseType) {
return execLowercase(src, search, replace);
} else {
return execICU(src, search, replace, collationId);
}
}
public static String genCode(final String src, final String search, final String replace,
final int collationId) {
CollationFactory.Collation collation = CollationFactory.fetchCollation(collationId);
String expr = "CollationSupport.StringReplace.exec";
if (collation.isUtf8BinaryType) {
return String.format(expr + "Binary(%s, %s, %s)", src, search, replace);
} else if (collation.isUtf8LcaseType) {
return String.format(expr + "Lowercase(%s, %s, %s)", src, search, replace);
} else {
return String.format(expr + "ICU(%s, %s, %s, %d)", src, search, replace, collationId);
}
}
public static UTF8String execBinary(final UTF8String src, final UTF8String search,
final UTF8String replace) {
return src.replace(search, replace);
}
public static UTF8String execLowercase(final UTF8String src, final UTF8String search,
final UTF8String replace) {
return CollationAwareUTF8String.lowercaseReplace(src, search, replace);
}
public static UTF8String execICU(final UTF8String src, final UTF8String search,
final UTF8String replace, final int collationId) {
return CollationAwareUTF8String.replace(src, search, replace, collationId);
}
}
public static
|
StringReplace
|
java
|
elastic__elasticsearch
|
distribution/tools/geoip-cli/src/main/java/org/elasticsearch/geoip/GeoIpCliProvider.java
|
{
"start": 595,
"end": 802
}
|
class ____ implements CliToolProvider {
@Override
public String name() {
return "geoip";
}
@Override
public Command create() {
return new GeoIpCli();
}
}
|
GeoIpCliProvider
|
java
|
google__guava
|
android/guava/src/com/google/common/primitives/ImmutableIntArray.java
|
{
"start": 17943,
"end": 22489
}
|
class ____ extends AbstractList<Integer>
implements RandomAccess, Serializable {
private final ImmutableIntArray parent;
private AsList(ImmutableIntArray parent) {
this.parent = parent;
}
// inherit: isEmpty, containsAll, toArray x2, iterator, listIterator, stream, forEach, mutations
@Override
public int size() {
return parent.length();
}
@Override
public Integer get(int index) {
return parent.get(index);
}
@Override
public boolean contains(@Nullable Object target) {
return indexOf(target) >= 0;
}
@Override
public int indexOf(@Nullable Object target) {
return target instanceof Integer ? parent.indexOf((Integer) target) : -1;
}
@Override
public int lastIndexOf(@Nullable Object target) {
return target instanceof Integer ? parent.lastIndexOf((Integer) target) : -1;
}
@Override
public List<Integer> subList(int fromIndex, int toIndex) {
return parent.subArray(fromIndex, toIndex).asList();
}
// The default List spliterator is not efficiently splittable
@Override
/*
* This is an override that is not directly visible to callers, so NewApi will catch calls to
* Collection.spliterator() where necessary.
*/
@IgnoreJRERequirement
public Spliterator<Integer> spliterator() {
return parent.spliterator();
}
@Override
public boolean equals(@Nullable Object object) {
if (object instanceof AsList) {
AsList that = (AsList) object;
return this.parent.equals(that.parent);
}
// We could delegate to super now but it would still box too much
if (!(object instanceof List)) {
return false;
}
List<?> that = (List<?>) object;
if (this.size() != that.size()) {
return false;
}
int i = parent.start;
// Since `that` is very likely RandomAccess we could avoid allocating this iterator...
for (Object element : that) {
if (!(element instanceof Integer) || parent.array[i++] != (Integer) element) {
return false;
}
}
return true;
}
// Because we happen to use the same formula. If that changes, just don't override this.
@Override
public int hashCode() {
return parent.hashCode();
}
@Override
public String toString() {
return parent.toString();
}
}
/**
* Returns {@code true} if {@code object} is an {@code ImmutableIntArray} containing the same
* values as this one, in the same order.
*/
@Override
public boolean equals(@Nullable Object object) {
if (object == this) {
return true;
}
if (!(object instanceof ImmutableIntArray)) {
return false;
}
ImmutableIntArray that = (ImmutableIntArray) object;
if (this.length() != that.length()) {
return false;
}
for (int i = 0; i < length(); i++) {
if (this.get(i) != that.get(i)) {
return false;
}
}
return true;
}
/** Returns an unspecified hash code for the contents of this immutable array. */
@Override
public int hashCode() {
int hash = 1;
for (int i = start; i < end; i++) {
hash *= 31;
hash += Integer.hashCode(array[i]);
}
return hash;
}
/**
* Returns a string representation of this array in the same form as {@link
* Arrays#toString(int[])}, for example {@code "[1, 2, 3]"}.
*/
@Override
public String toString() {
if (isEmpty()) {
return "[]";
}
StringBuilder builder = new StringBuilder(length() * 5); // rough estimate is fine
builder.append('[').append(array[start]);
for (int i = start + 1; i < end; i++) {
builder.append(", ").append(array[i]);
}
builder.append(']');
return builder.toString();
}
/**
* Returns an immutable array containing the same values as {@code this} array. This is logically
* a no-op, and in some circumstances {@code this} itself is returned. However, if this instance
* is a {@link #subArray} view of a larger array, this method will copy only the appropriate range
* of values, resulting in an equivalent array with a smaller memory footprint.
*/
public ImmutableIntArray trimmed() {
return isPartialView() ? new ImmutableIntArray(toArray()) : this;
}
private boolean isPartialView() {
return start > 0 || end < array.length;
}
Object writeReplace() {
return trimmed();
}
Object readResolve() {
return isEmpty() ? EMPTY : this;
}
}
|
AsList
|
java
|
quarkusio__quarkus
|
independent-projects/bootstrap/app-model/src/main/java/io/quarkus/bootstrap/model/PlatformImportsImpl.java
|
{
"start": 566,
"end": 9648
}
|
class ____ implements PlatformImports, Serializable {
private static final long serialVersionUID = -1722573527738064746L;
public static final String PROPERTY_PREFIX = "platform.release-info@";
public static final char PLATFORM_KEY_STREAM_SEPARATOR = '$';
public static final char STREAM_VERSION_SEPARATOR = '#';
private static int requiredIndex(String s, char c, int fromIndex) {
final int i = s.indexOf(c, fromIndex);
if (i < 0) {
throw new IllegalArgumentException("Failed to locate '" + c + "' in '" + s + "'");
}
return i;
}
public static boolean isPlatformReleaseInfo(String s) {
return s != null && s.startsWith(PROPERTY_PREFIX);
}
// metadata for each found platform release by platform key
private final Map<String, PlatformInfo> allPlatformInfo = new HashMap<>();
// imported platform BOMs by platform keys (groupId)
private final Map<String, Collection<ArtifactCoords>> importedPlatformBoms = new HashMap<>();
private final Map<ArtifactCoords, PlatformImport> platformImports = new HashMap<>();
final Map<String, String> collectedProps = new HashMap<String, String>();
private final Collection<ArtifactCoords> platformBoms = new ArrayList<>();
private final Collection<PlatformReleaseInfo> platformReleaseInfo = new ArrayList<>();
public PlatformImportsImpl() {
}
public Collection<PlatformReleaseInfo> getPlatformReleaseInfo() {
return platformReleaseInfo;
}
public Collection<ArtifactCoords> getImportedPlatformBoms() {
return platformBoms;
}
void addPlatformRelease(String propertyName, String propertyValue) {
final int platformKeyStreamSep = requiredIndex(propertyName, PLATFORM_KEY_STREAM_SEPARATOR, PROPERTY_PREFIX.length());
final int streamVersionSep = requiredIndex(propertyName, STREAM_VERSION_SEPARATOR, platformKeyStreamSep + 1);
final String platformKey = propertyName.substring(PROPERTY_PREFIX.length(), platformKeyStreamSep);
final String streamId = propertyName.substring(platformKeyStreamSep + 1, streamVersionSep);
final String version = propertyName.substring(streamVersionSep + 1);
allPlatformInfo.computeIfAbsent(platformKey, PlatformInfo::new).getOrCreateStream(streamId).addIfNotPresent(
version,
() -> {
final PlatformReleaseInfo ri = new PlatformReleaseInfo(platformKey, streamId, version, propertyValue);
platformReleaseInfo.add(ri);
return ri;
});
}
public void addPlatformDescriptor(String groupId, String artifactId, String classifier, String type, String version) {
final ArtifactCoords bomCoords = ArtifactCoords.pom(groupId,
artifactId.substring(0,
artifactId.length() - BootstrapConstants.PLATFORM_DESCRIPTOR_ARTIFACT_ID_SUFFIX.length()),
version);
platformImports.computeIfAbsent(bomCoords, this::newPlatformImport).descriptorFound = true;
}
public void addPlatformProperties(String groupId, String artifactId, String classifier, String type, String version,
Path propsPath) throws AppModelResolverException {
final ArtifactCoords bomCoords = ArtifactCoords.pom(groupId,
artifactId.substring(0,
artifactId.length() - BootstrapConstants.PLATFORM_PROPERTIES_ARTIFACT_ID_SUFFIX.length()),
version);
platformImports.computeIfAbsent(bomCoords, this::newPlatformImport);
importedPlatformBoms.computeIfAbsent(groupId, g -> new ArrayList<>());
if (!importedPlatformBoms.get(groupId).contains(bomCoords)) {
importedPlatformBoms.get(groupId).add(bomCoords);
final Properties props = new Properties();
try (InputStream is = Files.newInputStream(propsPath)) {
props.load(is);
} catch (IOException e) {
throw new AppModelResolverException("Failed to read properties from " + propsPath, e);
}
for (Map.Entry<?, ?> prop : props.entrySet()) {
final String name = String.valueOf(prop.getKey());
if (name.startsWith(BootstrapConstants.PLATFORM_PROPERTY_PREFIX)) {
if (isPlatformReleaseInfo(name)) {
addPlatformRelease(name, String.valueOf(prop.getValue()));
} else {
collectedProps.putIfAbsent(name, String.valueOf(prop.getValue().toString()));
}
}
}
}
}
/**
* This method is meant to be called when a new platform BOM import was detected.
*
* @param bom platform BOM coordinates
* @return new platform import instance
*/
private PlatformImport newPlatformImport(ArtifactCoords bom) {
platformBoms.add(bom);
return new PlatformImport();
}
public void setPlatformProperties(Map<String, String> platformProps) {
this.collectedProps.putAll(platformProps);
}
@Override
public Map<String, String> getPlatformProperties() {
return collectedProps;
}
@Override
public String getMisalignmentReport() {
StringWriter error = null;
for (Map.Entry<ArtifactCoords, PlatformImport> pi : platformImports.entrySet()) {
if (!pi.getValue().descriptorFound) {
if (error == null) {
error = new StringWriter();
error.append(
"The Quarkus platform properties applied to the project are missing the corresponding Quarkus platform BOM imports: ");
} else {
error.append(", ");
}
error.append(pi.getKey().toString());
}
}
if (error != null) {
return error.getBuffer().toString();
}
final Map<String, List<List<String>>> possibleAlignments = getPossibleAlignemnts(importedPlatformBoms);
if (possibleAlignments.isEmpty()) {
return null;
}
error = new StringWriter();
try (BufferedWriter writer = new BufferedWriter(error)) {
writer.append(
"Some of the imported Quarkus platform BOMs belong to different platform releases. To properly align the platform BOM imports, please, consider one of the following combinations:");
writer.newLine();
for (Map.Entry<String, List<List<String>>> entry : possibleAlignments.entrySet()) {
writer.append("For platform ").append(entry.getKey()).append(':');
writer.newLine();
int i = 1;
for (List<String> boms : entry.getValue()) {
writer.append(" ").append(String.valueOf(i++)).append(") ");
writer.newLine();
for (String bom : boms) {
writer.append(" - ").append(bom);
writer.newLine();
}
}
}
} catch (IOException e) {
// ignore
}
return error.toString();
}
@Override
public boolean isAligned() {
return isAligned(importedPlatformBoms);
}
boolean isAligned(Map<String, Collection<ArtifactCoords>> importedPlatformBoms) {
for (Map.Entry<ArtifactCoords, PlatformImport> pi : platformImports.entrySet()) {
if (!pi.getValue().descriptorFound) {
return false;
}
}
for (Map.Entry<String, Collection<ArtifactCoords>> platformImportedBoms : importedPlatformBoms.entrySet()) {
final PlatformInfo platformInfo = allPlatformInfo.get(platformImportedBoms.getKey());
if (platformInfo != null && !platformInfo.isAligned(platformImportedBoms.getValue())) {
return false;
}
}
return true;
}
private Map<String, List<List<String>>> getPossibleAlignemnts(
Map<String, Collection<ArtifactCoords>> importedPlatformBoms) {
final Map<String, List<List<String>>> alignments = new HashMap<>(importedPlatformBoms.size());
for (Map.Entry<String, Collection<ArtifactCoords>> platformImportedBoms : importedPlatformBoms.entrySet()) {
final PlatformInfo platformInfo = allPlatformInfo.get(platformImportedBoms.getKey());
if (platformInfo == null || platformInfo.isAligned(platformImportedBoms.getValue())) {
continue;
}
alignments.put(platformInfo.getPlatformKey(), platformInfo.getPossibleAlignments(platformImportedBoms.getValue()));
}
return alignments;
}
Collection<PlatformInfo> getPlatforms() {
return allPlatformInfo.values();
}
PlatformInfo getPlatform(String platformKey) {
return allPlatformInfo.get(platformKey);
}
private static
|
PlatformImportsImpl
|
java
|
elastic__elasticsearch
|
distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/JvmErgonomicsTests.java
|
{
"start": 1719,
"end": 15136
}
|
class ____ extends ESTestCase {
public void testExtractValidHeapSizeUsingXmx() throws Exception {
assertThat(JvmOption.extractMaxHeapSize(JvmOption.findFinalOptions(Collections.singletonList("-Xmx2g"))), equalTo(2L << 30));
}
public void testExtractValidHeapSizeUsingMaxHeapSize() throws Exception {
assertThat(
JvmOption.extractMaxHeapSize(JvmOption.findFinalOptions(Collections.singletonList("-XX:MaxHeapSize=2g"))),
equalTo(2L << 30)
);
}
public void testExtractValidHeapSizeNoOptionPresent() throws Exception {
assertThat(JvmOption.extractMaxHeapSize(JvmOption.findFinalOptions(Collections.emptyList())), greaterThan(0L));
}
public void testHeapSizeInvalid() throws InterruptedException, IOException {
try {
JvmOption.extractMaxHeapSize(JvmOption.findFinalOptions(Collections.singletonList("-Xmx2Z")));
fail("expected starting java to fail");
} catch (final RuntimeException e) {
assertThat(e, hasToString(containsString(("starting java failed"))));
assertThat(e, hasToString(containsString(("Invalid maximum heap size: -Xmx2Z"))));
}
}
public void testHeapSizeTooSmall() throws Exception {
try {
JvmOption.extractMaxHeapSize(JvmOption.findFinalOptions(Collections.singletonList("-Xmx1024")));
fail("expected starting java to fail");
} catch (final RuntimeException e) {
assertThat(e, hasToString(containsString(("starting java failed"))));
assertThat(e, hasToString(containsString(("Too small maximum heap"))));
}
}
public void testHeapSizeWithSpace() throws Exception {
try {
JvmOption.extractMaxHeapSize(JvmOption.findFinalOptions(Collections.singletonList("-Xmx 1024")));
fail("expected starting java to fail");
} catch (final RuntimeException e) {
assertThat(e, hasToString(containsString(("starting java failed"))));
assertThat(e, hasToString(containsString(("Invalid maximum heap size: -Xmx 1024"))));
}
}
public void testMaxDirectMemorySizeUnset() throws Exception {
assertThat(JvmOption.extractMaxDirectMemorySize(JvmOption.findFinalOptions(Collections.singletonList("-Xmx1g"))), equalTo(0L));
}
public void testMaxDirectMemorySizeSet() throws Exception {
assertThat(
JvmOption.extractMaxDirectMemorySize(JvmOption.findFinalOptions(Arrays.asList("-Xmx1g", "-XX:MaxDirectMemorySize=512m"))),
equalTo(512L << 20)
);
}
public void testExtractSystemProperties() {
Map<String, String> expectedSystemProperties = new HashMap<>();
expectedSystemProperties.put("file.encoding", "UTF-8");
expectedSystemProperties.put("kv.setting", "ABC=DEF");
Map<String, String> parsedSystemProperties = JvmErgonomics.extractSystemProperties(
Arrays.asList("-Dfile.encoding=UTF-8", "-Dkv.setting=ABC=DEF")
);
assertEquals(expectedSystemProperties, parsedSystemProperties);
}
public void testG1GOptionsForSmallHeap() throws Exception {
List<String> jvmErgonomics = JvmErgonomics.choose(Arrays.asList("-Xms6g", "-Xmx6g", "-XX:+UseG1GC"), Settings.EMPTY);
assertThat(jvmErgonomics, hasItem("-XX:G1HeapRegionSize=4m"));
assertThat(jvmErgonomics, hasItem("-XX:InitiatingHeapOccupancyPercent=30"));
assertThat(jvmErgonomics, hasItem("-XX:G1ReservePercent=15"));
}
public void testG1GOptionsForSmallHeapWhenTuningSet() throws Exception {
List<String> jvmErgonomics = JvmErgonomics.choose(
Arrays.asList("-Xms6g", "-Xmx6g", "-XX:+UseG1GC", "-XX:G1HeapRegionSize=4m", "-XX:InitiatingHeapOccupancyPercent=45"),
Settings.EMPTY
);
assertThat(jvmErgonomics, everyItem(not(startsWith("-XX:G1HeapRegionSize="))));
assertThat(jvmErgonomics, everyItem(not(startsWith("-XX:InitiatingHeapOccupancyPercent="))));
assertThat(jvmErgonomics, hasItem("-XX:G1ReservePercent=15"));
}
public void testG1GOptionsForLargeHeap() throws Exception {
List<String> jvmErgonomics = JvmErgonomics.choose(Arrays.asList("-Xms8g", "-Xmx8g", "-XX:+UseG1GC"), Settings.EMPTY);
assertThat(jvmErgonomics, hasItem("-XX:InitiatingHeapOccupancyPercent=30"));
assertThat(jvmErgonomics, hasItem("-XX:G1ReservePercent=25"));
assertThat(jvmErgonomics, everyItem(not(startsWith("-XX:G1HeapRegionSize="))));
}
public void testG1GOptionsForSmallHeapWhenOtherGCSet() throws Exception {
List<String> jvmErgonomics = JvmErgonomics.choose(Arrays.asList("-Xms6g", "-Xmx6g", "-XX:+UseParallelGC"), Settings.EMPTY);
assertThat(jvmErgonomics, everyItem(not(startsWith("-XX:G1HeapRegionSize="))));
assertThat(jvmErgonomics, everyItem(not(startsWith("-XX:InitiatingHeapOccupancyPercent="))));
assertThat(jvmErgonomics, everyItem(not(startsWith("-XX:G1ReservePercent="))));
}
public void testG1GOptionsForLargeHeapWhenTuningSet() throws Exception {
List<String> jvmErgonomics = JvmErgonomics.choose(
Arrays.asList("-Xms8g", "-Xmx8g", "-XX:+UseG1GC", "-XX:InitiatingHeapOccupancyPercent=60", "-XX:G1ReservePercent=10"),
Settings.EMPTY
);
assertThat(jvmErgonomics, everyItem(not(startsWith("-XX:InitiatingHeapOccupancyPercent="))));
assertThat(jvmErgonomics, everyItem(not(startsWith("-XX:G1ReservePercent="))));
assertThat(jvmErgonomics, everyItem(not(startsWith("-XX:G1HeapRegionSize="))));
}
public void testExtractNoSystemProperties() {
Map<String, String> parsedSystemProperties = JvmErgonomics.extractSystemProperties(Arrays.asList("-Xms1024M", "-Xmx1024M"));
assertTrue(parsedSystemProperties.isEmpty());
}
public void testMaxDirectMemorySizeChoice() throws Exception {
final Map<String, String> heapMaxDirectMemorySize = Map.of(
"64M",
Long.toString((64L << 20) / 2),
"512M",
Long.toString((512L << 20) / 2),
"1024M",
Long.toString((1024L << 20) / 2),
"1G",
Long.toString((1L << 30) / 2),
"2048M",
Long.toString((2048L << 20) / 2),
"2G",
Long.toString((2L << 30) / 2),
"8G",
Long.toString((8L << 30) / 2)
);
final String heapSize = randomFrom(heapMaxDirectMemorySize.keySet().toArray(String[]::new));
assertThat(
JvmErgonomics.choose(Arrays.asList("-Xms" + heapSize, "-Xmx" + heapSize), Settings.EMPTY),
hasItem("-XX:MaxDirectMemorySize=" + heapMaxDirectMemorySize.get(heapSize))
);
}
public void testMaxDirectMemorySizeChoiceWhenSet() throws Exception {
assertThat(
JvmErgonomics.choose(Arrays.asList("-Xms1g", "-Xmx1g", "-XX:MaxDirectMemorySize=1g"), Settings.EMPTY),
everyItem(not(startsWith("-XX:MaxDirectMemorySize=")))
);
}
public void testConcGCThreadsNotSetBasedOnProcessors() throws Exception {
Settings.Builder nodeSettingsBuilder = Settings.builder()
.put(NodeRoleSettings.NODE_ROLES_SETTING.getKey(), DiscoveryNodeRole.SEARCH_ROLE.roleName());
if (randomBoolean()) {
int maxProcessors = Runtime.getRuntime().availableProcessors();
List<Integer> possibleProcessors = new ArrayList<>();
IntStream.range(1, maxProcessors + 1).filter(i -> i < 4 || i > 5).forEach(possibleProcessors::add);
nodeSettingsBuilder.put(EsExecutors.NODE_PROCESSORS_SETTING.getKey(), randomFrom(possibleProcessors));
}
assertThat(JvmErgonomics.choose(List.of(), nodeSettingsBuilder.build()), everyItem(not(startsWith("-XX:ConcGCThreads="))));
}
public void testConcGCThreadsNotSetBasedOnRoles() throws Exception {
Settings.Builder nodeSettingsBuilder = Settings.builder().put(EsExecutors.NODE_PROCESSORS_SETTING.getKey(), between(4, 5));
if (randomBoolean()) {
List<DiscoveryNodeRole> possibleRoles = new ArrayList<>(DiscoveryNodeRole.roles());
possibleRoles.remove(DiscoveryNodeRole.SEARCH_ROLE);
possibleRoles.remove(DiscoveryNodeRole.VOTING_ONLY_NODE_ROLE);
nodeSettingsBuilder.put(NodeRoleSettings.NODE_ROLES_SETTING.getKey(), randomFrom(possibleRoles).roleName());
}
assertThat(JvmErgonomics.choose(List.of(), nodeSettingsBuilder.build()), everyItem(not(startsWith("-XX:ConcGCThreads="))));
}
public void testConcGCThreadsSet() throws Exception {
Settings nodeSettings = Settings.builder()
.put(EsExecutors.NODE_PROCESSORS_SETTING.getKey(), between(4, 5))
.put(NodeRoleSettings.NODE_ROLES_SETTING.getKey(), DiscoveryNodeRole.SEARCH_ROLE.roleName())
.build();
assertThat(JvmErgonomics.choose(List.of(), nodeSettings), hasItem("-XX:ConcGCThreads=2"));
}
public void testMinimumNewSizeNotSetBasedOnHeap() throws Exception {
Settings nodeSettings = Settings.builder()
.put(NodeRoleSettings.NODE_ROLES_SETTING.getKey(), DiscoveryNodeRole.SEARCH_ROLE.roleName())
.build();
List<String> chosen = JvmErgonomics.choose(List.of("-Xmx" + between(5, 31) + "g"), nodeSettings);
assertThat(chosen, everyItem(not(is("-XX:+UnlockExperimentalVMOptions"))));
assertThat(chosen, everyItem(not(startsWith("-XX:G1NewSizePercent="))));
}
public void testMinimumNewSizeNotSetBasedOnRoles() throws Exception {
Settings nodeSettings;
if (randomBoolean()) {
nodeSettings = Settings.EMPTY;
} else {
List<DiscoveryNodeRole> possibleRoles = new ArrayList<>(DiscoveryNodeRole.roles());
possibleRoles.remove(DiscoveryNodeRole.SEARCH_ROLE);
possibleRoles.remove(DiscoveryNodeRole.VOTING_ONLY_NODE_ROLE);
nodeSettings = Settings.builder()
.put(NodeRoleSettings.NODE_ROLES_SETTING.getKey(), randomFrom(possibleRoles).roleName())
.build();
}
List<String> chosen = JvmErgonomics.choose(List.of("-Xmx" + between(1, 4) + "g"), nodeSettings);
assertThat(chosen, everyItem(not(is("-XX:+UnlockExperimentalVMOptions"))));
assertThat(chosen, everyItem(not(startsWith("-XX:G1NewSizePercent="))));
}
public void testMinimumNewSizeSet() throws Exception {
Settings nodeSettings = Settings.builder()
.put(NodeRoleSettings.NODE_ROLES_SETTING.getKey(), DiscoveryNodeRole.SEARCH_ROLE.roleName())
.build();
List<String> chosen = JvmErgonomics.choose(List.of("-Xmx" + between(1, 4) + "g"), nodeSettings);
assertThat(chosen, hasItem("-XX:+UnlockExperimentalVMOptions"));
assertThat(chosen, hasItem("-XX:G1NewSizePercent=10"));
}
@SuppressWarnings("ConstantConditions")
public void testMissingOptionHandling() {
final Map<String, JvmOption> g1GcOn = Map.of("UseG1GC", new JvmOption("true", ""));
final Map<String, JvmOption> g1GcOff = Map.of("UseG1GC", new JvmOption("", ""));
assertFalse(JvmErgonomics.tuneG1GCHeapRegion(Map.of(), false));
assertThat(
expectThrows(IllegalStateException.class, () -> JvmErgonomics.tuneG1GCHeapRegion(Map.of(), true)).getMessage(),
allOf(containsString("[UseG1GC]"), containsString("unexpectedly missing"))
);
assertThat(
expectThrows(IllegalStateException.class, () -> JvmErgonomics.tuneG1GCHeapRegion(g1GcOn, true)).getMessage(),
allOf(containsString("[G1HeapRegionSize]"), containsString("unexpectedly missing"))
);
assertFalse(JvmErgonomics.tuneG1GCHeapRegion(g1GcOff, randomBoolean()));
assertThat(
expectThrows(IllegalStateException.class, () -> JvmErgonomics.tuneG1GCReservePercent(Map.of(), randomBoolean())).getMessage(),
allOf(containsString("[UseG1GC]"), containsString("unexpectedly missing"))
);
assertThat(
expectThrows(IllegalStateException.class, () -> JvmErgonomics.tuneG1GCReservePercent(g1GcOn, randomBoolean())).getMessage(),
allOf(containsString("[G1ReservePercent]"), containsString("unexpectedly missing"))
);
assertEquals(0, JvmErgonomics.tuneG1GCReservePercent(g1GcOff, randomBoolean()));
assertThat(
expectThrows(IllegalStateException.class, () -> JvmErgonomics.tuneG1GCInitiatingHeapOccupancyPercent(Map.of())).getMessage(),
allOf(containsString("[UseG1GC]"), containsString("unexpectedly missing"))
);
assertThat(
expectThrows(IllegalStateException.class, () -> JvmErgonomics.tuneG1GCInitiatingHeapOccupancyPercent(g1GcOn)).getMessage(),
allOf(containsString("[InitiatingHeapOccupancyPercent]"), containsString("unexpectedly missing"))
);
assertFalse(JvmErgonomics.tuneG1GCInitiatingHeapOccupancyPercent(g1GcOff));
assertThat(
expectThrows(IllegalStateException.class, () -> new JvmOption("OptionName", null)).getMessage(),
allOf(containsString("could not determine the origin of JVM option [OptionName]"), containsString("unsupported"))
);
}
}
|
JvmErgonomicsTests
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/XmlSignerEndpointBuilderFactory.java
|
{
"start": 46616,
"end": 46948
}
|
class ____ extends AbstractEndpointBuilder implements XmlSignerEndpointBuilder, AdvancedXmlSignerEndpointBuilder {
public XmlSignerEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new XmlSignerEndpointBuilderImpl(path);
}
}
|
XmlSignerEndpointBuilderImpl
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/clientproxy/packageprivate/PackagePrivateInterfaceInHierarchyTest.java
|
{
"start": 433,
"end": 799
}
|
class ____ {
@RegisterExtension
public ArcTestContainer container = new ArcTestContainer(BaseInterface.class, MyInterface.class, MyInterface2.class,
Producer.class);
@Test
public void testProducer() {
assertEquals("quarkus", Arc.container().instance(MyInterface2.class).get().ping());
}
}
|
PackagePrivateInterfaceInHierarchyTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/query/sqm/tree/domain/SqmTreatedBagJoin.java
|
{
"start": 986,
"end": 4820
}
|
class ____<L, R, S extends R> extends SqmBagJoin<L, S> implements SqmTreatedAttributeJoin<L, R, S> {
private final SqmBagJoin<L, R> wrappedPath;
private final SqmTreatableDomainType<S> treatTarget;
public SqmTreatedBagJoin(
SqmBagJoin<L, R> wrappedPath,
SqmTreatableDomainType<S> treatTarget,
@Nullable String alias) {
this( wrappedPath, treatTarget, alias, false );
}
public SqmTreatedBagJoin(
SqmBagJoin<L, R> wrappedPath,
SqmTreatableDomainType<S> treatTarget,
@Nullable String alias,
boolean fetched) {
//noinspection unchecked
super(
wrappedPath.getLhs(),
wrappedPath.getNavigablePath()
.append( CollectionPart.Nature.ELEMENT.getName() )
.treatAs( treatTarget.getTypeName(), alias ),
(SqmBagPersistentAttribute<L, S>) wrappedPath.getAttribute(),
alias,
wrappedPath.getSqmJoinType(),
fetched,
wrappedPath.nodeBuilder()
);
this.treatTarget = treatTarget;
this.wrappedPath = wrappedPath;
}
private SqmTreatedBagJoin(
NavigablePath navigablePath,
SqmBagJoin<L, R> wrappedPath,
SqmTreatableDomainType<S> treatTarget,
@Nullable String alias,
boolean fetched) {
//noinspection unchecked
super(
wrappedPath.getLhs(),
navigablePath,
(SqmBagPersistentAttribute<L, S>) wrappedPath.getAttribute(),
alias,
wrappedPath.getSqmJoinType(),
wrappedPath.isFetched(),
wrappedPath.nodeBuilder()
);
this.treatTarget = treatTarget;
this.wrappedPath = wrappedPath;
}
@Override
public SqmTreatedBagJoin<L, R, S> copy(SqmCopyContext context) {
final SqmTreatedBagJoin<L, R, S> existing = context.getCopy( this );
if ( existing != null ) {
return existing;
}
final SqmTreatedBagJoin<L, R, S> path = context.registerCopy(
this,
new SqmTreatedBagJoin<>(
getNavigablePath(),
wrappedPath.copy( context ),
treatTarget,
getExplicitAlias(),
isFetched()
)
);
copyTo( path, context );
return path;
}
@Override
public SqmBagJoin<L, R> getWrappedPath() {
return wrappedPath;
}
@Override
public TreatableDomainType<S> getTreatTarget() {
return treatTarget;
}
@Override
public @NonNull SqmBindableType<S> getNodeType() {
return treatTarget;
}
@Override
public SqmTreatableDomainType<S> getReferencedPathSource() {
return treatTarget;
}
@Override
public SqmPathSource<S> getResolvedModel() {
return treatTarget;
}
@Override
public void appendHqlString(StringBuilder hql, SqmRenderContext context) {
hql.append( "treat(" );
wrappedPath.appendHqlString( hql, context );
hql.append( " as " );
hql.append( treatTarget.getTypeName() );
hql.append( ')' );
}
@Override
public SqmTreatedBagJoin<L,R, S> on(@Nullable JpaExpression<Boolean> restriction) {
return (SqmTreatedBagJoin<L, R, S>) super.on( restriction );
}
@Override
public SqmTreatedBagJoin<L,R, S> on(@Nullable Expression<Boolean> restriction) {
return (SqmTreatedBagJoin<L, R, S>) super.on( restriction );
}
@Override
public SqmTreatedBagJoin<L,R, S> on(JpaPredicate @Nullable... restrictions) {
return (SqmTreatedBagJoin<L, R, S>) super.on( restrictions );
}
@Override
public SqmTreatedBagJoin<L,R, S> on(Predicate @Nullable... restrictions) {
return (SqmTreatedBagJoin<L, R, S>) super.on( restrictions );
}
@Override
public <S1 extends S> SqmTreatedBagJoin<L, S, S1> treatAs(Class<S1> treatJavaType, @Nullable String alias, boolean fetch) {
//noinspection unchecked
return (SqmTreatedBagJoin<L, S, S1>) wrappedPath.treatAs( treatJavaType, alias, fetch );
}
@Override
public <S1 extends S> SqmTreatedBagJoin<L, S, S1> treatAs(EntityDomainType<S1> treatTarget, @Nullable String alias, boolean fetch) {
//noinspection unchecked
return (SqmTreatedBagJoin<L, S, S1>) wrappedPath.treatAs( treatTarget, alias, fetch );
}
}
|
SqmTreatedBagJoin
|
java
|
apache__flink
|
flink-table/flink-sql-client/src/test/java/org/apache/flink/table/client/cli/utils/SqlParserHelper.java
|
{
"start": 1163,
"end": 2295
}
|
class ____ {
// return the sql parser instance hold by this table evn.
private final TableEnvironment tableEnv;
public SqlParserHelper() {
tableEnv = TableEnvironment.create(EnvironmentSettings.newInstance().build());
}
/** prepare some tables for testing. */
public void registerTables() {
registerTable(
"create table MyTable (a int, b bigint, c varchar(32)) "
+ "with ('connector' = 'filesystem', 'path' = '/non', 'format' = 'csv')");
registerTable(
"create table MyOtherTable (a int, b bigint) "
+ "with ('connector' = 'filesystem', 'path' = '/non', 'format' = 'csv')");
registerTable(
"create table MySink (a int, c varchar(32)) with ('connector' = 'COLLECTION' )");
registerTable("create view MyView as select * from MyTable");
}
public void registerTable(String createTableStmt) {
tableEnv.executeSql(createTableStmt);
}
public Parser getSqlParser() {
return ((TableEnvironmentInternal) tableEnv).getParser();
}
}
|
SqlParserHelper
|
java
|
apache__dubbo
|
dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/beans/factory/annotation/ServiceAnnotationPostProcessor.java
|
{
"start": 9514,
"end": 13099
}
|
class ____
scanServiceBeans(resolvedPackagesToScan, registry);
}
}
/**
* Scan and registers service beans whose classes was annotated {@link Service}
*
* @param packagesToScan The base packages to scan
* @param registry {@link BeanDefinitionRegistry}
*/
private void scanServiceBeans(Set<String> packagesToScan, BeanDefinitionRegistry registry) {
scanned = true;
if (CollectionUtils.isEmpty(packagesToScan)) {
if (logger.isWarnEnabled()) {
logger.warn(
CONFIG_NO_BEANS_SCANNED,
"",
"",
"packagesToScan is empty , ServiceBean registry will be ignored!");
}
return;
}
DubboClassPathBeanDefinitionScanner scanner =
new DubboClassPathBeanDefinitionScanner(registry, environment, resourceLoader);
BeanNameGenerator beanNameGenerator = resolveBeanNameGenerator(registry);
scanner.setBeanNameGenerator(beanNameGenerator);
for (Class<? extends Annotation> annotationType : serviceAnnotationTypes) {
scanner.addIncludeFilter(new AnnotationTypeFilter(annotationType));
}
ScanExcludeFilter scanExcludeFilter = new ScanExcludeFilter();
scanner.addExcludeFilter(scanExcludeFilter);
for (String packageToScan : packagesToScan) {
// avoid duplicated scans
if (servicePackagesHolder.isPackageScanned(packageToScan)) {
if (logger.isInfoEnabled()) {
logger.info("Ignore package who has already bean scanned: " + packageToScan);
}
continue;
}
if (AotWithSpringDetector.useGeneratedArtifacts()) {
scanner.setIncludeAnnotationConfig(false);
}
// Registers @Service Bean first
scanner.scan(packageToScan);
// Finds all BeanDefinitionHolders of @Service whether @ComponentScan scans or not.
Set<BeanDefinitionHolder> beanDefinitionHolders =
findServiceBeanDefinitionHolders(scanner, packageToScan, registry, beanNameGenerator);
if (!CollectionUtils.isEmpty(beanDefinitionHolders)) {
if (logger.isInfoEnabled()) {
List<String> serviceClasses = new ArrayList<>(beanDefinitionHolders.size());
for (BeanDefinitionHolder beanDefinitionHolder : beanDefinitionHolders) {
serviceClasses.add(
beanDefinitionHolder.getBeanDefinition().getBeanClassName());
}
logger.info("Found " + beanDefinitionHolders.size()
+ " classes annotated by Dubbo @Service under package [" + packageToScan + "]: "
+ serviceClasses);
}
for (BeanDefinitionHolder beanDefinitionHolder : beanDefinitionHolders) {
processScannedBeanDefinition(beanDefinitionHolder);
servicePackagesHolder.addScannedClass(
beanDefinitionHolder.getBeanDefinition().getBeanClassName());
}
} else {
if (logger.isWarnEnabled()) {
logger.warn(
CONFIG_NO_ANNOTATIONS_FOUND,
"No annotations were found on the class",
"",
"No
|
here
|
java
|
apache__logging-log4j2
|
log4j-core-test/src/test/java/org/apache/logging/log4j/core/appender/rolling/RollingAppenderReconfigureTest.java
|
{
"start": 1448,
"end": 3003
}
|
class ____ {
private static final URL CONFIG =
RollingAppenderReconfigureTest.class.getResource("RollingAppenderReconfigureTest.xml");
private static final File CONFIG_FILE = FileUtils.toFile(CONFIG);
@TempLoggingDir
private static Path loggingPath;
@Test
@LoggerContextSource
void testReconfigure(final LoggerContext context) throws Exception {
final Logger logger = context.getLogger(getClass());
for (int i = 0; i < 500; ++i) {
logger.debug("This is test message number {}", i);
}
assertThat(loggingPath).isDirectoryContaining("glob:**/*.current").isDirectoryContaining("glob:**/*.rolled");
final String originalXmlConfig = FileUtils.readFileToString(CONFIG_FILE, UTF_8);
try {
final String updatedXmlConfig =
originalXmlConfig.replace("rollingtest.%i.rolled", "rollingtest.%i.reconfigured");
FileUtils.write(CONFIG_FILE, updatedXmlConfig, UTF_8);
// Reconfigure
context.reconfigure();
for (int i = 0; i < 500; ++i) {
logger.debug("This is test message number {}", i);
}
assertThat(loggingPath)
.isDirectoryContaining("glob:**/*.reconfigured")
.isDirectoryContaining("glob:**/*.current")
.isDirectoryContaining("glob:**/*.rolled");
} finally {
FileUtils.write(CONFIG_FILE, originalXmlConfig, UTF_8);
}
}
}
|
RollingAppenderReconfigureTest
|
java
|
apache__avro
|
lang/java/avro/src/test/java/org/apache/avro/SchemaFormatterTest.java
|
{
"start": 2827,
"end": 3019
}
|
class ____ implements SchemaFormatterFactory {
@Override
public SchemaFormatter getDefaultFormatter() {
return null;
}
}
private static
|
Wrongly_Named_SchemaFormatterFactory
|
java
|
spring-projects__spring-boot
|
documentation/spring-boot-docs/src/main/java/org/springframework/boot/docs/features/taskexecutionandscheduling/multiple/MyTaskExecutorConfiguration.java
|
{
"start": 1008,
"end": 1421
}
|
class ____ {
@Bean("applicationTaskExecutor")
SimpleAsyncTaskExecutor applicationTaskExecutor() {
return new SimpleAsyncTaskExecutor("app-");
}
@Bean("taskExecutor")
ThreadPoolTaskExecutor taskExecutor() {
ThreadPoolTaskExecutor threadPoolTaskExecutor = new ThreadPoolTaskExecutor();
threadPoolTaskExecutor.setThreadNamePrefix("async-");
return threadPoolTaskExecutor;
}
}
|
MyTaskExecutorConfiguration
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/Graphviz.java
|
{
"start": 669,
"end": 10981
}
|
class ____ {
private static final int NODE_LABEL_INDENT = 12;
private static final int CLUSTER_INDENT = 2;
private static final int INDENT = 1;
public static String dot(String name, Node<?> root) {
StringBuilder sb = new StringBuilder();
// name
sb.append(String.format(Locale.ROOT, """
digraph G { rankdir=BT;
label="%s";
node[shape=plaintext, color=azure1];
edge[color=black,arrowsize=0.5];
""", name));
handleNode(sb, root, new AtomicInteger(0), INDENT, true);
sb.append("}");
return sb.toString();
}
public static String dot(Map<String, ? extends Node<?>> clusters, boolean drawSubTrees) {
AtomicInteger nodeCounter = new AtomicInteger(0);
StringBuilder sb = new StringBuilder();
// name
sb.append("""
digraph G { rankdir=BT;
node[shape=plaintext, color=azure1];
edge[color=black];
graph[compound=true];
""");
int clusterNodeStart = 1;
int clusterId = 0;
StringBuilder clusterEdges = new StringBuilder();
for (Entry<String, ? extends Node<?>> entry : clusters.entrySet()) {
indent(sb, INDENT);
// draw cluster
sb.append("subgraph cluster");
sb.append(++clusterId);
sb.append(" {\n");
indent(sb, CLUSTER_INDENT);
sb.append("color=blue;\n");
indent(sb, CLUSTER_INDENT);
sb.append("label=");
sb.append(quoteGraphviz(entry.getKey()));
sb.append(";\n\n");
/* to help align the clusters, add an invisible node (that could
* otherwise be used for labeling but it consumes too much space)
* used for alignment */
indent(sb, CLUSTER_INDENT);
sb.append("c" + clusterId);
sb.append("[style=invis]\n");
// add edge to the first node in the cluster
indent(sb, CLUSTER_INDENT);
sb.append("node" + (nodeCounter.get() + 1));
sb.append(" -> ");
sb.append("c" + clusterId);
sb.append(" [style=invis];\n");
handleNode(sb, entry.getValue(), nodeCounter, CLUSTER_INDENT, drawSubTrees);
int clusterNodeStop = nodeCounter.get();
indent(sb, INDENT);
sb.append("}\n");
// connect cluster only if there are at least two
if (clusterId > 1) {
indent(clusterEdges, INDENT);
clusterEdges.append("node" + clusterNodeStart);
clusterEdges.append(" -> ");
clusterEdges.append("node" + clusterNodeStop);
clusterEdges.append("[ltail=cluster");
clusterEdges.append(clusterId - 1);
clusterEdges.append(" lhead=cluster");
clusterEdges.append(clusterId);
clusterEdges.append("];\n");
}
clusterNodeStart = clusterNodeStop;
}
sb.append("\n");
// connecting the clusters arranges them in a weird position
// so don't
// sb.append(clusterEdges.toString());
// align the cluster by requiring the invisible nodes in each cluster to be of the same rank
indent(sb, INDENT);
sb.append("{ rank=same");
for (int i = 1; i <= clusterId; i++) {
sb.append(" c" + i);
}
sb.append(" };\n}");
return sb.toString();
}
private static void handleNode(StringBuilder output, Node<?> n, AtomicInteger nodeId, int currentIndent, boolean drawSubTrees) {
// each node has its own id
int thisId = nodeId.incrementAndGet();
// first determine node info
StringBuilder nodeInfo = new StringBuilder();
nodeInfo.append("\n");
indent(nodeInfo, currentIndent + NODE_LABEL_INDENT);
nodeInfo.append("""
<table border="0" cellborder="1" cellspacing="0">
""");
indent(nodeInfo, currentIndent + NODE_LABEL_INDENT);
nodeInfo.append(String.format(Locale.ROOT, """
<th><td border="0" colspan="2" align="center"><b>%s</b></td></th>
""", n.nodeName()));
indent(nodeInfo, currentIndent + NODE_LABEL_INDENT);
List<Object> props = n.nodeProperties();
List<String> parsed = new ArrayList<>(props.size());
List<Node<?>> subTrees = new ArrayList<>();
for (Object v : props) {
// skip null values, children and location
if (v != null && n.children().contains(v) == false) {
if (v instanceof Collection<?> c) {
StringBuilder colS = new StringBuilder();
for (Object o : c) {
if (drawSubTrees && isAnotherTree(o)) {
subTrees.add((Node<?>) o);
} else {
colS.append(o);
colS.append("\n");
}
}
if (colS.length() > 0) {
parsed.add(colS.toString());
}
} else {
if (drawSubTrees && isAnotherTree(v)) {
subTrees.add((Node<?>) v);
} else {
parsed.add(v.toString());
}
}
}
}
for (String line : parsed) {
nodeInfo.append("<tr><td align=\"left\" bgcolor=\"azure2\">");
nodeInfo.append(escapeHtml(line));
nodeInfo.append("</td></tr>\n");
indent(nodeInfo, currentIndent + NODE_LABEL_INDENT);
}
nodeInfo.append("</table>\n");
// check any subtrees
if (subTrees.isEmpty() == false) {
// write nested trees
output.append(String.format(Locale.ROOT, """
subgraph cluster_%s{
style=filled; color=white; fillcolor=azure2; label="";
""", thisId));
}
// write node info
indent(output, currentIndent);
output.append("node");
output.append(thisId);
output.append("[label=");
output.append(quoteGraphviz(nodeInfo.toString()));
output.append("];\n");
if (subTrees.isEmpty() == false) {
indent(output, currentIndent + INDENT);
output.append("node[shape=ellipse, color=black]\n");
for (Node<?> node : subTrees) {
indent(output, currentIndent + INDENT);
drawNodeTree(output, node, "st_" + thisId + "_", 0);
}
output.append("\n}\n");
}
indent(output, currentIndent + 1);
// output.append("{ rankdir=LR; rank=same; \n");
int prevId = -1;
// handle children
for (Node<?> c : n.children()) {
// the child will always have the next id
int childId = nodeId.get() + 1;
handleNode(output, c, nodeId, currentIndent + INDENT, drawSubTrees);
indent(output, currentIndent + 1);
output.append("node");
output.append(childId);
output.append(" -> ");
output.append("node");
output.append(thisId);
output.append(";\n");
// add invisible connection between children for ordering
if (prevId != -1) {
indent(output, currentIndent + 1);
output.append("node");
output.append(prevId);
output.append(" -> ");
output.append("node");
output.append(childId);
output.append(";\n");
}
prevId = childId;
}
indent(output, currentIndent);
// output.append("}\n");
}
    /**
     * Recursively renders {@code node} and its subtree as plain (non-HTML) Graphviz statements.
     * <p>
     * Node identifiers are built by appending {@code counter} to {@code prefix}; each recursion
     * level extends the parent's generated name, keeping identifiers unique within the subtree.
     *
     * @param sb      buffer receiving the generated Graphviz output
     * @param node    subtree root to draw
     * @param prefix  identifier prefix inherited from the parent node
     * @param counter ordinal of this node among its siblings (becomes part of its identifier)
     */
    private static void drawNodeTree(StringBuilder sb, Node<?> node, String prefix, int counter) {
        String nodeName = prefix + counter;
        // children identifiers are derived from this node's full generated name
        prefix = nodeName;
        // draw node
        drawNode(sb, node, nodeName);
        // then draw all children nodes and connections between them to be on the same level
        sb.append("{ rankdir=LR; rank=same;\n");
        int prevId = -1;
        int saveId = counter;
        for (Node<?> child : node.children()) {
            int currId = ++counter;
            drawNode(sb, child, prefix + currId);
            if (prevId > -1) {
                // invisible edge keeps siblings in declaration order
                sb.append(prefix + prevId + " -> " + prefix + currId + " [style=invis];\n");
            }
            prevId = currId;
        }
        sb.append("}\n");
        // now draw connections to the parent (child ids ran from saveId+1 to counter)
        for (int i = saveId; i < counter; i++) {
            sb.append(prefix + (i + 1) + " -> " + nodeName + ";\n");
        }
        // recurse into each child's subtree, restarting from the saved sibling ordinal
        counter = saveId;
        for (Node<?> child : node.children()) {
            drawNodeTree(sb, child, prefix, ++counter);
        }
    }
private static void drawNode(StringBuilder sb, Node<?> node, String nodeName) {
if (node.children().isEmpty()) {
sb.append(nodeName + " [label=\"" + node.toString() + "\"];\n");
} else {
sb.append(nodeName + " [label=\"" + node.nodeName() + "\"];\n");
}
}
private static boolean isAnotherTree(Object value) {
if (value instanceof Node<?> n) {
// create a subgraph
if (n.children().size() > 0) {
return true;
}
}
return false;
}
private static String escapeHtml(Object value) {
return String.valueOf(value)
.replace("&", "&")
.replace("\"", """)
.replace("'", "'")
.replace("<", "<")
.replace(">", ">")
.replace("\n", "<br align=\"left\"/>");
}
private static String quoteGraphviz(String value) {
if (value.contains("<")) {
return "<" + value + ">";
}
return "\"" + value + "\"";
}
private static void indent(StringBuilder sb, int indent) {
for (int i = 0; i < indent; i++) {
sb.append(" ");
}
}
}
|
Graphviz
|
java
|
FasterXML__jackson-databind
|
src/main/java/tools/jackson/databind/util/ExceptionUtil.java
|
{
"start": 129,
"end": 2524
}
|
class ExceptionUtil {
    // static utility class: never instantiated (constructor name must match the class,
    // which also pins the class name to ExceptionUtil)
    private ExceptionUtil() {}
    /**
     * It is important never to catch all <code>Throwable</code>s. Some like
     * {@link InterruptedException} should be rethrown. Based on
     * <a href="https://www.scala-lang.org/api/2.13.10/scala/util/control/NonFatal$.html">scala.util.control.NonFatal</a>.
     *
     * This method should be used with care.
     * <p>
     * If the <code>Throwable</code> is fatal, it is rethrown, otherwise, this method just returns.
     * The input throwable is thrown if it is an <code>Error</code> or a <code>RuntimeException</code>.
     * Otherwise, the method wraps the throwable in a RuntimeException and throws that.
     * </p>
     *
     * @param throwable to check
     * @throws Error the input throwable if it is fatal
     * @throws RuntimeException the input throwable if it is fatal - throws the original throwable
     *    if is a <code>RuntimeException</code>. Otherwise, wraps the throwable in a RuntimeException.
     */
    public static void rethrowIfFatal(Throwable throwable) throws Error, RuntimeException {
        if (!isFatal(throwable)) {
            // non-fatal: caller may handle it normally
            return;
        }
        if (throwable instanceof Error error) {
            throw error;
        }
        if (throwable instanceof RuntimeException runtimeException) {
            throw runtimeException;
        }
        // fatal checked exception (e.g. InterruptedException): wrap so it can be thrown here
        throw new RuntimeException(throwable);
    }
    /**
     * It is important never to catch all <code>Throwable</code>s. Some like
     * {@link InterruptedException} should be rethrown. Based on
     * <a href="https://www.scala-lang.org/api/2.13.10/scala/util/control/NonFatal$.html">scala.util.control.NonFatal</a>.
     *
     * @param throwable to check
     * @return whether the <code>Throwable</code> is a fatal error
     */
    @SuppressWarnings("removal") // ThreadDeath is deprecated for removal but must still be treated as fatal
    private static boolean isFatal(Throwable throwable) {
        return (throwable instanceof VirtualMachineError
                || throwable instanceof ThreadDeath
                || throwable instanceof InterruptedException
                || throwable instanceof ClassCircularityError
                || throwable instanceof ClassFormatError
                || throwable instanceof IncompatibleClassChangeError
                || throwable instanceof BootstrapMethodError
                || throwable instanceof VerifyError
        );
    }
}
|
ExceptionUtil
|
java
|
apache__camel
|
core/camel-core-processor/src/main/java/org/apache/camel/processor/errorhandler/RedeliveryErrorHandler.java
|
{
"start": 26564,
"end": 77602
}
|
class ____ implements PooledExchangeTask, Runnable {
// state
private Exchange original;
private Exchange exchange;
private AsyncCallback callback;
private int redeliveryCounter;
private long redeliveryDelay;
// default behavior which can be overloaded on a per exception basis
private Predicate retryWhilePredicate;
private RedeliveryPolicy currentRedeliveryPolicy;
private Processor failureProcessor;
private Processor onRedeliveryProcessor;
private Processor onExceptionProcessor;
private Predicate handledPredicate;
private Predicate continuedPredicate;
private boolean useOriginalInMessage;
private boolean useOriginalInBody;
public RedeliveryTask() {
}
@Override
public String toString() {
return "RedeliveryTask";
}
    // Seeds task-local state from the error handler's defaults; per-exception
    // policies may later override these fields in handleException().
    @Override
    public void prepare(Exchange exchange, AsyncCallback callback) {
        this.retryWhilePredicate = retryWhilePolicy;
        this.currentRedeliveryPolicy = redeliveryPolicy;
        this.handledPredicate = getDefaultHandledPredicate();
        this.useOriginalInMessage = useOriginalMessagePolicy;
        this.useOriginalInBody = useOriginalBodyPolicy;
        this.onRedeliveryProcessor = redeliveryProcessor;
        this.onExceptionProcessor = RedeliveryErrorHandler.this.onExceptionProcessor;
        // do a defensive copy of the original Exchange, which is needed for redelivery so we can ensure the
        // original Exchange is being redelivered, and not a mutated Exchange
        this.original = redeliveryEnabled ? defensiveCopyExchangeIfNeeded(exchange) : null;
        this.exchange = exchange;
        this.callback = callback;
    }
    // Clears all task-local state so this pooled task instance can be reused
    // for another exchange without leaking references.
    @Override
    public void reset() {
        this.retryWhilePredicate = null;
        this.currentRedeliveryPolicy = null;
        this.handledPredicate = null;
        this.continuedPredicate = null;
        this.useOriginalInMessage = false;
        this.useOriginalInBody = false;
        this.onRedeliveryProcessor = null;
        this.onExceptionProcessor = null;
        this.original = null;
        this.exchange = null;
        this.callback = null;
        this.redeliveryCounter = 0;
        this.redeliveryDelay = 0;
    }
    /**
     * Processing and redelivery logic.
     * <p>
     * Rejects the exchange up-front when the error handler is no longer allowed to run
     * (e.g. forced shutdown); otherwise delegates to {@link #doRun()} and reports any
     * unexpected failure through the callback.
     */
    @Override
    public void run() {
        // can we still run
        if (!isRunAllowed()) {
            LOG.trace("Run not allowed, will reject executing exchange: {}", exchange);
            if (exchange.getException() == null) {
                exchange.setException(new RejectedExecutionException());
            }
            // capture the callback before releasing the task back to the pool (release resets state)
            AsyncCallback cb = callback;
            taskFactory.release(this);
            cb.done(false);
            return;
        }
        try {
            doRun();
        } catch (Exception e) {
            // unexpected exception during running so set exception and trigger callback
            // (do not do taskFactory.release as that happens later)
            exchange.setException(e);
            callback.done(false);
        }
    }
    /**
     * Decides the fate of the current exchange: deliver to the failure processor (e.g. a
     * dead letter channel) when exhausted or redelivery is not permitted, schedule a
     * (possibly delayed) redelivery attempt, or simply deliver to the output processor
     * on the first pass.
     *
     * @throws Exception if determining the redelivery delay or the exception policy fails
     */
    private void doRun() throws Exception {
        // did previous processing cause an exception?
        if (exchange.getException() != null) {
            handleException();
            onExceptionOccurred();
        }
        // compute if we are exhausted or cannot redeliver
        boolean redeliverAllowed = redeliveryCounter == 0 || isRedeliveryAllowed();
        boolean exhausted = false;
        if (redeliverAllowed) {
            // we can redeliver but check if we are exhausted first (optimized to only check when needed)
            exhausted = exchange.getExchangeExtension().isRedeliveryExhausted() || exchange.isRollbackOnly();
            if (!exhausted && redeliveryCounter > 0) {
                // its a potential redelivery so determine if we should redeliver or not
                redeliverAllowed
                        = currentRedeliveryPolicy.shouldRedeliver(exchange, redeliveryCounter, retryWhilePredicate);
            }
        }
        // if we are exhausted or redelivery is not allowed, then deliver to failure processor (eg such as DLC)
        if (!redeliverAllowed || exhausted) {
            Processor target = failureProcessor != null ? failureProcessor : deadLetter;
            // we should always invoke the deliverToFailureProcessor as it prepares, logs and does a fair
            // bit of work for exhausted exchanges (its only the target processor which may be null if handled by a savepoint)
            boolean isDeadLetterChannel = isDeadLetterChannel() && target == deadLetter;
            deliverToFailureProcessor(target, isDeadLetterChannel, exchange);
            // we are breaking out
        } else if (redeliveryCounter > 0) {
            // calculate the redelivery delay
            redeliveryDelay
                    = determineRedeliveryDelay(exchange, currentRedeliveryPolicy, redeliveryDelay, redeliveryCounter);
            if (redeliveryDelay > 0) {
                // okay there is a delay so create a scheduled task to have it executed in the future
                if (currentRedeliveryPolicy.isAsyncDelayedRedelivery() && !exchange.isTransacted()) {
                    runAsynchronousRedelivery();
                } else {
                    // async delayed redelivery was disabled or we are transacted so we must be synchronous
                    // as the transaction manager requires to execute in the same thread context
                    runSynchronousRedelivery();
                }
            } else {
                // execute the task immediately
                reactiveExecutor.schedule(this::redeliver);
            }
        } else {
            // Simple delivery
            outputAsync.process(exchange, doneSync -> {
                // only continue with callback if we are done
                if (isDone(exchange)) {
                    AsyncCallback cb = callback;
                    taskFactory.release(this);
                    reactiveExecutor.schedule(cb);
                } else {
                    // error occurred so loop back around and call ourselves
                    reactiveExecutor.schedule(this);
                }
            });
        }
    }
    /**
     * Schedules the delayed redelivery on the configured executor so the calling thread
     * is not blocked while waiting out the redelivery delay.
     */
    private void runAsynchronousRedelivery() {
        // we are doing a redelivery then a thread pool must be configured (see the doStart method)
        ObjectHelper.notNull(executorService,
                "Redelivery is enabled but ExecutorService has not been configured.", this);
        // schedule the redelivery task
        if (LOG.isTraceEnabled()) {
            LOG.trace("Scheduling redelivery task to run in {} millis for exchangeId: {}", redeliveryDelay,
                    exchange.getExchangeId());
        }
        executorService.schedule(() -> reactiveExecutor.schedule(this::redeliver), redeliveryDelay,
                TimeUnit.MILLISECONDS);
    }
    /**
     * Performs the redelivery delay synchronously via thread sleep (required when
     * transacted, as the transaction manager needs the same thread context), then
     * schedules the redelivery. Handles rejection while stopping and interruption.
     */
    private void runSynchronousRedelivery() {
        try {
            // we are doing synchronous redelivery and use thread sleep, so we keep track using a counter how many are sleeping
            redeliverySleepCounter.incrementAndGet();
            boolean complete = sleep();
            redeliverySleepCounter.decrementAndGet();
            if (!complete) {
                // the task was rejected
                exchange.setException(new RejectedExecutionException("Redelivery not allowed while stopping"));
                // mark the exchange as redelivery exhausted so the failure processor / dead letter channel can process the exchange
                exchange.getExchangeExtension().setRedeliveryExhausted(true);
                // jump to start of loop which then detects that we are failed and exhausted
                reactiveExecutor.schedule(this);
            } else {
                reactiveExecutor.schedule(this::redeliver);
            }
        } catch (InterruptedException e) {
            redeliverySleepCounter.decrementAndGet();
            // we were interrupted so break out
            exchange.setException(e);
            // mark the exchange to stop continue routing when interrupted
            // as we do not want to continue routing (for example a task has been cancelled)
            exchange.setRouteStop(true);
            reactiveExecutor.schedule(callback);
            // restore the interrupt flag for the owning thread
            Thread.currentThread().interrupt();
        }
    }
protected boolean isRunAllowed() {
// if camel context is forcing a shutdown then do not allow running
if (shutdownStrategy.isForceShutdown()) {
return false;
}
// redelivery policy can control if redelivery is allowed during stopping/shutdown
// but this only applies during a redelivery (counter must > 0)
if (redeliveryCounter > 0) {
if (currentRedeliveryPolicy.allowRedeliveryWhileStopping) {
return true;
} else if (preparingShutdown) {
// we are preparing for shutdown, now determine if we can still run
return isRunAllowedOnPreparingShutdown();
}
}
// we cannot run if we are stopping/stopped
return !isStoppingOrStopped();
}
protected boolean isRedeliveryAllowed() {
// redelivery policy can control if redelivery is allowed during stopping/shutdown
// but this only applies during a redelivery (this method is only invoked when counter > 0)
boolean stopping = isStoppingOrStopped();
if (!preparingShutdown && !stopping) {
// we are not preparing to shutdown and are not stopping so we can redeliver
return true;
} else {
// we are stopping or preparing to shutdown, so see policy
return currentRedeliveryPolicy.allowRedeliveryWhileStopping;
}
}
    /**
     * Performs one redelivery attempt: restores the exchange from the defensive copy,
     * runs any onRedelivery processor, emits the redelivery event, and processes the
     * exchange again, looping back on failure.
     */
    protected void redeliver() {
        // prepare for redelivery
        prepareExchangeForRedelivery();
        // letting onRedeliver be executed at first
        deliverToOnRedeliveryProcessor();
        if (exchange.isRouteStop()) {
            // the on redelivery can mark that the exchange should stop and therefore not perform a redelivery
            // and if so then we are done so continue callback
            AsyncCallback cb = callback;
            taskFactory.release(this);
            reactiveExecutor.schedule(cb);
            return;
        }
        if (LOG.isTraceEnabled()) {
            LOG.trace("Redelivering exchangeId: {} -> {} for Exchange: {}", exchange.getExchangeId(), outputAsync,
                    exchange);
        }
        // emit event we are doing redelivery
        if (camelContext.getCamelContextExtension().isEventNotificationApplicable()) {
            EventHelper.notifyExchangeRedelivery(exchange.getContext(), exchange, redeliveryCounter);
        }
        // process the exchange (also redelivery)
        outputAsync.process(exchange, doneSync -> {
            if (LOG.isTraceEnabled()) {
                LOG.trace("Redelivering exchangeId: {}", exchange.getExchangeId());
            }
            // only process if the exchange hasn't failed
            // and it has not been handled by the error processor
            if (isDone(exchange)) {
                AsyncCallback cb = callback;
                taskFactory.release(this);
                reactiveExecutor.schedule(cb);
            } else {
                // error occurred so loop back around which we do by invoking the processAsyncErrorHandler
                reactiveExecutor.schedule(this);
            }
        });
    }
    /**
     * Prepares the exchange to continue routing after a handled failure: clears the
     * exception, rollback flags and redelivery headers, then logs the continuation.
     *
     * @param exchange            the current exchange
     * @param isDeadLetterChannel whether this error handler is a dead letter channel
     */
    protected void prepareExchangeForContinue(Exchange exchange, boolean isDeadLetterChannel) {
        Exception caught = exchange.getException();
        if (caught != null) {
            // we continue so clear any exceptions
            exchange.setException(null);
        }
        // clear rollback flags
        exchange.setRollbackOnly(false);
        // reset cached streams so they can be read again
        MessageHelper.resetStreamCache(exchange.getIn());
        // its continued then remove traces of redelivery attempted and caught exception
        exchange.getIn().removeHeader(Exchange.REDELIVERED);
        exchange.getIn().removeHeader(Exchange.REDELIVERY_COUNTER);
        exchange.getIn().removeHeader(Exchange.REDELIVERY_MAX_COUNTER);
        exchange.getExchangeExtension().setFailureHandled(false);
        // keep the Exchange.EXCEPTION_CAUGHT as property so end user knows the caused exception
        // create log message
        String msg = "Failed delivery for " + ExchangeHelper.logIds(exchange);
        msg = msg + ". Exhausted after delivery attempt: " + redeliveryCounter + " caught: " + caught;
        msg = msg + ". Handled and continue routing.";
        // log that we failed but want to continue
        logFailedDelivery(false, false, false, true, isDeadLetterChannel, exchange, msg, null);
    }
    /**
     * Restores the exchange from the defensive copy taken in {@code prepare()} so the
     * original (unmutated) message is redelivered, while preserving the redelivery
     * bookkeeping headers across the restore.
     */
    protected void prepareExchangeForRedelivery() {
        if (!redeliveryEnabled) {
            throw new IllegalStateException(
                    "Redelivery is not enabled on " + RedeliveryErrorHandler.this
                                            + ". Make sure you have configured the error handler properly.");
        }
        // there must be a defensive copy of the exchange
        ObjectHelper.notNull(this.original, "Defensive copy of Exchange is null", RedeliveryErrorHandler.this);
        // okay we will give it another go so clear the exception so we can try again
        exchange.setException(null);
        // clear rollback flags
        exchange.setRollbackOnly(false);
        // TODO: We may want to store these as state on RedeliveryData so we keep them in case end user messes with Exchange
        // and then put these on the exchange when doing a redelivery / fault processor
        // preserve these headers
        Integer redeliveryCounter = exchange.getIn().getHeader(Exchange.REDELIVERY_COUNTER, Integer.class);
        Integer redeliveryMaxCounter = exchange.getIn().getHeader(Exchange.REDELIVERY_MAX_COUNTER, Integer.class);
        Boolean redelivered = exchange.getIn().getHeader(Exchange.REDELIVERED, Boolean.class);
        // we are redelivering so copy from original back to exchange
        exchange.getIn().copyFrom(this.original.getIn());
        exchange.setOut(null);
        // reset cached streams so they can be read again
        MessageHelper.resetStreamCache(exchange.getIn());
        // put back headers
        if (redeliveryCounter != null) {
            exchange.getIn().setHeader(Exchange.REDELIVERY_COUNTER, redeliveryCounter);
        }
        if (redeliveryMaxCounter != null) {
            exchange.getIn().setHeader(Exchange.REDELIVERY_MAX_COUNTER, redeliveryMaxCounter);
        }
        if (redelivered != null) {
            exchange.getIn().setHeader(Exchange.REDELIVERED, redelivered);
        }
    }
    /**
     * Records the caught exception on the exchange, resolves the matching exception
     * policy (onException) and lets it override the task's redelivery configuration,
     * logs the failed delivery, and increments the redelivery counter.
     */
    protected void handleException() {
        Exception e = exchange.getException();
        // e is never null
        Throwable previous = exchange.getProperty(ExchangePropertyKey.EXCEPTION_CAUGHT, Throwable.class);
        if (previous != null && previous != e) {
            // a 2nd exception was thrown while handling a previous exception
            // so we need to add the previous as suppressed by the new exception
            // see also FatalFallbackErrorHandler
            Throwable[] suppressed = e.getSuppressed();
            boolean found = false;
            for (Throwable t : suppressed) {
                if (t == previous) {
                    found = true;
                    break;
                }
            }
            if (!found) {
                // okay before adding suppression then we must be sure its not referring to same method
                // which otherwise can lead to add the same exception over and over again
                final boolean same = isSame(e, previous);
                if (!same) {
                    e.addSuppressed(previous);
                }
            }
        }
        // store the original caused exception in a property, so we can restore it later
        exchange.setProperty(ExchangePropertyKey.EXCEPTION_CAUGHT, e);
        // find the error handler to use (if any)
        ExceptionPolicy exceptionPolicy = getExceptionPolicy(exchange, e);
        if (exceptionPolicy != null) {
            // the policy overrides the defaults seeded in prepare()
            currentRedeliveryPolicy
                    = exceptionPolicy.createRedeliveryPolicy(exchange.getContext(), currentRedeliveryPolicy);
            handledPredicate = exceptionPolicy.getHandledPolicy();
            continuedPredicate = exceptionPolicy.getContinuedPolicy();
            retryWhilePredicate = exceptionPolicy.getRetryWhilePolicy();
            useOriginalInMessage = exceptionPolicy.isUseOriginalInMessage();
            useOriginalInBody = exceptionPolicy.isUseOriginalInBody();
            // route specific failure handler?
            Processor processor = null;
            Route rc = ExchangeHelper.getRoute(exchange);
            if (rc != null) {
                processor = rc.getOnException(exceptionPolicy.getId());
            } else {
                // note this should really not happen, but we have this code as a fail safe
                // to be backwards compatible with the old behavior
                LOG.warn(
                        "Cannot determine current route from Exchange with id: {}, will fallback and use first error handler.",
                        exchange.getExchangeId());
            }
            if (processor != null) {
                failureProcessor = processor;
            }
            // route specific on redelivery?
            processor = exceptionPolicy.getOnRedelivery();
            if (processor != null) {
                onRedeliveryProcessor = processor;
            }
            // route specific on exception occurred?
            processor = exceptionPolicy.getOnExceptionOccurred();
            if (processor != null) {
                onExceptionProcessor = processor;
            }
        }
        // only log if not failure handled or not an exhausted unit of work
        if (!ExchangeHelper.isFailureHandled(exchange) && !ExchangeHelper.isUnitOfWorkExhausted(exchange)) {
            String msg = "Failed delivery for " + ExchangeHelper.logIds(exchange)
                         + ". On delivery attempt: " + redeliveryCounter + " caught: " + e;
            logFailedDelivery(true, false, false, false, isDeadLetterChannel(), exchange, msg, e);
        }
        redeliveryCounter = incrementRedeliveryCounter(exchange);
        // store where the exception happened
        Route rc = ExchangeHelper.getRoute(exchange);
        if (rc != null) {
            exchange.setProperty(ExchangePropertyKey.FAILURE_ROUTE_ID, rc.getRouteId());
        }
    }
    /**
     * Gives an optional configured OnExceptionOccurred processor a chance to process just after an exception was
     * thrown while processing the Exchange. This allows to execute the processor at the same time the exception was
     * thrown.
     * <p>
     * Any exception thrown by the processor itself is logged and ignored so it cannot
     * override the exception already set on the exchange.
     */
    protected void onExceptionOccurred() {
        if (onExceptionProcessor == null) {
            return;
        }
        // run this synchronously as its just a Processor
        try {
            if (LOG.isTraceEnabled()) {
                LOG.trace("OnExceptionOccurred processor {} is processing Exchange: {} due exception occurred",
                        onExceptionProcessor, exchange);
            }
            onExceptionProcessor.process(exchange);
        } catch (Exception e) {
            // we do not want a new exception to override the existing one, so log it as a WARN
            LOG.warn("Error during processing OnExceptionOccurred. This exception is ignored.", e);
        }
        LOG.trace("OnExceptionOccurred processor done");
    }
    /**
     * Gives an optional configured redelivery processor a chance to process before the Exchange will be
     * redelivered. This can be used to alter the Exchange.
     * <p>
     * Unlike {@link #onExceptionOccurred()}, an exception thrown here IS set on the
     * exchange, so the redelivery attempt will see it.
     */
    protected void deliverToOnRedeliveryProcessor() {
        if (onRedeliveryProcessor == null) {
            return;
        }
        if (LOG.isTraceEnabled()) {
            LOG.trace("Redelivery processor {} is processing Exchange: {} before its redelivered",
                    onRedeliveryProcessor, exchange);
        }
        // run this synchronously as its just a Processor
        try {
            onRedeliveryProcessor.process(exchange);
        } catch (Exception e) {
            exchange.setException(e);
        }
        LOG.trace("Redelivery processor done");
    }
    /**
     * All redelivery attempts failed so move the exchange to the dead letter queue
     * (or another configured failure processor).
     *
     * @param processor           the failure processor to deliver to; may be {@code null}
     * @param isDeadLetterChannel whether the target is a dead letter channel
     * @param exchange            the exhausted exchange
     */
    protected void deliverToFailureProcessor(
            final Processor processor, final boolean isDeadLetterChannel, final Exchange exchange) {
        // we did not success with the redelivery so now we let the failure processor handle it
        // clear exception as we let the failure processor handle it
        Exception caught = exchange.getException();
        if (caught != null) {
            exchange.setException(null);
        }
        final boolean shouldHandle = shouldHandle(exchange);
        final boolean shouldContinue = shouldContinue(exchange);
        // regard both handled or continued as being handled
        boolean handled = false;
        // always handle if dead letter channel
        boolean handleOrContinue = isDeadLetterChannel || shouldHandle || shouldContinue;
        if (handleOrContinue) {
            // its handled then remove traces of redelivery attempted
            exchange.getIn().removeHeader(Exchange.REDELIVERED);
            exchange.getIn().removeHeader(Exchange.REDELIVERY_COUNTER);
            exchange.getIn().removeHeader(Exchange.REDELIVERY_MAX_COUNTER);
            exchange.getExchangeExtension().setRedeliveryExhausted(false);
            // and remove traces of rollback only and uow exhausted markers
            exchange.setRollbackOnly(false);
            exchange.removeProperty(ExchangePropertyKey.UNIT_OF_WORK_EXHAUSTED);
            handled = true;
        } else {
            // must decrement the redelivery counter as we didn't process the redelivery but is
            // handling by the failure handler. So we must -1 to not let the counter be out-of-sync
            decrementRedeliveryCounter(exchange);
        }
        // we should allow using the failure processor if we should not continue
        // or in case of continue then the failure processor is NOT a dead letter channel
        // because you can continue and still let the failure processor do some routing
        // before continue in the main route.
        boolean allowFailureProcessor = !shouldContinue || !isDeadLetterChannel;
        // effectively-final copy for use inside the async callback below
        final boolean fHandled = handled;
        if (allowFailureProcessor && processor != null) {
            // prepare original IN message/body if it should be moved instead of current message/body
            if (useOriginalInMessage || useOriginalInBody) {
                Message original = ExchangeHelper.getOriginalInMessage(exchange);
                if (useOriginalInMessage) {
                    LOG.trace("Using the original IN message instead of current");
                    exchange.setIn(original);
                } else {
                    LOG.trace("Using the original IN message body instead of current");
                    exchange.getIn().setBody(original.getBody());
                }
                if (exchange.hasOut()) {
                    LOG.trace("Removing the out message to avoid some uncertain behavior");
                    exchange.setOut(null);
                }
            }
            // reset cached streams so they can be read again
            MessageHelper.resetStreamCache(exchange.getIn());
            // store the last to endpoint as the failure endpoint
            exchange.setProperty(ExchangePropertyKey.FAILURE_ENDPOINT,
                    exchange.getProperty(ExchangePropertyKey.TO_ENDPOINT));
            // and store the route id, so we know in which route we failed
            Route rc = ExchangeHelper.getRoute(exchange);
            if (rc != null) {
                exchange.setProperty(ExchangePropertyKey.FAILURE_ROUTE_ID, rc.getRouteId());
            }
            // invoke custom on prepare
            if (onPrepareProcessor != null) {
                try {
                    LOG.trace("OnPrepare processor {} is processing Exchange: {}", onPrepareProcessor, exchange);
                    onPrepareProcessor.process(exchange);
                } catch (Exception e) {
                    // a new exception was thrown during prepare
                    exchange.setException(e);
                }
            }
            LOG.trace("Failure processor {} is processing Exchange: {}", processor, exchange);
            // fire event as we had a failure processor to handle it, which there is a event for
            final boolean deadLetterChannel = processor == deadLetter;
            if (camelContext.getCamelContextExtension().isEventNotificationApplicable()) {
                EventHelper.notifyExchangeFailureHandling(exchange.getContext(), exchange, processor, deadLetterChannel,
                        deadLetterUri);
            }
            // the failure processor could also be asynchronous
            AsyncProcessor afp = AsyncProcessorConverterHelper.convert(processor);
            afp.process(exchange, sync -> {
                LOG.trace("Failure processor done: {} processing Exchange: {}", processor, exchange);
                try {
                    prepareExchangeAfterFailure(exchange, isDeadLetterChannel, shouldHandle, shouldContinue);
                    // fire event as we had a failure processor to handle it, which there is a event for
                    if (camelContext.getCamelContextExtension().isEventNotificationApplicable()) {
                        EventHelper.notifyExchangeFailureHandled(exchange.getContext(), exchange, processor,
                                deadLetterChannel, deadLetterUri);
                    }
                } finally {
                    // if the fault was handled asynchronously, this should be reflected in the callback as well
                    reactiveExecutor.schedule(callback);
                    // create log message
                    String msg = "Failed delivery for " + ExchangeHelper.logIds(exchange);
                    msg = msg + ". Exhausted after delivery attempt: " + redeliveryCounter + " caught: " + caught;
                    if (isDeadLetterChannel && deadLetterUri != null) {
                        msg = msg + ". Handled by DeadLetterChannel: [" + URISupport.sanitizeUri(deadLetterUri) + "]";
                    } else {
                        msg = msg + ". Processed by failure processor: " + processor;
                    }
                    // log that we failed delivery as we are exhausted
                    logFailedDelivery(false, false, fHandled, false, isDeadLetterChannel, exchange, msg, null);
                    // we are done so we can release the task
                    taskFactory.release(this);
                }
            });
        } else {
            try {
                // store the last to endpoint as the failure endpoint
                exchange.setProperty(ExchangePropertyKey.FAILURE_ENDPOINT,
                        exchange.getProperty(ExchangePropertyKey.TO_ENDPOINT));
                // and store the route id, so we know in which route we failed
                Route rc = ExchangeHelper.getRoute(exchange);
                if (rc != null) {
                    exchange.setProperty(ExchangePropertyKey.FAILURE_ROUTE_ID, rc.getRouteId());
                }
                // invoke custom on prepare
                if (onPrepareProcessor != null) {
                    try {
                        LOG.trace("OnPrepare processor {} is processing Exchange: {}", onPrepareProcessor, exchange);
                        onPrepareProcessor.process(exchange);
                    } catch (Exception e) {
                        // a new exception was thrown during prepare
                        exchange.setException(e);
                    }
                }
                // no processor but we need to prepare after failure as well
                prepareExchangeAfterFailure(exchange, isDeadLetterChannel, shouldHandle, shouldContinue);
            } finally {
                // callback we are done
                reactiveExecutor.schedule(callback);
                // create log message
                String msg = "Failed delivery for " + ExchangeHelper.logIds(exchange);
                msg = msg + ". Exhausted after delivery attempt: " + redeliveryCounter + " caught: " + caught;
                if (processor != null) {
                    if (deadLetterUri != null) {
                        msg = msg + ". Handled by DeadLetterChannel: [" + URISupport.sanitizeUri(deadLetterUri) + "]";
                    } else {
                        msg = msg + ". Processed by failure processor: " + processor;
                    }
                }
                // log that we failed delivery as we are exhausted
                logFailedDelivery(false, false, fHandled, false, isDeadLetterChannel, exchange, msg, null);
                // we are done so we can release the task
                taskFactory.release(this);
            }
        }
    }
    /**
     * Marks the exchange as failure-handled and decides whether it ends up handled,
     * continued, or failed: honors an already-set handling decision, applies
     * continue/handle policies, and applies the dead letter channel's special rules
     * for new exceptions thrown while in the DLC.
     *
     * @param exchange            the current exchange
     * @param isDeadLetterChannel whether this error handler is a dead letter channel
     * @param shouldHandle        whether the handled predicate matched
     * @param shouldContinue      whether the continued predicate matched
     */
    protected void prepareExchangeAfterFailure(
            final Exchange exchange, final boolean isDeadLetterChannel,
            final boolean shouldHandle, final boolean shouldContinue) {
        Exception newException = exchange.getException();
        // we could not process the exchange so we let the failure processor handled it
        ExchangeHelper.setFailureHandled(exchange);
        // honor if already set a handling
        boolean alreadySet = exchange.getExchangeExtension().isErrorHandlerHandledSet();
        if (alreadySet) {
            boolean handled = exchange.getExchangeExtension().isErrorHandlerHandled();
            LOG.trace("This exchange has already been marked for handling: {}", handled);
            if (!handled) {
                // exception not handled, put exception back in the exchange
                exchange.setException(exchange.getProperty(ExchangePropertyKey.EXCEPTION_CAUGHT, Exception.class));
                // and put failure endpoint back as well
                exchange.setProperty(ExchangePropertyKey.FAILURE_ENDPOINT,
                        exchange.getProperty(ExchangePropertyKey.TO_ENDPOINT));
            }
            return;
        }
        // dead letter channel is special
        if (shouldContinue) {
            LOG.trace("This exchange is continued: {}", exchange);
            // okay we want to continue then prepare the exchange for that as well
            prepareExchangeForContinue(exchange, isDeadLetterChannel);
        } else if (shouldHandle) {
            LOG.trace("This exchange is handled so its marked as not failed: {}", exchange);
            exchange.getExchangeExtension().setErrorHandlerHandled(true);
        } else {
            // okay the redelivery policy are not explicit set to true, so we should allow to check for some
            // special situations when using dead letter channel
            if (isDeadLetterChannel) {
                // DLC is always handling the first thrown exception,
                // but if its a new exception then use the configured option
                boolean handled = newException == null || deadLetterHandleNewException;
                // when using DLC then log new exception whether its being handled or not, as otherwise it may appear as
                // the DLC swallow new exceptions by default (which is by design to ensure the DLC always complete,
                // to avoid causing endless poison messages that fails forever)
                if (newException != null && currentRedeliveryPolicy.isLogNewException()) {
                    String uri = URISupport.sanitizeUri(deadLetterUri);
                    String msg = "New exception occurred during processing by the DeadLetterChannel[" + uri + "] due "
                                 + newException.getMessage();
                    if (handled) {
                        msg += ". The new exception is being handled as deadLetterHandleNewException=true.";
                    } else {
                        msg += ". The new exception is not handled as deadLetterHandleNewException=false.";
                    }
                    logFailedDelivery(false, true, handled, false, true, exchange, msg, newException);
                }
                if (handled) {
                    LOG.trace("This exchange is handled so its marked as not failed: {}", exchange);
                    exchange.getExchangeExtension().setErrorHandlerHandled(true);
                    return;
                }
            }
            // not handled by default
            prepareExchangeAfterFailureNotHandled(exchange);
        }
    }
    /**
     * Marks the exchange as failed (not handled/continued): restores the caught
     * exception and the failure endpoint/route properties on the exchange.
     */
    private void prepareExchangeAfterFailureNotHandled(Exchange exchange) {
        LOG.trace("This exchange is not handled or continued so its marked as failed: {}", exchange);
        // exception not handled, put exception back in the exchange
        exchange.getExchangeExtension().setErrorHandlerHandled(false);
        exchange.setException(exchange.getProperty(ExchangePropertyKey.EXCEPTION_CAUGHT, Exception.class));
        // and put failure endpoint back as well
        exchange.setProperty(ExchangePropertyKey.FAILURE_ENDPOINT, exchange.getProperty(ExchangePropertyKey.TO_ENDPOINT));
        // and store the route id so we know in which route we failed
        String routeId = ExchangeHelper.getAtRouteId(exchange);
        if (routeId != null) {
            exchange.setProperty(ExchangePropertyKey.FAILURE_ROUTE_ID, routeId);
        }
    }
private void logFailedDelivery(
boolean shouldRedeliver, boolean newException, boolean handled, boolean continued, boolean isDeadLetterChannel,
Exchange exchange, String message, Throwable e) {
if (logger == null) {
return;
}
if (!exchange.isRollbackOnly() && !exchange.isRollbackOnlyLast()) {
if (newException && !currentRedeliveryPolicy.isLogNewException()) {
// do not log new exception
return;
}
// if we should not rollback, then check whether logging is enabled
if (!newException && handled && !currentRedeliveryPolicy.isLogHandled()) {
// do not log handled
return;
}
if (!newException && continued && !currentRedeliveryPolicy.isLogContinued()) {
// do not log handled
return;
}
if (!newException && shouldRedeliver && !currentRedeliveryPolicy.isLogRetryAttempted()) {
// do not log retry attempts
return;
}
if (!newException && shouldRedeliver) {
if (currentRedeliveryPolicy.isLogRetryAttempted()) {
if (currentRedeliveryPolicy.getRetryAttemptedLogInterval() > 1
&& redeliveryCounter % currentRedeliveryPolicy.getRetryAttemptedLogInterval() != 0) {
// do not log retry attempt because it is excluded by the retryAttemptedLogInterval
return;
}
} else {
// do not log retry attempts
return;
}
}
if (!newException && !shouldRedeliver && !currentRedeliveryPolicy.isLogExhausted()) {
// do not log exhausted
return;
}
}
LoggingLevel newLogLevel;
boolean logStackTrace;
if (exchange.isRollbackOnly() || exchange.isRollbackOnlyLast()) {
newLogLevel = currentRedeliveryPolicy.getRetriesExhaustedLogLevel();
logStackTrace = currentRedeliveryPolicy.isLogStackTrace();
} else if (shouldRedeliver) {
newLogLevel = currentRedeliveryPolicy.getRetryAttemptedLogLevel();
logStackTrace = currentRedeliveryPolicy.isLogRetryStackTrace();
} else {
newLogLevel = currentRedeliveryPolicy.getRetriesExhaustedLogLevel();
logStackTrace = currentRedeliveryPolicy.isLogStackTrace();
}
if (e == null) {
e = exchange.getProperty(ExchangePropertyKey.EXCEPTION_CAUGHT, Exception.class);
}
if (newException) {
// log at most WARN level
if (newLogLevel == LoggingLevel.ERROR) {
newLogLevel = LoggingLevel.WARN;
}
String msg = message;
if (msg == null) {
msg = "New exception " + ExchangeHelper.logIds(exchange);
// special for logging the new exception
if (e != null) {
msg = msg + " due: " + e.getMessage();
}
}
if (e != null && logStackTrace) {
logger.log(msg, e, newLogLevel);
} else {
logger.log(msg, newLogLevel);
}
} else if (exchange.isRollbackOnly() || exchange.isRollbackOnlyLast()) {
String msg = "Rollback " + ExchangeHelper.logIds(exchange);
Throwable cause = exchange.getException() != null
? exchange.getException() : exchange.getProperty(ExchangePropertyKey.EXCEPTION_CAUGHT, Throwable.class);
if (cause != null) {
msg = msg + " due: " + cause.getMessage();
}
// should we include message history
if (!shouldRedeliver && currentRedeliveryPolicy.isLogExhaustedMessageHistory()) {
// only use the exchange formatter if we should log exhausted message body (and if using a custom formatter then always use it)
ExchangeFormatter formatter = customExchangeFormatter
? exchangeFormatter
: (currentRedeliveryPolicy.isLogExhaustedMessageBody() || camelContext.isLogExhaustedMessageBody()
? exchangeFormatter : null);
String routeStackTrace = MessageHelper.dumpMessageHistoryStacktrace(exchange, formatter, false);
msg = msg + "\n" + routeStackTrace;
}
if (newLogLevel == LoggingLevel.ERROR) {
// log intended rollback on maximum WARN level (not ERROR)
logger.log(msg, LoggingLevel.WARN);
} else {
// otherwise use the desired logging level
logger.log(msg, newLogLevel);
}
} else {
String msg = message;
// should we include message history
if (!shouldRedeliver && currentRedeliveryPolicy.isLogExhaustedMessageHistory()) {
// only use the exchange formatter if we should log exhausted message body (and if using a custom formatter then always use it)
ExchangeFormatter formatter = customExchangeFormatter
? exchangeFormatter
: (currentRedeliveryPolicy.isLogExhaustedMessageBody() || camelContext.isLogExhaustedMessageBody()
? exchangeFormatter : null);
String routeStackTrace
= MessageHelper.dumpMessageHistoryStacktrace(exchange, formatter, e != null && logStackTrace);
msg = msg + "\n" + routeStackTrace;
}
if (e != null && logStackTrace) {
logger.log(msg, e, newLogLevel);
} else {
logger.log(msg, newLogLevel);
}
}
}
/**
* Determines whether or not to continue if we are exhausted.
*
* @param exchange the current exchange
* @return <tt>true</tt> to continue, or <tt>false</tt> to exhaust.
*/
private boolean shouldContinue(Exchange exchange) {
if (continuedPredicate != null) {
return continuedPredicate.matches(exchange);
}
// do not continue by default
return false;
}
/**
* Determines whether or not to handle if we are exhausted.
*
* @param exchange the current exchange
* @return <tt>true</tt> to handle, or <tt>false</tt> to exhaust.
*/
private boolean shouldHandle(Exchange exchange) {
if (handledPredicate != null) {
return handledPredicate.matches(exchange);
}
// do not handle by default
return false;
}
/**
* Increments the redelivery counter and adds the redelivered flag if the message has been redelivered
*/
private int incrementRedeliveryCounter(Exchange exchange) {
Message in = exchange.getIn();
Integer counter = in.getHeader(Exchange.REDELIVERY_COUNTER, Integer.class);
int next = counter != null ? counter + 1 : 1;
in.setHeader(Exchange.REDELIVERY_COUNTER, next);
in.setHeader(Exchange.REDELIVERED, Boolean.TRUE);
// if maximum redeliveries is used, then provide that information as well
if (currentRedeliveryPolicy.getMaximumRedeliveries() > 0) {
in.setHeader(Exchange.REDELIVERY_MAX_COUNTER, currentRedeliveryPolicy.getMaximumRedeliveries());
}
return next;
}
/**
* Method for sleeping during redelivery attempts.
* <p/>
* This task is for the synchronous blocking. If using async delayed then a scheduled thread pool is used for
* sleeping and trigger redeliveries.
*/
@SuppressWarnings("BusyWait")
public boolean sleep() throws InterruptedException {
// for small delays then just sleep
if (redeliveryDelay < 1000) {
currentRedeliveryPolicy.sleep(redeliveryDelay);
return true;
}
StopWatch watch = new StopWatch();
LOG.debug("Sleeping for: {} millis until attempting redelivery", redeliveryDelay);
while (watch.taken() < redeliveryDelay) {
// sleep using 1 sec interval
long delta = redeliveryDelay - watch.taken();
long max = Math.min(1000, delta);
if (max > 0) {
LOG.trace("Sleeping for: {} millis until waking up for re-check", max);
Thread.sleep(max);
}
// are we preparing for shutdown then only do redelivery if allowed
if (preparingShutdown && !currentRedeliveryPolicy.isAllowRedeliveryWhileStopping()) {
LOG.debug("Rejected redelivery while stopping");
return false;
}
}
return true;
}
}
private static boolean isSame(Exception e, Throwable previous) {
StackTraceElement[] ste1 = e.getStackTrace();
StackTraceElement[] ste2 = previous.getStackTrace();
boolean same = false;
if (ste1 != null && ste2 != null && ste1.length > 0 && ste2.length > 0) {
same = ste1[0].getClassName().equals(ste2[0].getClassName())
&& ste1[0].getLineNumber() == ste2[0].getLineNumber();
}
return same;
}
/**
* Prepares the redelivery counter and boolean flag for the failure handle processor
*/
private void decrementRedeliveryCounter(Exchange exchange) {
Message in = exchange.getIn();
Integer counter = in.getHeader(Exchange.REDELIVERY_COUNTER, Integer.class);
if (counter != null) {
int prev = counter - 1;
in.setHeader(Exchange.REDELIVERY_COUNTER, prev);
// set boolean flag according to counter
in.setHeader(Exchange.REDELIVERED, prev > 0 ? Boolean.TRUE : Boolean.FALSE);
} else {
// not redelivered
in.setHeader(Exchange.REDELIVERY_COUNTER, 0);
in.setHeader(Exchange.REDELIVERED, Boolean.FALSE);
}
}
@Override
public boolean determineIfRedeliveryIsEnabled() throws Exception {
// determine if redeliver is enabled either on error handler
if (getRedeliveryPolicy().getMaximumRedeliveries() != 0) {
// must check for != 0 as (-1 means redeliver forever)
return true;
}
if (retryWhilePolicy != null) {
return true;
}
// or on the exception policies
if (exceptionPolicies != null && !exceptionPolicies.isEmpty()) {
// walk them to see if any of them have a maximum redeliveries > 0 or retry until set
for (ExceptionPolicy def : exceptionPolicies.values()) {
if (def.determineIfRedeliveryIsEnabled(camelContext)) {
return true;
}
}
}
return false;
}
@Override
protected void doStart() throws Exception {
// determine if redeliver is enabled or not
redeliveryEnabled = determineIfRedeliveryIsEnabled();
if (LOG.isTraceEnabled()) {
LOG.trace("Redelivery enabled: {} on error handler: {}", redeliveryEnabled, this);
}
// we only need thread pool if redelivery is enabled
if (redeliveryEnabled) {
if (executorService == null) {
// use default shared executor service
executorService = PluginHelper.getErrorHandlerExecutorService(camelContext);
}
if (LOG.isDebugEnabled()) {
LOG.debug("Using ExecutorService: {} for redeliveries on error handler: {}", executorService, this);
}
}
// reset flag when starting
preparingShutdown = false;
redeliverySleepCounter.set(0);
// calculate if we can use simple task or not
// if we need redelivery and other things then we cannot)
// however if we dont then its less memory overhead (and a bit less cpu) of using the simple task
simpleTask = deadLetter == null && !redeliveryEnabled && (exceptionPolicies == null || exceptionPolicies.isEmpty())
&& onPrepareProcessor == null;
boolean pooled = camelContext.getCamelContextExtension().getExchangeFactory().isPooled();
if (pooled) {
String id = output instanceof IdAware ? ((IdAware) output).getId() : output.toString();
taskFactory = new PooledTaskFactory(id) {
@Override
public PooledExchangeTask create(Exchange exchange, AsyncCallback callback) {
return simpleTask ? new SimpleTask() : new RedeliveryTask();
}
};
int capacity = camelContext.getCamelContextExtension().getExchangeFactory().getCapacity();
taskFactory.setCapacity(capacity);
} else {
taskFactory = new PrototypeTaskFactory() {
@Override
public PooledExchangeTask create(Exchange exchange, AsyncCallback callback) {
return simpleTask ? new SimpleTask() : new RedeliveryTask();
}
};
}
LOG.trace("Using TaskFactory: {}", taskFactory);
ServiceHelper.startService(taskFactory, output, outputAsync, deadLetter);
}
@Override
protected void doShutdown() throws Exception {
ServiceHelper.stopAndShutdownServices(deadLetter, output, outputAsync, taskFactory);
}
}
|
RedeliveryTask
|
java
|
elastic__elasticsearch
|
x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/BlockPostingsFormatTests.java
|
{
"start": 2349,
"end": 5955
}
|
class ____ extends BasePostingsFormatTestCase {
private final Codec codec = TestUtil.alwaysPostingsFormat(new Lucene50RWPostingsFormat());
@Override
protected Codec getCodec() {
return codec;
}
/** Make sure the final sub-block(s) are not skipped. */
public void testFinalBlock() throws Exception {
Directory d = newDirectory();
IndexWriter w = new IndexWriter(d, new IndexWriterConfig(new MockAnalyzer(random())));
for (int i = 0; i < 25; i++) {
Document doc = new Document();
doc.add(newStringField("field", Character.toString((char) (97 + i)), Field.Store.NO));
doc.add(newStringField("field", "z" + Character.toString((char) (97 + i)), Field.Store.NO));
w.addDocument(doc);
}
w.forceMerge(1);
DirectoryReader r = DirectoryReader.open(w);
assertEquals(1, r.leaves().size());
FieldReader field = (FieldReader) r.leaves().get(0).reader().terms("field");
// We should see exactly two blocks: one root block (prefix empty string) and one block for z*
// terms (prefix z):
Stats stats = field.getStats();
assertEquals(0, stats.floorBlockCount);
assertEquals(2, stats.nonFloorBlockCount);
r.close();
w.close();
d.close();
}
public void testImpactSerialization() throws IOException {
// omit norms and omit freqs
doTestImpactSerialization(Collections.singletonList(new Impact(1, 1L)));
// omit freqs
doTestImpactSerialization(Collections.singletonList(new Impact(1, 42L)));
// omit freqs with very large norms
doTestImpactSerialization(Collections.singletonList(new Impact(1, -100L)));
// omit norms
doTestImpactSerialization(Collections.singletonList(new Impact(30, 1L)));
// omit norms with large freq
doTestImpactSerialization(Collections.singletonList(new Impact(500, 1L)));
// freqs and norms, basic
doTestImpactSerialization(
Arrays.asList(
new Impact(1, 7L),
new Impact(3, 9L),
new Impact(7, 10L),
new Impact(15, 11L),
new Impact(20, 13L),
new Impact(28, 14L)
)
);
// freqs and norms, high values
doTestImpactSerialization(
Arrays.asList(
new Impact(2, 2L),
new Impact(10, 10L),
new Impact(12, 50L),
new Impact(50, -100L),
new Impact(1000, -80L),
new Impact(1005, -3L)
)
);
}
private void doTestImpactSerialization(List<Impact> impacts) throws IOException {
CompetitiveImpactAccumulator acc = new CompetitiveImpactAccumulator();
for (Impact impact : impacts) {
acc.add(impact.freq, impact.norm);
}
try (Directory dir = newDirectory()) {
try (IndexOutput out = EndiannessReverserUtil.createOutput(dir, "foo", IOContext.DEFAULT)) {
Lucene50SkipWriter.writeImpacts(acc, out);
}
try (IndexInput in = EndiannessReverserUtil.openInput(dir, "foo", IOContext.DEFAULT)) {
byte[] b = new byte[Math.toIntExact(in.length())];
in.readBytes(b, 0, b.length);
List<Impact> impacts2 = Lucene50ScoreSkipReader.readImpacts(new ByteArrayDataInput(b), new MutableImpactList());
assertEquals(impacts, impacts2);
}
}
}
}
|
BlockPostingsFormatTests
|
java
|
apache__dubbo
|
dubbo-metrics/dubbo-metrics-default/src/test/java/org/apache/dubbo/rpc/cluster/filter/MockInvocation.java
|
{
"start": 1555,
"end": 4580
}
|
class ____ extends RpcInvocation {
private Map<String, Object> attachments;
public MockInvocation() {
attachments = new HashMap<>();
attachments.put(PATH_KEY, "dubbo");
attachments.put(GROUP_KEY, "dubbo");
attachments.put(VERSION_KEY, "1.0.0");
attachments.put(DUBBO_VERSION_KEY, "1.0.0");
attachments.put(TOKEN_KEY, "sfag");
attachments.put(TIMEOUT_KEY, "1000");
}
@Override
public String getTargetServiceUniqueName() {
return null;
}
@Override
public String getProtocolServiceKey() {
return null;
}
public String getMethodName() {
return "echo";
}
@Override
public String getServiceName() {
return "DemoService";
}
public Class<?>[] getParameterTypes() {
return new Class[] {String.class};
}
public Object[] getArguments() {
return new Object[] {"aa"};
}
public Map<String, String> getAttachments() {
return new AttachmentsAdapter.ObjectToStringMap(attachments);
}
@Override
public Map<String, Object> getObjectAttachments() {
return attachments;
}
@Override
public void setAttachment(String key, String value) {
setObjectAttachment(key, value);
}
@Override
public void setAttachment(String key, Object value) {
setObjectAttachment(key, value);
}
@Override
public void setObjectAttachment(String key, Object value) {
attachments.put(key, value);
}
@Override
public void setAttachmentIfAbsent(String key, String value) {
setObjectAttachmentIfAbsent(key, value);
}
@Override
public void setAttachmentIfAbsent(String key, Object value) {
setObjectAttachmentIfAbsent(key, value);
}
@Override
public void setObjectAttachmentIfAbsent(String key, Object value) {
attachments.put(key, value);
}
public Invoker<?> getInvoker() {
return null;
}
@Override
public void setServiceModel(ServiceModel serviceModel) {}
@Override
public ServiceModel getServiceModel() {
return null;
}
@Override
public Object put(Object key, Object value) {
return null;
}
@Override
public Object get(Object key) {
return null;
}
@Override
public Map<Object, Object> getAttributes() {
return null;
}
public String getAttachment(String key) {
return (String) getObjectAttachments().get(key);
}
@Override
public Object getObjectAttachment(String key) {
return attachments.get(key);
}
public String getAttachment(String key, String defaultValue) {
return (String) getObjectAttachments().get(key);
}
@Override
public Object getObjectAttachment(String key, Object defaultValue) {
Object result = attachments.get(key);
if (result == null) {
return defaultValue;
}
return result;
}
}
|
MockInvocation
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/core/convert/support/DateToInstantConverter.java
|
{
"start": 1343,
"end": 1492
}
|
class ____ implements Converter<Date, Instant> {
@Override
public Instant convert(Date date) {
return date.toInstant();
}
}
|
DateToInstantConverter
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/AccessExecution.java
|
{
"start": 1180,
"end": 3610
}
|
interface ____ {
/**
* Returns the {@link ExecutionAttemptID} for this Execution.
*
* @return ExecutionAttemptID for this execution
*/
ExecutionAttemptID getAttemptId();
/**
* Returns the attempt number for this execution.
*
* @return attempt number for this execution.
*/
int getAttemptNumber();
/**
* Returns the timestamps for every {@link ExecutionState}.
*
* @return timestamps for each state
*/
long[] getStateTimestamps();
/**
* Returns the end timestamps for every {@link ExecutionState}.
*
* @return timestamps for each state
*/
long[] getStateEndTimestamps();
/**
* Returns the current {@link ExecutionState} for this execution.
*
* @return execution state for this execution
*/
ExecutionState getState();
/**
* Returns the {@link TaskManagerLocation} for this execution.
*
* @return taskmanager location for this execution.
*/
TaskManagerLocation getAssignedResourceLocation();
/**
* Returns the exception that caused the job to fail. This is the first root exception that was
* not recoverable and triggered job failure.
*
* @return an {@code Optional} of {@link ErrorInfo} containing the {@code Throwable} and the
* time it was registered if an error occurred. If no error occurred an empty {@code
* Optional} will be returned.
*/
Optional<ErrorInfo> getFailureInfo();
/**
* Returns the timestamp for the given {@link ExecutionState}.
*
* @param state state for which the timestamp should be returned
* @return timestamp for the given state
*/
long getStateTimestamp(ExecutionState state);
/**
* Returns the end timestamp for the given {@link ExecutionState}.
*
* @param state state for which the timestamp should be returned
* @return timestamp for the given state
*/
long getStateEndTimestamp(ExecutionState state);
/**
* Returns the user-defined accumulators as strings.
*
* @return user-defined accumulators as strings.
*/
StringifiedAccumulatorResult[] getUserAccumulatorsStringified();
/**
* Returns the subtask index of this execution.
*
* @return subtask index of this execution.
*/
int getParallelSubtaskIndex();
IOMetrics getIOMetrics();
}
|
AccessExecution
|
java
|
junit-team__junit5
|
junit-jupiter-engine/src/main/java/org/junit/jupiter/engine/extension/ExtensionRegistrar.java
|
{
"start": 3121,
"end": 3248
}
|
class ____ created. Until they
* are initialized, such extensions are not available for use.
*
* @param testClass the test
|
is
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java
|
{
"start": 3711,
"end": 4214
}
|
class ____ extends ScriptDocValues<Long> {
public Longs(Supplier<Long> supplier) {
super(supplier);
}
public long getValue() {
return get(0);
}
@Override
public Long get(int index) {
throwIfEmpty();
throwIfBeyondLength(index);
return supplier.getInternal(index);
}
@Override
public int size() {
return supplier.size();
}
}
public static
|
Longs
|
java
|
quarkusio__quarkus
|
core/runtime/src/main/java/io/quarkus/runtime/configuration/QuarkusConfigValue.java
|
{
"start": 2128,
"end": 3563
}
|
class ____ implements ObjectSubstitution<ConfigValue, QuarkusConfigValue> {
@Override
public QuarkusConfigValue serialize(final ConfigValue obj) {
QuarkusConfigValue configValue = new QuarkusConfigValue();
configValue.setName(obj.getName());
configValue.setValue(obj.getValue());
configValue.setRawValue(obj.getRawValue());
configValue.setProfile(obj.getProfile());
configValue.setConfigSourceName(obj.getConfigSourceName());
configValue.setConfigSourceOrdinal(obj.getConfigSourceOrdinal());
configValue.setConfigSourcePosition(obj.getConfigSourcePosition());
configValue.setLineNumber(obj.getLineNumber());
return configValue;
}
@Override
public ConfigValue deserialize(final QuarkusConfigValue obj) {
return ConfigValue.builder()
.withName(obj.getName())
.withValue(obj.getValue())
.withRawValue(obj.getRawValue())
.withProfile(obj.getProfile())
.withConfigSourceName(obj.getConfigSourceName())
.withConfigSourceOrdinal(obj.getConfigSourceOrdinal())
.withConfigSourcePosition(obj.getConfigSourcePosition())
.withLineNumber(obj.getLineNumber())
.build();
}
}
}
|
Substitution
|
java
|
apache__dubbo
|
dubbo-config/dubbo-config-spring6/src/main/java/org/apache/dubbo/config/spring6/beans/factory/aot/ReferencedFieldValueResolver.java
|
{
"start": 2396,
"end": 2475
}
|
class ____ being
* used (typically to support private fields).
*/
public final
|
is
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/generated/SimpleEntity.java
|
{
"start": 441,
"end": 997
}
|
class ____ {
@Id
@GeneratedValue
private Integer id;
private String data;
@Generated
@Column(columnDefinition = "integer default 1")
private int caseNumberInsert;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getData() {
return data;
}
public void setData(String data) {
this.data = data;
}
public int getCaseNumberInsert() {
return caseNumberInsert;
}
public void setCaseNumberInsert(int caseNumberInsert) {
this.caseNumberInsert = caseNumberInsert;
}
}
|
SimpleEntity
|
java
|
apache__camel
|
components/camel-hazelcast/src/test/java/org/apache/camel/component/hazelcast/HazelcastMultimapProducerTest.java
|
{
"start": 1564,
"end": 8106
}
|
class ____ extends HazelcastCamelTestSupport {
@Mock
private MultiMap<Object, Object> map;
@Override
protected void trainHazelcastInstance(HazelcastInstance hazelcastInstance) {
when(hazelcastInstance.getMultiMap("bar")).thenReturn(map);
}
@Override
protected void verifyHazelcastInstance(HazelcastInstance hazelcastInstance) {
verify(hazelcastInstance, atLeastOnce()).getMultiMap("bar");
}
@AfterEach
public void verifyMapMock() {
verifyNoMoreInteractions(map);
}
@Test
public void testWithInvalidOperation() {
assertThrows(CamelExecutionException.class,
() -> template.sendBodyAndHeader("direct:putInvalid", "my-foo", HazelcastConstants.OBJECT_ID, "4711"));
}
@Test
public void testPut() throws InterruptedException {
template.sendBodyAndHeader("direct:put", "my-foo", HazelcastConstants.OBJECT_ID, "4711");
verify(map).put("4711", "my-foo");
}
@Test
public void testPutWithOperationName() throws InterruptedException {
template.sendBodyAndHeader("direct:putWithOperationName", "my-foo", HazelcastConstants.OBJECT_ID, "4711");
verify(map).put("4711", "my-foo");
}
@Test
public void testPutWithOperationNumber() throws InterruptedException {
template.sendBodyAndHeader("direct:putWithOperationNumber", "my-foo", HazelcastConstants.OBJECT_ID, "4711");
verify(map).put("4711", "my-foo");
}
@Test
public void testRemoveValue() {
template.sendBodyAndHeader("direct:removeValue", "my-foo", HazelcastConstants.OBJECT_ID, "4711");
verify(map).remove("4711", "my-foo");
}
@Test
public void testGet() {
when(map.get("4711")).thenReturn(Arrays.<Object> asList("my-foo"));
template.sendBodyAndHeader("direct:get", null, HazelcastConstants.OBJECT_ID, "4711");
verify(map).get("4711");
Collection<?> body = consumer.receiveBody("seda:out", 5000, Collection.class);
assertTrue(body.contains("my-foo"));
}
@Test
public void testDelete() {
template.sendBodyAndHeader("direct:delete", null, HazelcastConstants.OBJECT_ID, 4711);
verify(map).remove(4711);
}
@Test
public void testClear() {
template.sendBody("direct:clear", "test");
verify(map).clear();
}
@Test
public void testValueCount() {
template.sendBodyAndHeader("direct:valueCount", "test", HazelcastConstants.OBJECT_ID, "4711");
verify(map).valueCount("4711");
}
@Test
public void testContainsKey() {
when(map.containsKey("testOk")).thenReturn(true);
when(map.containsKey("testKo")).thenReturn(false);
template.sendBodyAndHeader("direct:containsKey", null, HazelcastConstants.OBJECT_ID, "testOk");
Boolean body = consumer.receiveBody("seda:out", 5000, Boolean.class);
verify(map).containsKey("testOk");
assertEquals(true, body);
template.sendBodyAndHeader("direct:containsKey", null, HazelcastConstants.OBJECT_ID, "testKo");
body = consumer.receiveBody("seda:out", 5000, Boolean.class);
verify(map).containsKey("testKo");
assertEquals(false, body);
}
@Test
public void testContainsValue() {
when(map.containsValue("testOk")).thenReturn(true);
when(map.containsValue("testKo")).thenReturn(false);
template.sendBody("direct:containsValue", "testOk");
Boolean body = consumer.receiveBody("seda:out", 5000, Boolean.class);
verify(map).containsValue("testOk");
assertEquals(true, body);
template.sendBody("direct:containsValue", "testKo");
body = consumer.receiveBody("seda:out", 5000, Boolean.class);
verify(map).containsValue("testKo");
assertEquals(false, body);
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
from("direct:putInvalid").setHeader(HazelcastConstants.OPERATION, constant("bogus"))
.to(String.format("hazelcast-%sbar", HazelcastConstants.MULTIMAP_PREFIX));
from("direct:put").setHeader(HazelcastConstants.OPERATION, constant(HazelcastOperation.PUT))
.to(String.format("hazelcast-%sbar", HazelcastConstants.MULTIMAP_PREFIX));
from("direct:removeValue").setHeader(HazelcastConstants.OPERATION, constant(HazelcastOperation.REMOVE_VALUE))
.to(
String.format("hazelcast-%sbar", HazelcastConstants.MULTIMAP_PREFIX));
from("direct:get").setHeader(HazelcastConstants.OPERATION, constant(HazelcastOperation.GET))
.to(String.format("hazelcast-%sbar", HazelcastConstants.MULTIMAP_PREFIX))
.to("seda:out");
from("direct:delete").setHeader(HazelcastConstants.OPERATION, constant(HazelcastOperation.DELETE))
.to(String.format("hazelcast-%sbar", HazelcastConstants.MULTIMAP_PREFIX));
from("direct:clear").setHeader(HazelcastConstants.OPERATION, constant(HazelcastOperation.CLEAR))
.to(String.format("hazelcast-%sbar", HazelcastConstants.MULTIMAP_PREFIX));
from("direct:valueCount").setHeader(HazelcastConstants.OPERATION, constant(HazelcastOperation.VALUE_COUNT))
.to(String.format("hazelcast-%sbar", HazelcastConstants.MULTIMAP_PREFIX));
from("direct:containsKey").setHeader(HazelcastConstants.OPERATION, constant(HazelcastOperation.CONTAINS_KEY))
.to(String.format("hazelcast-%sbar", HazelcastConstants.MULTIMAP_PREFIX))
.to("seda:out");
from("direct:containsValue")
.setHeader(HazelcastConstants.OPERATION, constant(HazelcastOperation.CONTAINS_VALUE))
.to(String.format("hazelcast-%sbar", HazelcastConstants.MULTIMAP_PREFIX))
.to("seda:out");
from("direct:putWithOperationNumber").toF("hazelcast-%sbar?operation=%s", HazelcastConstants.MULTIMAP_PREFIX,
HazelcastOperation.PUT);
from("direct:putWithOperationName").toF("hazelcast-%sbar?operation=PUT", HazelcastConstants.MULTIMAP_PREFIX);
}
};
}
}
|
HazelcastMultimapProducerTest
|
java
|
google__truth
|
core/src/main/java/com/google/common/truth/DoubleSubject.java
|
{
"start": 1124,
"end": 1510
}
|
class ____ extends ComparableSubject<Double> {
private final @Nullable Double actual;
private DoubleSubject(FailureMetadata metadata, @Nullable Double actual) {
super(metadata, actual);
this.actual = actual;
}
/**
* A partially specified check about an approximate relationship to a {@code double} value using a
* tolerance.
*/
public static final
|
DoubleSubject
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/JobExceptionsInfoWithHistory.java
|
{
"start": 10308,
"end": 13614
}
|
class ____ extends ExceptionInfo {
public static final String FIELD_NAME_CONCURRENT_EXCEPTIONS = "concurrentExceptions";
@JsonProperty(FIELD_NAME_CONCURRENT_EXCEPTIONS)
private final Collection<ExceptionInfo> concurrentExceptions;
public RootExceptionInfo(
String exceptionName,
String stacktrace,
long timestamp,
Map<String, String> failureLabels,
Collection<ExceptionInfo> concurrentExceptions) {
this(
exceptionName,
stacktrace,
timestamp,
failureLabels,
null,
null,
null,
concurrentExceptions);
}
@JsonCreator
public RootExceptionInfo(
@JsonProperty(FIELD_NAME_EXCEPTION_NAME) String exceptionName,
@JsonProperty(FIELD_NAME_EXCEPTION_STACKTRACE) String stacktrace,
@JsonProperty(FIELD_NAME_EXCEPTION_TIMESTAMP) long timestamp,
@JsonProperty(FIELD_NAME_FAILURE_LABELS) Map<String, String> failureLabels,
@JsonProperty(FIELD_NAME_TASK_NAME) @Nullable String taskName,
@JsonProperty(FIELD_NAME_ENDPOINT) @Nullable String endpoint,
@JsonProperty(FIELD_NAME_TASK_MANAGER_ID) @Nullable String taskManagerId,
@JsonProperty(FIELD_NAME_CONCURRENT_EXCEPTIONS)
Collection<ExceptionInfo> concurrentExceptions) {
super(
exceptionName,
stacktrace,
timestamp,
failureLabels,
taskName,
endpoint,
taskManagerId);
this.concurrentExceptions = concurrentExceptions;
}
@JsonIgnore
public Collection<ExceptionInfo> getConcurrentExceptions() {
return concurrentExceptions;
}
// hashCode and equals are necessary for the test classes deriving from
// RestResponseMarshallingTestBase
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass() || !super.equals(o)) {
return false;
}
RootExceptionInfo that = (RootExceptionInfo) o;
return getConcurrentExceptions().equals(that.getConcurrentExceptions());
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), getConcurrentExceptions());
}
@Override
public String toString() {
return new StringJoiner(", ", RootExceptionInfo.class.getSimpleName() + "[", "]")
.add("exceptionName='" + getExceptionName() + "'")
.add("stacktrace='" + getStacktrace() + "'")
.add("timestamp=" + getTimestamp())
.add("taskName='" + getTaskName() + "'")
.add("endpoint='" + getEndpoint() + "'")
.add("concurrentExceptions=" + getConcurrentExceptions())
.toString();
}
}
}
|
RootExceptionInfo
|
java
|
elastic__elasticsearch
|
x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapperTests.java
|
{
"start": 1409,
"end": 8738
}
|
class ____ extends MapperTestCase {
@Override
protected Collection<? extends Plugin> getPlugins() {
return Collections.singletonList(new CountedKeywordMapperPlugin());
}
@Override
protected void minimalMapping(XContentBuilder b) throws IOException {
b.field("type", CountedKeywordFieldMapper.CONTENT_TYPE);
}
@Override
protected Object getSampleValueForDocument() {
return new String[] { "a", "a", "b", "c" };
}
@Override
protected Object getSampleValueForQuery() {
return "b";
}
@Override
protected boolean supportsIgnoreMalformed() {
return false;
}
@Override
protected boolean supportsStoredFields() {
return false;
}
@Override
protected void registerParameters(ParameterChecker checker) {
// Nothing to do
}
@Override
protected Object generateRandomInputValue(MappedFieldType ft) {
return randomBoolean() ? null : randomAlphaOfLengthBetween(1, 10);
}
public void testSyntheticSourceSingleNullValue() throws IOException {
DocumentMapper mapper = createSytheticSourceMapperService(mapping(b -> {
b.startObject("field");
minimalMapping(b);
b.endObject();
})).documentMapper();
String expected = "{}";
CheckedConsumer<XContentBuilder, IOException> buildInput = b -> {
b.field("field");
b.nullValue();
};
assertThat(syntheticSource(mapper, buildInput), equalTo(expected));
assertThat(syntheticSource(mapper, new SourceFilter(new String[] { "field" }, null), buildInput), equalTo(expected));
assertThat(syntheticSource(mapper, new SourceFilter(null, new String[] { "field" }), buildInput), equalTo("{}"));
}
public void testSyntheticSourceManyNullValue() throws IOException {
DocumentMapper mapper = createSytheticSourceMapperService(mapping(b -> {
b.startObject("field");
minimalMapping(b);
b.endObject();
})).documentMapper();
int nullCount = randomIntBetween(1, 5);
String expected = "{}";
CheckedConsumer<XContentBuilder, IOException> buildInput = b -> {
b.startArray("field");
for (int i = 0; i < nullCount; i++) {
b.nullValue();
}
b.endArray();
};
assertThat(syntheticSource(mapper, buildInput), equalTo(expected));
assertThat(syntheticSource(mapper, new SourceFilter(new String[] { "field" }, null), buildInput), equalTo(expected));
assertThat(syntheticSource(mapper, new SourceFilter(null, new String[] { "field" }), buildInput), equalTo("{}"));
}
public void testSyntheticSourceIndexLevelKeepArrays() throws IOException {
SyntheticSourceExample example = syntheticSourceSupportForKeepTests(shouldUseIgnoreMalformed(), Mapper.SourceKeepMode.ARRAYS)
.example(1);
XContentBuilder mappings = mapping(b -> {
b.startObject("field");
example.mapping().accept(b);
b.endObject();
});
var settings = Settings.builder()
.put("index.mapping.source.mode", "synthetic")
.put("index.mapping.synthetic_source_keep", "arrays")
.build();
DocumentMapper mapperAll = createMapperService(getVersion(), settings, () -> true, mappings).documentMapper();
int elementCount = randomIntBetween(2, 5);
CheckedConsumer<XContentBuilder, IOException> buildInput = (XContentBuilder builder) -> {
example.buildInputArray(builder, elementCount);
};
var builder = XContentFactory.jsonBuilder();
builder.startObject();
buildInput.accept(builder);
builder.endObject();
String expected = Strings.toString(builder);
String actual = syntheticSource(mapperAll, buildInput);
assertThat(actual, equalTo(expected));
}
@Override
protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) {
return new SyntheticSourceSupport() {
@Override
public SyntheticSourceExample example(int maxValues) throws IOException {
if (randomBoolean()) {
Tuple<String, String> v = generateValue();
return new SyntheticSourceExample(v.v1(), v.v2(), this::mapping);
}
int maxNullValues = 5;
List<Tuple<String, String>> values = randomList(1, maxValues, this::generateValue);
List<String> in = Stream.concat(values.stream().map(Tuple::v1), randomList(0, maxNullValues, () -> (String) null).stream())
.toList();
in = shuffledList(in);
List<String> outList = values.stream().map(Tuple::v2).sorted().toList();
Object out = outList.size() == 1 ? outList.get(0) : outList;
return new SyntheticSourceExample(in, out, this::mapping);
}
private final Set<String> previousValues = new HashSet<>();
private Tuple<String, String> generateValue() {
String v;
if (previousValues.size() > 0 && randomBoolean()) {
v = randomFrom(previousValues);
} else {
v = ESTestCase.randomAlphaOfLength(5);
previousValues.add(v);
}
return Tuple.tuple(v, v);
}
private void mapping(XContentBuilder b) throws IOException {
minimalMapping(b);
}
@Override
public List<SyntheticSourceInvalidExample> invalidExample() throws IOException {
return List.of();
}
};
}
@Override
protected IngestScriptSupport ingestScriptSupport() {
throw new AssumptionViolatedException("not supported");
}
public void testDottedFieldNames() throws IOException {
DocumentMapper mapper = createDocumentMapper(mapping(b -> {
b.startObject("dotted.field");
b.field("type", CountedKeywordFieldMapper.CONTENT_TYPE);
b.endObject();
}));
ParsedDocument doc = mapper.parse(source(b -> b.field("dotted.field", "1234")));
List<IndexableField> fields = doc.rootDoc().getFields("dotted.field");
assertEquals(1, fields.size());
}
public void testDisableIndex() throws IOException {
DocumentMapper mapper = createDocumentMapper(
fieldMapping(b -> b.field("type", CountedKeywordFieldMapper.CONTENT_TYPE).field("index", false))
);
ParsedDocument doc = mapper.parse(source(b -> b.field("field", "1234")));
List<IndexableField> fields = doc.rootDoc().getFields("field");
assertEquals(1, fields.size());
assertEquals(IndexOptions.NONE, fields.get(0).fieldType().indexOptions());
assertEquals(DocValuesType.SORTED_SET, fields.get(0).fieldType().docValuesType());
}
@Override
protected List<SortShortcutSupport> getSortShortcutSupport() {
return List.of();
}
@Override
protected boolean supportsDocValuesSkippers() {
return false;
}
}
|
CountedKeywordFieldMapperTests
|
java
|
apache__kafka
|
group-coordinator/src/test/java/org/apache/kafka/coordinator/group/GroupConfigManagerTest.java
|
{
"start": 1749,
"end": 3734
}
|
class ____ {
private GroupConfigManager configManager;
@BeforeEach
public void setUp() {
configManager = createConfigManager();
}
@AfterEach
public void tearDown() {
if (configManager != null) {
configManager.close();
}
}
@Test
public void testUpdateConfigWithInvalidGroupId() {
assertThrows(InvalidRequestException.class,
() -> configManager.updateGroupConfig("", new Properties()));
}
@Test
public void testGetNonExistentGroupConfig() {
Optional<GroupConfig> groupConfig = configManager.groupConfig("foo");
assertFalse(groupConfig.isPresent());
}
@Test
public void testUpdateGroupConfig() {
String groupId = "foo";
Properties props = new Properties();
props.put(CONSUMER_SESSION_TIMEOUT_MS_CONFIG, 50000);
props.put(CONSUMER_HEARTBEAT_INTERVAL_MS_CONFIG, 6000);
configManager.updateGroupConfig(groupId, props);
Optional<GroupConfig> configOptional = configManager.groupConfig(groupId);
assertTrue(configOptional.isPresent());
GroupConfig config = configOptional.get();
assertEquals(50000, config.getInt(CONSUMER_SESSION_TIMEOUT_MS_CONFIG));
assertEquals(6000, config.getInt(CONSUMER_HEARTBEAT_INTERVAL_MS_CONFIG));
}
public static GroupConfigManager createConfigManager() {
Map<String, String> defaultConfig = new HashMap<>();
defaultConfig.put(CONSUMER_SESSION_TIMEOUT_MS_CONFIG, String.valueOf(GroupCoordinatorConfig.CONSUMER_GROUP_SESSION_TIMEOUT_MS_DEFAULT));
defaultConfig.put(CONSUMER_HEARTBEAT_INTERVAL_MS_CONFIG, String.valueOf(GroupCoordinatorConfig.CONSUMER_GROUP_HEARTBEAT_INTERVAL_MS_DEFAULT));
defaultConfig.put(SHARE_RECORD_LOCK_DURATION_MS_CONFIG, String.valueOf(ShareGroupConfig.SHARE_GROUP_RECORD_LOCK_DURATION_MS_DEFAULT));
return new GroupConfigManager(defaultConfig);
}
}
|
GroupConfigManagerTest
|
java
|
micronaut-projects__micronaut-core
|
core-processor/src/main/java/io/micronaut/inject/writer/BeanDefinitionVisitor.java
|
{
"start": 4951,
"end": 5183
}
|
class ____ be a subclass of
* {@link io.micronaut.context.AbstractInitializableBeanDefinition}.
*
* @param name The super type
*/
void visitSuperBeanDefinition(String name);
/**
* Alter the super
|
should
|
java
|
apache__camel
|
components/camel-disruptor/src/test/java/org/apache/camel/component/disruptor/vm/DisruptorVmWaitForTaskIfReplyExpectedTest.java
|
{
"start": 1167,
"end": 2951
}
|
class ____ extends AbstractVmTestSupport {
@Test
void testInOut() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived("Bye World");
String out = template2.requestBody("direct:start", "Hello World", String.class);
assertEquals("Bye World", out);
MockEndpoint.assertIsSatisfied(context);
}
@Test
void testInOnly() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived("Bye World");
Exchange out = template2.send("direct:start", new Processor() {
public void process(Exchange exchange) {
exchange.getIn().setBody("Hello World");
exchange.setPattern(ExchangePattern.InOnly);
}
});
// we do not expect a reply and thus do no wait so we just get our own input back
assertEquals("Hello World", out.getIn().getBody());
// Should return the in message as no reply is expected
assertEquals("Hello World", out.getMessage().getBody());
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("disruptor-vm:foo?waitForTaskToComplete=IfReplyExpected")
.transform(constant("Bye World")).to("mock:result");
}
};
}
@Override
protected RouteBuilder createRouteBuilderForSecondContext() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").to("disruptor-vm:foo?waitForTaskToComplete=IfReplyExpected");
}
};
}
}
|
DisruptorVmWaitForTaskIfReplyExpectedTest
|
java
|
square__retrofit
|
retrofit-adapters/rxjava/src/main/java/retrofit2/adapter/rxjava/ResultOnSubscribe.java
|
{
"start": 980,
"end": 1340
}
|
class ____<T> implements OnSubscribe<Result<T>> {
private final OnSubscribe<Response<T>> upstream;
ResultOnSubscribe(OnSubscribe<Response<T>> upstream) {
this.upstream = upstream;
}
@Override
public void call(Subscriber<? super Result<T>> subscriber) {
upstream.call(new ResultSubscriber<T>(subscriber));
}
private static
|
ResultOnSubscribe
|
java
|
reactor__reactor-core
|
reactor-core/src/main/java/reactor/core/publisher/MonoOnAssembly.java
|
{
"start": 1511,
"end": 2782
}
|
class ____<T> extends InternalMonoOperator<T, T> implements Fuseable,
AssemblyOp {
final AssemblySnapshot stacktrace;
/**
* Create an assembly trace exposed as a {@link Mono}.
*/
MonoOnAssembly(Mono<? extends T> source, AssemblySnapshot stacktrace) {
super(source);
this.stacktrace = stacktrace;
}
@Override
public CoreSubscriber<? super T> subscribeOrReturn(CoreSubscriber<? super T> actual) {
if (actual instanceof ConditionalSubscriber) {
@SuppressWarnings("unchecked") ConditionalSubscriber<? super T> cs =
(ConditionalSubscriber<? super T>) actual;
return new FluxOnAssembly.OnAssemblyConditionalSubscriber<>(cs, stacktrace, source, this);
}
else {
return new FluxOnAssembly.OnAssemblySubscriber<>(actual, stacktrace, source, this);
}
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.ACTUAL_METADATA) return !stacktrace.isCheckpoint;
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
return super.scanUnsafe(key);
}
@Override
public String stepName() {
return stacktrace.operatorAssemblyInformation();
}
@Override
public String toString() {
return stacktrace.operatorAssemblyInformation();
}
}
|
MonoOnAssembly
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/web/client/response/ExecutingResponseCreator.java
|
{
"start": 2004,
"end": 2946
}
|
class ____ implements ResponseCreator {
private final ClientHttpRequestFactory requestFactory;
/**
* Create an instance with the given {@code ClientHttpRequestFactory}.
* @param requestFactory the request factory to delegate to
*/
public ExecutingResponseCreator(ClientHttpRequestFactory requestFactory) {
this.requestFactory = requestFactory;
}
@Override
public ClientHttpResponse createResponse(@Nullable ClientHttpRequest request) throws IOException {
Assert.state(request instanceof MockClientHttpRequest, "Expected a MockClientHttpRequest");
MockClientHttpRequest mockRequest = (MockClientHttpRequest) request;
ClientHttpRequest newRequest = this.requestFactory.createRequest(mockRequest.getURI(), mockRequest.getMethod());
newRequest.getHeaders().putAll(mockRequest.getHeaders());
StreamUtils.copy(mockRequest.getBodyAsBytes(), newRequest.getBody());
return newRequest.execute();
}
}
|
ExecutingResponseCreator
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/select/MySqlSelectTest_193_ibatis.java
|
{
"start": 303,
"end": 890
}
|
class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "select a from x where a in (${x})";
// System.out.println(sql);
MySqlStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLSelectStatement stmt = (SQLSelectStatement) statementList.get(0);
assertEquals(1, statementList.size());
assertEquals("SELECT a\n" +
"FROM x\n" +
"WHERE a IN (${x})", stmt.toString());
}
}
|
MySqlSelectTest_193_ibatis
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/jmx/export/assembler/AbstractReflectiveMBeanInfoAssembler.java
|
{
"start": 15927,
"end": 16162
}
|
class ____ be used for the JMX descriptor field "class".
* Only applied when the "exposeClassDescriptor" property is "true".
* <p>The default implementation returns the first implemented interface
* for a JDK proxy, and the target
|
to
|
java
|
apache__flink
|
flink-yarn/src/main/java/org/apache/flink/yarn/cli/FallbackYarnSessionCli.java
|
{
"start": 1200,
"end": 1604
}
|
class ____ extends AbstractYarnCli {
public FallbackYarnSessionCli(Configuration configuration) {
super(configuration, "y", "yarn");
}
@Override
public boolean isActive(CommandLine commandLine) {
if (super.isActive(commandLine)) {
throw new IllegalStateException(YarnDeploymentTarget.ERROR_MESSAGE);
}
return false;
}
}
|
FallbackYarnSessionCli
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/SecretManager.java
|
{
"start": 4102,
"end": 9067
}
|
class ____ extends IOException {
public InvalidToken(String msg) {
super(msg);
}
}
/**
* Create the password for the given identifier.
* identifier may be modified inside this method.
* @param identifier the identifier to use
* @return the new password
*/
protected abstract byte[] createPassword(T identifier);
/**
* Retrieve the password for the given token identifier. Should check the date
* or registry to make sure the token hasn't expired or been revoked. Returns
* the relevant password.
* @param identifier the identifier to validate
* @return the password to use
* @throws InvalidToken the token was invalid
*/
public abstract byte[] retrievePassword(T identifier)
throws InvalidToken;
/**
* The same functionality with {@link #retrievePassword}, except that this
* method can throw a {@link RetriableException} or a {@link StandbyException}
* to indicate that client can retry/failover the same operation because of
* temporary issue on the server side.
*
* @param identifier the identifier to validate
* @return the password to use
* @throws InvalidToken the token was invalid
* @throws StandbyException the server is in standby state, the client can
* try other servers
* @throws RetriableException the token was invalid, and the server thinks
* this may be a temporary issue and suggests the client to retry
* @throws IOException to allow future exceptions to be added without breaking
* compatibility
*/
public byte[] retriableRetrievePassword(T identifier)
throws InvalidToken, StandbyException, RetriableException, IOException {
return retrievePassword(identifier);
}
/**
* Create an empty token identifier.
* @return the newly created empty token identifier
*/
public abstract T createIdentifier();
/**
* No-op if the secret manager is available for reading tokens, throw a
* StandbyException otherwise.
*
* @throws StandbyException if the secret manager is not available to read
* tokens
*/
public void checkAvailableForRead() throws StandbyException {
// Default to being available for read.
}
/**
* Generate a new random secret key.
* @return the new key
*/
protected SecretKey generateSecret() {
synchronized (keyGenLock) {
if (keyGen == null) {
keyGen = createKeyGenerator();
}
return keyGen.generateKey();
}
}
/**
* Compute HMAC of the identifier using the secret key and return the
* output as password
* @param identifier the bytes of the identifier
* @param key the secret key
* @return the bytes of the generated password
*/
public static byte[] createPassword(byte[] identifier,
SecretKey key) {
Mac mac = threadLocalMac.get();
try {
mac.init(key);
} catch (InvalidKeyException ike) {
throw new IllegalArgumentException("Invalid key to HMAC computation",
ike);
}
return mac.doFinal(identifier);
}
/**
* Convert the byte[] to a secret key
* @param key the byte[] to create a secret key from
* @return the secret key
*/
protected static SecretKey createSecretKey(byte[] key) {
LOG.debug("Creating secretKey with algorithm {} with thread {}",
selectedAlgorithm, Thread.currentThread());
secretKeyInitialized = true;
return new SecretKeySpec(key, selectedAlgorithm);
}
/**
* Creates a new {@link KeyGenerator} instance configured with the currently selected
* algorithm and key length.
*
* @return a new {@code KeyGenerator} instance
* @throws IllegalArgumentException if the specified algorithm is not available
*/
private static synchronized KeyGenerator createKeyGenerator() {
LOG.debug("Creating key generator instance {} - {} bit with thread {}",
selectedAlgorithm, selectedLength, Thread.currentThread());
try {
KeyGenerator keyGen = KeyGenerator.getInstance(selectedAlgorithm);
keyGen.init(selectedLength);
keygenInitialized = true;
return keyGen;
} catch (NoSuchAlgorithmException nsa) {
throw new IllegalArgumentException("Can't find " + selectedAlgorithm, nsa);
}
}
/**
* Creates a new {@link Mac} instance using the currently selected algorithm.
*
* @return a new {@code Mac} instance
* @throws IllegalArgumentException if the specified algorithm is not available
*/
private static synchronized Mac createMac() {
LOG.debug("Creating mac instance {} with thread {}", selectedAlgorithm, Thread.currentThread());
try {
Mac mac = Mac.getInstance(selectedAlgorithm);
macInitialized = true;
return mac;
} catch (NoSuchAlgorithmException nsa) {
throw new IllegalArgumentException("Can't find " + selectedAlgorithm, nsa);
}
}
}
|
InvalidToken
|
java
|
apache__hadoop
|
hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/Exec.java
|
{
"start": 908,
"end": 972
}
|
class ____ executing an external process from a mojo.
*/
public
|
for
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregator.java
|
{
"start": 8890,
"end": 9237
}
|
class ____ extends Collector {
@Override
public void collect(int doc, long bucketOrd) {
// no-op
}
@Override
public void postCollect() {
// no-op
}
@Override
public void close() {
// no-op
}
}
private abstract static
|
EmptyCollector
|
java
|
quarkusio__quarkus
|
integration-tests/native-config-profile/src/test/java/io/quarkus/it/nat/test/profile/BuiltTimeProfileChangeManualIT.java
|
{
"start": 1109,
"end": 1335
}
|
class ____ {
@Test
public void unusedExists() {
Assertions.fail("Expected to fail in io.quarkus.test.junit.NativeTestExtension.beforeEach(ExtensionContext)");
}
public static
|
BuiltTimeProfileChangeManualIT
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/aggfunctions/LagAggFunctionTest.java
|
{
"start": 1458,
"end": 2391
}
|
class ____
extends AggFunctionTestBase<StringData, StringData, LagAggFunction.LagAcc<StringData>> {
@Override
protected List<List<StringData>> getInputValueSets() {
return Arrays.asList(
Collections.singletonList(fromString("1")),
Arrays.asList(fromString("1"), null),
Arrays.asList(null, null),
Arrays.asList(null, fromString("10")));
}
@Override
protected List<StringData> getExpectedResults() {
return Arrays.asList(null, fromString("1"), null, null);
}
@Override
protected AggregateFunction<StringData, LagAggFunction.LagAcc<StringData>> getAggregator() {
return new LagAggFunction<>(
new LogicalType[] {new VarCharType(), new IntType(), new CharType()});
}
@Override
protected Class<?> getAccClass() {
return LagAggFunction.LagAcc.class;
}
}
|
LagAggFunctionTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/SingleTableDiscriminatorFormulaTest.java
|
{
"start": 1016,
"end": 2549
}
|
class ____ {
@AfterEach
void tearDown(EntityManagerFactoryScope scope) {
scope.dropData();
}
@Test
@RequiresDialect(value = PostgreSQLDialect.class)
public void test(EntityManagerFactoryScope scope) {
scope.inTransaction(entityManager -> {
DebitAccount debitAccount = new DebitAccount("123-debit");
debitAccount.setId(1L);
debitAccount.setOwner("John Doe");
debitAccount.setBalance(BigDecimal.valueOf(100));
debitAccount.setInterestRate(BigDecimal.valueOf(1.5d));
debitAccount.setOverdraftFee(BigDecimal.valueOf(25));
CreditAccount creditAccount = new CreditAccount("456-credit");
creditAccount.setId(2L);
creditAccount.setOwner("John Doe");
creditAccount.setBalance(BigDecimal.valueOf(1000));
creditAccount.setInterestRate(BigDecimal.valueOf(1.9d));
creditAccount.setCreditLimit(BigDecimal.valueOf(5000));
entityManager.persist(debitAccount);
entityManager.persist(creditAccount);
});
scope.inTransaction(entityManager -> {
var accounts = entityManager.createQuery("select a from Account a").getResultList();
assertEquals(2, accounts.size());
});
}
//tag::entity-inheritance-single-table-discriminator-formula-example[]
@Entity(name = "Account")
@Inheritance(strategy = InheritanceType.SINGLE_TABLE)
@DiscriminatorFormula(
"case when debitKey is not null " +
"then 'Debit' " +
"else (" +
" case when creditKey is not null " +
" then 'Credit' " +
" else 'Unknown' " +
" end) " +
"end "
)
public static
|
SingleTableDiscriminatorFormulaTest
|
java
|
apache__camel
|
core/camel-core-processor/src/main/java/org/apache/camel/processor/aggregate/AggregationStrategyBeanAdapter.java
|
{
"start": 5668,
"end": 8326
}
|
class ____ lookup methods
if (method.getName().equals("getMetaClass") || method.getName().equals("setMetaClass")
|| method.getName().equals("$getLookup")) {
return false;
}
if (method.getDeclaringClass().getName().startsWith("groovy.lang")) {
return false;
}
// return type must not be void and it should not be a bridge method
if (method.getReturnType().equals(Void.TYPE) || method.isBridge()) {
return false;
}
return true;
}
private static boolean isStaticMethod(Method method) {
return Modifier.isStatic(method.getModifiers());
}
@Override
protected void doStart() throws Exception {
Method found = null;
if (methodName != null) {
for (Method method : type.getMethods()) {
if (isValidMethod(method) && method.getName().equals(methodName)) {
if (found == null) {
found = method;
} else {
throw new IllegalArgumentException(
"The bean " + type + " has 2 or more methods with the name " + methodName);
}
}
}
} else {
for (Method method : type.getMethods()) {
if (isValidMethod(method)) {
if (found == null) {
found = method;
} else {
throw new IllegalArgumentException(
"The bean " + type + " has 2 or more methods and no explicit method name was configured.");
}
}
}
}
if (found == null) {
throw new UnsupportedOperationException(
"Cannot find a valid method with name: " + methodName + " on bean type: " + type);
}
// if its not a static method then we must have an instance of the pojo
if (!isStaticMethod(found) && pojo == null) {
pojo = camelContext.getInjector().newInstance(type);
}
// create the method info which has adapted to the pojo
AggregationStrategyBeanInfo bi = new AggregationStrategyBeanInfo(type, found);
mi = bi.createMethodInfo();
// in case the POJO is CamelContextAware
CamelContextAware.trySetCamelContext(pojo, getCamelContext());
// in case the pojo is a service
ServiceHelper.startService(pojo);
}
@Override
protected void doStop() throws Exception {
ServiceHelper.stopService(pojo);
}
}
|
and
|
java
|
quarkusio__quarkus
|
integration-tests/grpc-plain-text-mutiny/src/main/java/io/quarkus/grpc/examples/hello/HelloWorldEndpoint.java
|
{
"start": 863,
"end": 3264
}
|
class ____ {
@GrpcClient("hello")
GreeterGrpc.GreeterBlockingStub blockingHelloClient;
@GrpcClient("hello")
MutinyGreeterGrpc.MutinyGreeterStub mutinyHelloClient;
@GrpcClient("hello")
Greeter interfaceHelloClient;
@Inject
IncomingInterceptor interceptor;
@GET
@Path("/blocking/{name}")
public String helloBlocking(@PathParam("name") String name, @QueryParam("headers") boolean headers) {
Metadata extraHeaders = new Metadata();
if (headers) {
extraHeaders.put(EXTRA_BLOCKING_HEADER, "my-blocking-value");
}
HelloReply reply = GrpcClientUtils.attachHeaders(blockingHelloClient, extraHeaders)
.sayHello(HelloRequest.newBuilder().setName(name).build());
return generateResponse(reply);
}
@GET
@Path("/mutiny/{name}")
public Uni<String> helloMutiny(@PathParam("name") String name, @QueryParam("headers") boolean headers) {
Metadata extraHeaders = new Metadata();
if (headers) {
extraHeaders.put(EXTRA_HEADER, "my-extra-value");
}
MutinyGreeterGrpc.MutinyGreeterStub alteredClient = GrpcClientUtils.attachHeaders(mutinyHelloClient, extraHeaders);
return alteredClient.sayHello(HelloRequest.newBuilder().setName(name).build())
.onItem().transform(this::generateResponse);
}
@GET
@Path("/interface/{name}")
public Uni<String> helloInterface(@PathParam("name") String name, @QueryParam("headers") boolean headers) {
Metadata extraHeaders = new Metadata();
if (headers) {
extraHeaders.put(INTERFACE_HEADER, "my-interface-value");
}
Greeter alteredClient = GrpcClientUtils.attachHeaders(interfaceHelloClient, extraHeaders);
return alteredClient.sayHello(HelloRequest.newBuilder().setName(name).build())
.onItem().transform(this::generateResponse);
}
@DELETE
public void clear() {
interceptor.clear();
}
@GET
@Path("/headers")
@Produces(MediaType.APPLICATION_JSON)
public Map<String, String> getCollectedHeaders() {
return interceptor.getCollectedHeaders();
}
public String generateResponse(HelloReply reply) {
return String.format("%s! HelloWorldService has been called %d number of times.", reply.getMessage(), reply.getCount());
}
}
|
HelloWorldEndpoint
|
java
|
apache__camel
|
components/camel-test/camel-test-spring-junit5/src/test/java/org/apache/camel/test/spring/CamelSpringActiveProfileTest.java
|
{
"start": 1622,
"end": 2099
}
|
class ____ {
@Autowired
protected CamelContext camelContext;
@Produce("direct:start")
protected ProducerTemplate start;
@Test
public void testLoadActiveProfile() throws InterruptedException {
MockEndpoint mock = camelContext.getEndpoint("mock:test", MockEndpoint.class);
mock.expectedBodiesReceived("Hello World");
start.sendBody("World");
mock.assertIsSatisfied();
}
}
// END SNIPPET: e1
|
CamelSpringActiveProfileTest
|
java
|
google__error-prone
|
check_api/src/test/java/com/google/errorprone/util/ASTHelpersTest.java
|
{
"start": 52862,
"end": 53703
}
|
class ____ extends A {
@Override
public <T> void doIt(T t) {}
@Override
public int doItAgain(int i) {return 42;}
}
""");
TestScanner scanner =
new TestScanner() {
@Override
public Void visitMethod(MethodTree tree, VisitorState state) {
setAssertionsComplete();
Symbol sym = ASTHelpers.getSymbol(tree);
assertThat(ASTHelpers.isSubtype(sym.asType(), sym.asType(), state)).isFalse();
return super.visitMethod(tree, state);
}
};
tests.add(scanner);
assertCompiles(scanner);
}
/** Comments on method invocations with their receiver chain. */
@BugPattern(
summary = "Comments on method invocations with their receiver chain.",
severity = WARNING)
public static final
|
B
|
java
|
spring-projects__spring-framework
|
spring-beans/src/main/java/org/springframework/beans/factory/groovy/GroovyBeanDefinitionReader.java
|
{
"start": 5311,
"end": 13703
}
|
class ____ extends AbstractBeanDefinitionReader implements GroovyObject {
/**
* Standard {@code XmlBeanDefinitionReader} created with default
* settings for loading bean definitions from XML files.
*/
private final XmlBeanDefinitionReader standardXmlBeanDefinitionReader;
/**
* Groovy DSL {@code XmlBeanDefinitionReader} for loading bean definitions
* via the Groovy DSL, typically configured with XML validation disabled.
*/
private final XmlBeanDefinitionReader groovyDslXmlBeanDefinitionReader;
private final Map<String, String> namespaces = new HashMap<>();
private final Map<String, DeferredProperty> deferredProperties = new HashMap<>();
private MetaClass metaClass = GroovySystem.getMetaClassRegistry().getMetaClass(getClass());
private @Nullable Binding binding;
private @Nullable GroovyBeanDefinitionWrapper currentBeanDefinition;
/**
* Create a new {@code GroovyBeanDefinitionReader} for the given
* {@link BeanDefinitionRegistry}.
* @param registry the {@code BeanDefinitionRegistry} to load bean definitions into
*/
public GroovyBeanDefinitionReader(BeanDefinitionRegistry registry) {
super(registry);
this.standardXmlBeanDefinitionReader = new XmlBeanDefinitionReader(registry);
this.groovyDslXmlBeanDefinitionReader = new XmlBeanDefinitionReader(registry);
this.groovyDslXmlBeanDefinitionReader.setValidating(false);
}
/**
* Create a new {@code GroovyBeanDefinitionReader} based on the given
* {@link XmlBeanDefinitionReader}, loading bean definitions into its
* {@code BeanDefinitionRegistry} and delegating Groovy DSL loading to it.
* <p>The supplied {@code XmlBeanDefinitionReader} should typically
* be pre-configured with XML validation disabled.
* @param xmlBeanDefinitionReader the {@code XmlBeanDefinitionReader} to
* derive the registry from and to delegate Groovy DSL loading to
*/
public GroovyBeanDefinitionReader(XmlBeanDefinitionReader xmlBeanDefinitionReader) {
super(xmlBeanDefinitionReader.getRegistry());
this.standardXmlBeanDefinitionReader = new XmlBeanDefinitionReader(xmlBeanDefinitionReader.getRegistry());
this.groovyDslXmlBeanDefinitionReader = xmlBeanDefinitionReader;
}
@Override
public void setMetaClass(MetaClass metaClass) {
this.metaClass = metaClass;
}
@Override
public MetaClass getMetaClass() {
return this.metaClass;
}
/**
* Set the binding, i.e. the Groovy variables available in the scope
* of a {@code GroovyBeanDefinitionReader} closure.
*/
public void setBinding(Binding binding) {
this.binding = binding;
}
/**
* Return a specified binding for Groovy variables, if any.
*/
public @Nullable Binding getBinding() {
return this.binding;
}
// TRADITIONAL BEAN DEFINITION READER METHODS
/**
* Load bean definitions from the specified Groovy script or XML file.
* <p>Note that {@code ".xml"} files will be parsed as XML content; all other kinds
* of resources will be parsed as Groovy scripts.
* @param resource the resource descriptor for the Groovy script or XML file
* @return the number of bean definitions found
* @throws BeanDefinitionStoreException in case of loading or parsing errors
*/
@Override
public int loadBeanDefinitions(Resource resource) throws BeanDefinitionStoreException {
return loadBeanDefinitions(new EncodedResource(resource));
}
/**
* Load bean definitions from the specified Groovy script or XML file.
* <p>Note that {@code ".xml"} files will be parsed as XML content; all other kinds
* of resources will be parsed as Groovy scripts.
* @param encodedResource the resource descriptor for the Groovy script or XML file,
* allowing specification of an encoding to use for parsing the file
* @return the number of bean definitions found
* @throws BeanDefinitionStoreException in case of loading or parsing errors
*/
public int loadBeanDefinitions(EncodedResource encodedResource) throws BeanDefinitionStoreException {
// Check for XML files and redirect them to the "standard" XmlBeanDefinitionReader
String filename = encodedResource.getResource().getFilename();
if (StringUtils.endsWithIgnoreCase(filename, ".xml")) {
return this.standardXmlBeanDefinitionReader.loadBeanDefinitions(encodedResource);
}
if (logger.isTraceEnabled()) {
logger.trace("Loading Groovy bean definitions from " + encodedResource);
}
@SuppressWarnings("serial")
Closure<Object> beans = new Closure<>(this) {
@Override
public @Nullable Object call(Object... args) {
invokeBeanDefiningClosure((Closure<?>) args[0]);
return null;
}
};
Binding binding = new Binding() {
@Override
public void setVariable(String name, Object value) {
if (currentBeanDefinition != null) {
applyPropertyToBeanDefinition(name, value);
}
else {
super.setVariable(name, value);
}
}
};
binding.setVariable("beans", beans);
int countBefore = getRegistry().getBeanDefinitionCount();
try {
GroovyShell shell = new GroovyShell(getBeanClassLoader(), binding);
shell.evaluate(encodedResource.getReader(), "beans");
}
catch (Throwable ex) {
throw new BeanDefinitionParsingException(new Problem("Error evaluating Groovy script: " + ex.getMessage(),
new Location(encodedResource.getResource()), null, ex));
}
int count = getRegistry().getBeanDefinitionCount() - countBefore;
if (logger.isDebugEnabled()) {
logger.debug("Loaded " + count + " bean definitions from " + encodedResource);
}
return count;
}
// METHODS FOR CONSUMPTION IN A GROOVY CLOSURE
/**
* Defines a set of beans for the given block or closure.
* @param closure the block or closure
* @return this {@code GroovyBeanDefinitionReader} instance
*/
public GroovyBeanDefinitionReader beans(Closure<?> closure) {
return invokeBeanDefiningClosure(closure);
}
/**
* Define an inner bean definition.
* @param type the bean type
* @return the bean definition
*/
public GenericBeanDefinition bean(Class<?> type) {
GenericBeanDefinition beanDefinition = new GenericBeanDefinition();
beanDefinition.setBeanClass(type);
return beanDefinition;
}
/**
* Define an inner bean definition.
* @param type the bean type
* @param args the constructors arguments and closure configurer
* @return the bean definition
*/
public AbstractBeanDefinition bean(Class<?> type, Object...args) {
GroovyBeanDefinitionWrapper current = this.currentBeanDefinition;
try {
Closure<?> callable = null;
Collection<Object> constructorArgs = null;
if (!ObjectUtils.isEmpty(args)) {
int index = args.length;
Object lastArg = args[index - 1];
if (lastArg instanceof Closure<?> closure) {
callable = closure;
index--;
}
constructorArgs = resolveConstructorArguments(args, 0, index);
}
this.currentBeanDefinition = new GroovyBeanDefinitionWrapper(null, type, constructorArgs);
if (callable != null) {
callable.call(this.currentBeanDefinition);
}
return this.currentBeanDefinition.getBeanDefinition();
}
finally {
this.currentBeanDefinition = current;
}
}
/**
* Define a Spring XML namespace definition to use.
* @param definition the namespace definition
*/
public void xmlns(Map<String, String> definition) {
if (!definition.isEmpty()) {
for (Map.Entry<String,String> entry : definition.entrySet()) {
String namespace = entry.getKey();
String uri = entry.getValue();
if (uri == null) {
throw new IllegalArgumentException("Namespace definition must supply a non-null URI");
}
NamespaceHandler namespaceHandler =
this.groovyDslXmlBeanDefinitionReader.getNamespaceHandlerResolver().resolve(uri);
if (namespaceHandler == null) {
throw new BeanDefinitionParsingException(new Problem("No namespace handler found for URI: " + uri,
new Location(new DescriptiveResource(("Groovy")))));
}
this.namespaces.put(namespace, uri);
}
}
}
/**
* Import Spring bean definitions from either XML or Groovy sources into the
* current bean builder instance.
* @param resourcePattern the resource pattern
*/
public void importBeans(String resourcePattern) throws IOException {
loadBeanDefinitions(resourcePattern);
}
// INTERNAL HANDLING OF GROOVY CLOSURES AND PROPERTIES
/**
* This method overrides method invocation to create beans for each method name that
* takes a
|
GroovyBeanDefinitionReader
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/TestDeprecate.java
|
{
"start": 107,
"end": 311
}
|
class ____ extends TestCase {
public void test_0() throws Exception {
VO vo = new VO();
vo.setId(123);
String text = JSON.toJSONString(vo);
}
public static
|
TestDeprecate
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlMemoryAction.java
|
{
"start": 1332,
"end": 3083
}
|
class ____ extends ActionType<MlMemoryAction.Response> {
public static final MlMemoryAction INSTANCE = new MlMemoryAction();
public static final String NAME = "cluster:monitor/xpack/ml/memory/stats/get";
static final String MEM = "mem";
static final String TOTAL = "total";
static final String TOTAL_IN_BYTES = "total_in_bytes";
static final String ADJUSTED_TOTAL = "adjusted_total";
static final String ADJUSTED_TOTAL_IN_BYTES = "adjusted_total_in_bytes";
static final String ML = "ml";
static final String MAX = "max";
static final String MAX_IN_BYTES = "max_in_bytes";
static final String NATIVE_CODE_OVERHEAD = "native_code_overhead";
static final String NATIVE_CODE_OVERHEAD_IN_BYTES = "native_code_overhead_in_bytes";
static final String ANOMALY_DETECTORS = "anomaly_detectors";
static final String ANOMALY_DETECTORS_IN_BYTES = "anomaly_detectors_in_bytes";
static final String DATA_FRAME_ANALYTICS = "data_frame_analytics";
static final String DATA_FRAME_ANALYTICS_IN_BYTES = "data_frame_analytics_in_bytes";
static final String NATIVE_INFERENCE = "native_inference";
static final String NATIVE_INFERENCE_IN_BYTES = "native_inference_in_bytes";
static final String JVM = "jvm";
static final String HEAP_MAX = "heap_max";
static final String HEAP_MAX_IN_BYTES = "heap_max_in_bytes";
static final String JAVA_INFERENCE_MAX = "java_inference_max";
static final String JAVA_INFERENCE_MAX_IN_BYTES = "java_inference_max_in_bytes";
static final String JAVA_INFERENCE = "java_inference";
static final String JAVA_INFERENCE_IN_BYTES = "java_inference_in_bytes";
private MlMemoryAction() {
super(NAME);
}
public static
|
MlMemoryAction
|
java
|
apache__maven
|
impl/maven-core/src/main/java/org/apache/maven/project/ProjectModelResolver.java
|
{
"start": 2314,
"end": 11694
}
|
class ____ implements ModelResolver {
private final RepositorySystemSession session;
private final RequestTrace trace;
private final String context = "project";
private List<RemoteRepository> repositories;
private List<RemoteRepository> pomRepositories;
private final List<RemoteRepository> externalRepositories;
private final RepositorySystem resolver;
private final RemoteRepositoryManager remoteRepositoryManager;
private final Set<String> repositoryIds;
private final ReactorModelPool modelPool;
private final ProjectBuildingRequest.RepositoryMerging repositoryMerging;
public ProjectModelResolver(
RepositorySystemSession session,
RequestTrace trace,
RepositorySystem resolver,
RemoteRepositoryManager remoteRepositoryManager,
List<RemoteRepository> repositories,
ProjectBuildingRequest.RepositoryMerging repositoryMerging,
ReactorModelPool modelPool) {
this.session = session;
this.trace = trace;
this.resolver = resolver;
this.remoteRepositoryManager = remoteRepositoryManager;
this.pomRepositories = new ArrayList<>();
this.externalRepositories = Collections.unmodifiableList(new ArrayList<>(repositories));
this.repositories = new ArrayList<>();
this.repositories.addAll(externalRepositories);
this.repositoryMerging = repositoryMerging;
this.repositoryIds = new HashSet<>();
this.modelPool = modelPool;
}
private ProjectModelResolver(ProjectModelResolver original) {
this.session = original.session;
this.trace = original.trace;
this.resolver = original.resolver;
this.remoteRepositoryManager = original.remoteRepositoryManager;
this.pomRepositories = new ArrayList<>(original.pomRepositories);
this.externalRepositories = original.externalRepositories;
this.repositories = new ArrayList<>(original.repositories);
this.repositoryMerging = original.repositoryMerging;
this.repositoryIds = new HashSet<>(original.repositoryIds);
this.modelPool = original.modelPool;
}
@Override
public void addRepository(Repository repository) throws InvalidRepositoryException {
addRepository(repository, false);
}
@Override
public void addRepository(final Repository repository, boolean replace) throws InvalidRepositoryException {
if (!repositoryIds.add(repository.getId())) {
if (!replace) {
return;
}
// Remove any previous repository with this Id
removeMatchingRepository(repositories, repository.getId());
removeMatchingRepository(pomRepositories, repository.getId());
}
List<RemoteRepository> newRepositories =
Collections.singletonList(ArtifactDescriptorUtils.toRemoteRepository(repository.getDelegate()));
if (ProjectBuildingRequest.RepositoryMerging.REQUEST_DOMINANT.equals(repositoryMerging)) {
repositories = remoteRepositoryManager.aggregateRepositories(session, repositories, newRepositories, true);
} else {
pomRepositories =
remoteRepositoryManager.aggregateRepositories(session, pomRepositories, newRepositories, true);
repositories = remoteRepositoryManager.aggregateRepositories(
session, pomRepositories, externalRepositories, false);
}
}
private static void removeMatchingRepository(Iterable<RemoteRepository> repositories, final String id) {
Iterator iterator = repositories.iterator();
while (iterator.hasNext()) {
RemoteRepository next = (RemoteRepository) iterator.next();
if (next.getId().equals(id)) {
iterator.remove();
}
}
}
@Override
public ModelResolver newCopy() {
return new ProjectModelResolver(this);
}
@Override
public ModelSource resolveModel(String groupId, String artifactId, String version)
throws UnresolvableModelException {
File pomFile = null;
if (modelPool != null) {
pomFile = modelPool.get(groupId, artifactId, version);
}
if (pomFile == null) {
Artifact pomArtifact = new DefaultArtifact(groupId, artifactId, "", "pom", version);
try {
ArtifactRequest request = new ArtifactRequest(pomArtifact, repositories, context);
request.setTrace(trace);
pomArtifact = resolver.resolveArtifact(session, request).getArtifact();
} catch (ArtifactResolutionException e) {
throw new UnresolvableModelException(e.getMessage(), groupId, artifactId, version, e);
}
pomFile = pomArtifact.getFile();
}
return new FileModelSource(pomFile);
}
@Override
public ModelSource resolveModel(final Parent parent) throws UnresolvableModelException {
try {
final Artifact artifact =
new DefaultArtifact(parent.getGroupId(), parent.getArtifactId(), "", "pom", parent.getVersion());
final VersionRangeRequest versionRangeRequest = new VersionRangeRequest(artifact, repositories, context);
versionRangeRequest.setTrace(trace);
final VersionRangeResult versionRangeResult = resolver.resolveVersionRange(session, versionRangeRequest);
if (versionRangeResult.getHighestVersion() == null) {
throw new UnresolvableModelException(
String.format(
"No versions matched the requested parent version range '%s'", parent.getVersion()),
parent.getGroupId(),
parent.getArtifactId(),
parent.getVersion());
}
if (versionRangeResult.getVersionConstraint() != null
&& versionRangeResult.getVersionConstraint().getRange() != null
&& versionRangeResult.getVersionConstraint().getRange().getUpperBound() == null) {
// Message below is checked for in the MNG-2199 core IT.
throw new UnresolvableModelException(
String.format(
"The requested parent version range '%s' does not specify an upper bound",
parent.getVersion()),
parent.getGroupId(),
parent.getArtifactId(),
parent.getVersion());
}
parent.setVersion(versionRangeResult.getHighestVersion().toString());
return resolveModel(parent.getGroupId(), parent.getArtifactId(), parent.getVersion());
} catch (final VersionRangeResolutionException e) {
throw new UnresolvableModelException(
e.getMessage(), parent.getGroupId(), parent.getArtifactId(), parent.getVersion(), e);
}
}
@Override
public ModelSource resolveModel(final Dependency dependency) throws UnresolvableModelException {
try {
final Artifact artifact = new DefaultArtifact(
dependency.getGroupId(), dependency.getArtifactId(), "", "pom", dependency.getVersion());
final VersionRangeRequest versionRangeRequest = new VersionRangeRequest(artifact, repositories, context);
versionRangeRequest.setTrace(trace);
final VersionRangeResult versionRangeResult = resolver.resolveVersionRange(session, versionRangeRequest);
if (versionRangeResult.getHighestVersion() == null) {
throw new UnresolvableModelException(
String.format(
"No versions matched the requested dependency version range '%s'",
dependency.getVersion()),
dependency.getGroupId(),
dependency.getArtifactId(),
dependency.getVersion());
}
if (versionRangeResult.getVersionConstraint() != null
&& versionRangeResult.getVersionConstraint().getRange() != null
&& versionRangeResult.getVersionConstraint().getRange().getUpperBound() == null) {
// Message below is checked for in the MNG-4463 core IT.
throw new UnresolvableModelException(
String.format(
"The requested dependency version range '%s' does not specify an upper bound",
dependency.getVersion()),
dependency.getGroupId(),
dependency.getArtifactId(),
dependency.getVersion());
}
dependency.setVersion(versionRangeResult.getHighestVersion().toString());
return resolveModel(dependency.getGroupId(), dependency.getArtifactId(), dependency.getVersion());
} catch (VersionRangeResolutionException e) {
throw new UnresolvableModelException(
e.getMessage(), dependency.getGroupId(), dependency.getArtifactId(), dependency.getVersion(), e);
}
}
}
|
ProjectModelResolver
|
java
|
apache__flink
|
flink-rpc/flink-rpc-akka/src/main/java/org/apache/flink/runtime/rpc/pekko/PekkoRpcServiceUtils.java
|
{
"start": 9018,
"end": 9874
}
|
enum ____ {
TCP,
SSL_TCP
}
// ------------------------------------------------------------------------
// RPC service configuration
// ------------------------------------------------------------------------
public static long extractMaximumFramesize(Configuration configuration) {
String maxFrameSizeStr = configuration.get(RpcOptions.FRAMESIZE);
String configStr = String.format(SIMPLE_CONFIG_TEMPLATE, maxFrameSizeStr);
Config config = ConfigFactory.parseString(configStr);
return config.getBytes(MAXIMUM_FRAME_SIZE_PATH);
}
// ------------------------------------------------------------------------
// RPC service builder
// ------------------------------------------------------------------------
/** Builder for {@link PekkoRpcService}. */
static
|
Protocol
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesReservation.java
|
{
"start": 6168,
"end": 7367
}
|
class ____ extends AuthenticationFilter {
@Override
protected Properties getConfiguration(String configPrefix,
FilterConfig filterConfig) throws ServletException {
Properties props = new Properties();
Enumeration<?> names = filterConfig.getInitParameterNames();
while (names.hasMoreElements()) {
String name = (String) names.nextElement();
if (name.startsWith(configPrefix)) {
String value = filterConfig.getInitParameter(name);
props.put(name.substring(configPrefix.length()), value);
}
}
props.put(AuthenticationFilter.AUTH_TYPE, "simple");
props.put(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED, "false");
return props;
}
}
@Override
protected Application configure() {
config = new ResourceConfig();
config.register(RMWebServices.class);
config.register(GenericExceptionHandler.class);
if (setAuthFilter) {
config.register(TestRMCustomAuthFilter.class);
}
config.register(new JettisonFeature()).register(JAXBContextResolver.class);
forceSet(TestProperties.CONTAINER_PORT, JERSEY_RANDOM_PORT);
return config;
}
private
|
TestRMCustomAuthFilter
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HdfsDtFetcher.java
|
{
"start": 1566,
"end": 3066
}
|
class ____ implements DtFetcher {
private static final Logger LOG =
LoggerFactory.getLogger(HdfsDtFetcher.class);
private static final String SERVICE_NAME = HdfsConstants.HDFS_URI_SCHEME;
private static final String FETCH_FAILED = "Fetch of delegation token failed";
/**
* Returns the service name for HDFS, which is also a valid URL prefix.
*/
public Text getServiceName() {
return new Text(SERVICE_NAME);
}
public boolean isTokenRequired() {
return UserGroupInformation.isSecurityEnabled();
}
/**
* Returns Token object via FileSystem, null if bad argument.
* @param conf - a Configuration object used with FileSystem.get()
* @param creds - a Credentials object to which token(s) will be added
* @param renewer - the renewer to send with the token request
* @param url - the URL to which the request is sent
* @return a Token, or null if fetch fails.
*/
public Token<?> addDelegationTokens(Configuration conf, Credentials creds,
String renewer, String url) throws Exception {
if (!url.startsWith(getServiceName().toString())) {
url = getServiceName().toString() + "://" + url;
}
FileSystem fs = FileSystem.get(URI.create(url), conf);
Token<?> token = fs.getDelegationToken(renewer);
if (token == null) {
LOG.error(FETCH_FAILED);
throw new IOException(FETCH_FAILED);
}
creds.addToken(token.getService(), token);
return token;
}
}
|
HdfsDtFetcher
|
java
|
square__javapoet
|
src/test/java/com/squareup/javapoet/TypeSpecTest.java
|
{
"start": 26899,
"end": 28567
}
|
interface ____ extends Serializable, Comparable<Taco> {\n"
+ "}\n");
}
@Test public void nestedClasses() throws Exception {
ClassName taco = ClassName.get(tacosPackage, "Combo", "Taco");
ClassName topping = ClassName.get(tacosPackage, "Combo", "Taco", "Topping");
ClassName chips = ClassName.get(tacosPackage, "Combo", "Chips");
ClassName sauce = ClassName.get(tacosPackage, "Combo", "Sauce");
TypeSpec typeSpec = TypeSpec.classBuilder("Combo")
.addField(taco, "taco")
.addField(chips, "chips")
.addType(TypeSpec.classBuilder(taco.simpleName())
.addModifiers(Modifier.STATIC)
.addField(ParameterizedTypeName.get(ClassName.get(List.class), topping), "toppings")
.addField(sauce, "sauce")
.addType(TypeSpec.enumBuilder(topping.simpleName())
.addEnumConstant("SHREDDED_CHEESE")
.addEnumConstant("LEAN_GROUND_BEEF")
.build())
.build())
.addType(TypeSpec.classBuilder(chips.simpleName())
.addModifiers(Modifier.STATIC)
.addField(topping, "topping")
.addField(sauce, "dippingSauce")
.build())
.addType(TypeSpec.enumBuilder(sauce.simpleName())
.addEnumConstant("SOUR_CREAM")
.addEnumConstant("SALSA")
.addEnumConstant("QUESO")
.addEnumConstant("MILD")
.addEnumConstant("FIRE")
.build())
.build();
assertThat(toString(typeSpec)).isEqualTo(""
+ "package com.squareup.tacos;\n"
+ "\n"
+ "import java.util.List;\n"
+ "\n"
+ "
|
Taco
|
java
|
elastic__elasticsearch
|
x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/MultiFieldsInnerRetrieverUtils.java
|
{
"start": 1787,
"end": 1912
}
|
class ____ managing and validating the multi-fields query format for the {@link LinearRetrieverBuilder} retriever.
*/
public
|
for
|
java
|
apache__dubbo
|
dubbo-compatible/src/test/java/org/apache/dubbo/metadata/annotation/processing/util/TypeUtilsTest.java
|
{
"start": 4708,
"end": 22504
}
|
class ____ extends AbstractAnnotationProcessingTest {
private TypeElement testType;
@Override
protected void addCompiledClasses(Set<Class<?>> classesToBeCompiled) {
classesToBeCompiled.add(ArrayTypeModel.class);
classesToBeCompiled.add(Color.class);
}
@Override
protected void beforeEach() {
testType = getType(TestServiceImpl.class);
}
@Test
void testIsSimpleType() {
assertTrue(isSimpleType(getType(Void.class)));
assertTrue(isSimpleType(getType(Boolean.class)));
assertTrue(isSimpleType(getType(Character.class)));
assertTrue(isSimpleType(getType(Byte.class)));
assertTrue(isSimpleType(getType(Short.class)));
assertTrue(isSimpleType(getType(Integer.class)));
assertTrue(isSimpleType(getType(Long.class)));
assertTrue(isSimpleType(getType(Float.class)));
assertTrue(isSimpleType(getType(Double.class)));
assertTrue(isSimpleType(getType(String.class)));
assertTrue(isSimpleType(getType(BigDecimal.class)));
assertTrue(isSimpleType(getType(BigInteger.class)));
assertTrue(isSimpleType(getType(Date.class)));
assertTrue(isSimpleType(getType(Object.class)));
assertFalse(isSimpleType(getType(getClass())));
assertFalse(isSimpleType((TypeElement) null));
assertFalse(isSimpleType((TypeMirror) null));
}
@Test
void testIsSameType() {
assertTrue(isSameType(getType(Void.class).asType(), "java.lang.Void"));
assertFalse(isSameType(getType(String.class).asType(), "java.lang.Void"));
assertFalse(isSameType(getType(Void.class).asType(), (Type) null));
assertFalse(isSameType(null, (Type) null));
assertFalse(isSameType(getType(Void.class).asType(), (String) null));
assertFalse(isSameType(null, (String) null));
}
@Test
void testIsArrayType() {
TypeElement type = getType(ArrayTypeModel.class);
assertTrue(isArrayType(findField(type.asType(), "integers").asType()));
assertTrue(isArrayType(findField(type.asType(), "strings").asType()));
assertTrue(isArrayType(findField(type.asType(), "primitiveTypeModels").asType()));
assertTrue(isArrayType(findField(type.asType(), "models").asType()));
assertTrue(isArrayType(findField(type.asType(), "colors").asType()));
assertFalse(isArrayType((Element) null));
assertFalse(isArrayType((TypeMirror) null));
}
@Test
void testIsEnumType() {
TypeElement type = getType(Color.class);
assertTrue(isEnumType(type.asType()));
type = getType(ArrayTypeModel.class);
assertFalse(isEnumType(type.asType()));
assertFalse(isEnumType((Element) null));
assertFalse(isEnumType((TypeMirror) null));
}
@Test
void testIsClassType() {
TypeElement type = getType(ArrayTypeModel.class);
assertTrue(isClassType(type.asType()));
type = getType(Model.class);
assertTrue(isClassType(type.asType()));
assertFalse(isClassType((Element) null));
assertFalse(isClassType((TypeMirror) null));
}
@Test
void testIsPrimitiveType() {
TypeElement type = getType(PrimitiveTypeModel.class);
getDeclaredFields(type.asType()).stream()
.map(VariableElement::asType)
.forEach(t -> assertTrue(isPrimitiveType(t)));
assertFalse(isPrimitiveType(getType(ArrayTypeModel.class)));
assertFalse(isPrimitiveType((Element) null));
assertFalse(isPrimitiveType((TypeMirror) null));
}
@Test
void testIsInterfaceType() {
TypeElement type = getType(CharSequence.class);
assertTrue(isInterfaceType(type));
assertTrue(isInterfaceType(type.asType()));
type = getType(Model.class);
assertFalse(isInterfaceType(type));
assertFalse(isInterfaceType(type.asType()));
assertFalse(isInterfaceType((Element) null));
assertFalse(isInterfaceType((TypeMirror) null));
}
@Test
void testIsAnnotationType() {
TypeElement type = getType(Override.class);
assertTrue(isAnnotationType(type));
assertTrue(isAnnotationType(type.asType()));
type = getType(Model.class);
assertFalse(isAnnotationType(type));
assertFalse(isAnnotationType(type.asType()));
assertFalse(isAnnotationType((Element) null));
assertFalse(isAnnotationType((TypeMirror) null));
}
@Test
void testGetHierarchicalTypes() {
Set hierarchicalTypes = getHierarchicalTypes(testType.asType(), true, true, true);
Iterator iterator = hierarchicalTypes.iterator();
assertEquals(8, hierarchicalTypes.size());
assertEquals(
"org.apache.dubbo.metadata.tools.TestServiceImpl",
iterator.next().toString());
assertEquals(
"org.apache.dubbo.metadata.tools.GenericTestService",
iterator.next().toString());
assertEquals(
"org.apache.dubbo.metadata.tools.DefaultTestService",
iterator.next().toString());
assertEquals("java.lang.Object", iterator.next().toString());
assertEquals(
"org.apache.dubbo.metadata.tools.TestService", iterator.next().toString());
assertEquals("java.lang.AutoCloseable", iterator.next().toString());
assertEquals("java.io.Serializable", iterator.next().toString());
assertEquals("java.util.EventListener", iterator.next().toString());
hierarchicalTypes = getHierarchicalTypes(testType);
iterator = hierarchicalTypes.iterator();
assertEquals(8, hierarchicalTypes.size());
assertEquals(
"org.apache.dubbo.metadata.tools.TestServiceImpl",
iterator.next().toString());
assertEquals(
"org.apache.dubbo.metadata.tools.GenericTestService",
iterator.next().toString());
assertEquals(
"org.apache.dubbo.metadata.tools.DefaultTestService",
iterator.next().toString());
assertEquals("java.lang.Object", iterator.next().toString());
assertEquals(
"org.apache.dubbo.metadata.tools.TestService", iterator.next().toString());
assertEquals("java.lang.AutoCloseable", iterator.next().toString());
assertEquals("java.io.Serializable", iterator.next().toString());
assertEquals("java.util.EventListener", iterator.next().toString());
hierarchicalTypes = getHierarchicalTypes(testType.asType(), Object.class);
iterator = hierarchicalTypes.iterator();
assertEquals(7, hierarchicalTypes.size());
assertEquals(
"org.apache.dubbo.metadata.tools.TestServiceImpl",
iterator.next().toString());
assertEquals(
"org.apache.dubbo.metadata.tools.GenericTestService",
iterator.next().toString());
assertEquals(
"org.apache.dubbo.metadata.tools.DefaultTestService",
iterator.next().toString());
assertEquals(
"org.apache.dubbo.metadata.tools.TestService", iterator.next().toString());
assertEquals("java.lang.AutoCloseable", iterator.next().toString());
assertEquals("java.io.Serializable", iterator.next().toString());
assertEquals("java.util.EventListener", iterator.next().toString());
hierarchicalTypes = getHierarchicalTypes(testType.asType(), true, true, false);
iterator = hierarchicalTypes.iterator();
assertEquals(4, hierarchicalTypes.size());
assertEquals(
"org.apache.dubbo.metadata.tools.TestServiceImpl",
iterator.next().toString());
assertEquals(
"org.apache.dubbo.metadata.tools.GenericTestService",
iterator.next().toString());
assertEquals(
"org.apache.dubbo.metadata.tools.DefaultTestService",
iterator.next().toString());
assertEquals("java.lang.Object", iterator.next().toString());
hierarchicalTypes = getHierarchicalTypes(testType.asType(), true, false, true);
iterator = hierarchicalTypes.iterator();
assertEquals(5, hierarchicalTypes.size());
assertEquals(
"org.apache.dubbo.metadata.tools.TestServiceImpl",
iterator.next().toString());
assertEquals(
"org.apache.dubbo.metadata.tools.TestService", iterator.next().toString());
assertEquals("java.lang.AutoCloseable", iterator.next().toString());
assertEquals("java.io.Serializable", iterator.next().toString());
assertEquals("java.util.EventListener", iterator.next().toString());
hierarchicalTypes = getHierarchicalTypes(testType.asType(), false, false, true);
iterator = hierarchicalTypes.iterator();
assertEquals(4, hierarchicalTypes.size());
assertEquals(
"org.apache.dubbo.metadata.tools.TestService", iterator.next().toString());
assertEquals("java.lang.AutoCloseable", iterator.next().toString());
assertEquals("java.io.Serializable", iterator.next().toString());
assertEquals("java.util.EventListener", iterator.next().toString());
hierarchicalTypes = getHierarchicalTypes(testType.asType(), true, false, false);
iterator = hierarchicalTypes.iterator();
assertEquals(1, hierarchicalTypes.size());
assertEquals(
"org.apache.dubbo.metadata.tools.TestServiceImpl",
iterator.next().toString());
hierarchicalTypes = getHierarchicalTypes(testType.asType(), false, false, false);
assertEquals(0, hierarchicalTypes.size());
assertTrue(getHierarchicalTypes((TypeElement) null).isEmpty());
assertTrue(getHierarchicalTypes((TypeMirror) null).isEmpty());
}
@Test
void testGetInterfaces() {
TypeElement type = getType(Model.class);
List<TypeMirror> interfaces = getInterfaces(type);
assertTrue(interfaces.isEmpty());
interfaces = getInterfaces(testType.asType());
assertEquals(3, interfaces.size());
assertEquals(
"org.apache.dubbo.metadata.tools.TestService", interfaces.get(0).toString());
assertEquals("java.lang.AutoCloseable", interfaces.get(1).toString());
assertEquals("java.io.Serializable", interfaces.get(2).toString());
assertTrue(getInterfaces((TypeElement) null).isEmpty());
assertTrue(getInterfaces((TypeMirror) null).isEmpty());
}
@Test
void testGetAllInterfaces() {
Set<? extends TypeMirror> interfaces = getAllInterfaces(testType.asType());
assertEquals(4, interfaces.size());
Iterator<? extends TypeMirror> iterator = interfaces.iterator();
assertEquals(
"org.apache.dubbo.metadata.tools.TestService", iterator.next().toString());
assertEquals("java.lang.AutoCloseable", iterator.next().toString());
assertEquals("java.io.Serializable", iterator.next().toString());
assertEquals("java.util.EventListener", iterator.next().toString());
Set<TypeElement> allInterfaces = getAllInterfaces(testType);
assertEquals(4, interfaces.size());
Iterator<TypeElement> allIterator = allInterfaces.iterator();
assertEquals(
"org.apache.dubbo.metadata.tools.TestService",
allIterator.next().toString());
assertEquals("java.lang.AutoCloseable", allIterator.next().toString());
assertEquals("java.io.Serializable", allIterator.next().toString());
assertEquals("java.util.EventListener", allIterator.next().toString());
assertTrue(getAllInterfaces((TypeElement) null).isEmpty());
assertTrue(getAllInterfaces((TypeMirror) null).isEmpty());
}
@Test
void testGetType() {
TypeElement element = TypeUtils.getType(processingEnv, String.class);
assertEquals(element, TypeUtils.getType(processingEnv, element.asType()));
assertEquals(element, TypeUtils.getType(processingEnv, "java.lang.String"));
assertNull(TypeUtils.getType(processingEnv, (Type) null));
assertNull(TypeUtils.getType(processingEnv, (TypeMirror) null));
assertNull(TypeUtils.getType(processingEnv, (CharSequence) null));
assertNull(TypeUtils.getType(null, (CharSequence) null));
}
@Test
void testGetSuperType() {
TypeElement gtsTypeElement = getSuperType(testType);
assertEquals(gtsTypeElement, getType(GenericTestService.class));
TypeElement dtsTypeElement = getSuperType(gtsTypeElement);
assertEquals(dtsTypeElement, getType(DefaultTestService.class));
TypeMirror gtsType = getSuperType(testType.asType());
assertEquals(gtsType, getType(GenericTestService.class).asType());
TypeMirror dtsType = getSuperType(gtsType);
assertEquals(dtsType, getType(DefaultTestService.class).asType());
assertNull(getSuperType((TypeElement) null));
assertNull(getSuperType((TypeMirror) null));
}
@Test
void testGetAllSuperTypes() {
Set<?> allSuperTypes = getAllSuperTypes(testType);
Iterator<?> iterator = allSuperTypes.iterator();
assertEquals(3, allSuperTypes.size());
assertEquals(iterator.next(), getType(GenericTestService.class));
assertEquals(iterator.next(), getType(DefaultTestService.class));
assertEquals(iterator.next(), getType(Object.class));
allSuperTypes = getAllSuperTypes(testType);
iterator = allSuperTypes.iterator();
assertEquals(3, allSuperTypes.size());
assertEquals(iterator.next(), getType(GenericTestService.class));
assertEquals(iterator.next(), getType(DefaultTestService.class));
assertEquals(iterator.next(), getType(Object.class));
assertTrue(getAllSuperTypes((TypeElement) null).isEmpty());
assertTrue(getAllSuperTypes((TypeMirror) null).isEmpty());
}
@Test
void testIsDeclaredType() {
assertTrue(isDeclaredType(testType));
assertTrue(isDeclaredType(testType.asType()));
assertFalse(isDeclaredType((Element) null));
assertFalse(isDeclaredType((TypeMirror) null));
assertFalse(isDeclaredType(types.getNullType()));
assertFalse(isDeclaredType(types.getPrimitiveType(TypeKind.BYTE)));
assertFalse(isDeclaredType(types.getArrayType(types.getPrimitiveType(TypeKind.BYTE))));
}
@Test
void testOfDeclaredType() {
assertEquals(testType.asType(), ofDeclaredType(testType));
assertEquals(testType.asType(), ofDeclaredType(testType.asType()));
assertEquals(ofDeclaredType(testType), ofDeclaredType(testType.asType()));
assertNull(ofDeclaredType((Element) null));
assertNull(ofDeclaredType((TypeMirror) null));
}
@Test
void testIsTypeElement() {
assertTrue(isTypeElement(testType));
assertTrue(isTypeElement(testType.asType()));
assertFalse(isTypeElement((Element) null));
assertFalse(isTypeElement((TypeMirror) null));
}
@Test
void testOfTypeElement() {
assertEquals(testType, ofTypeElement(testType));
assertEquals(testType, ofTypeElement(testType.asType()));
assertNull(ofTypeElement((Element) null));
assertNull(ofTypeElement((TypeMirror) null));
}
@Test
void testOfDeclaredTypes() {
Set<DeclaredType> declaredTypes =
ofDeclaredTypes(asList(getType(String.class), getType(TestServiceImpl.class), getType(Color.class)));
assertTrue(declaredTypes.contains(getType(String.class).asType()));
assertTrue(declaredTypes.contains(getType(TestServiceImpl.class).asType()));
assertTrue(declaredTypes.contains(getType(Color.class).asType()));
assertTrue(ofDeclaredTypes(null).isEmpty());
}
@Test
void testListDeclaredTypes() {
List<DeclaredType> types = listDeclaredTypes(asList(testType, testType, testType));
assertEquals(1, types.size());
assertEquals(ofDeclaredType(testType), types.get(0));
types = listDeclaredTypes(asList(new Element[] {null}));
assertTrue(types.isEmpty());
}
@Test
void testListTypeElements() {
List<TypeElement> typeElements = listTypeElements(asList(testType.asType(), ofDeclaredType(testType)));
assertEquals(1, typeElements.size());
assertEquals(testType, typeElements.get(0));
typeElements = listTypeElements(
asList(types.getPrimitiveType(TypeKind.BYTE), types.getNullType(), types.getNoType(TypeKind.NONE)));
assertTrue(typeElements.isEmpty());
typeElements = listTypeElements(asList(new TypeMirror[] {null}));
assertTrue(typeElements.isEmpty());
typeElements = listTypeElements(null);
assertTrue(typeElements.isEmpty());
}
@Test
@Disabled
public void testGetResource() throws URISyntaxException {
URL resource = getResource(processingEnv, testType);
assertNotNull(resource);
assertTrue(new File(resource.toURI()).exists());
assertEquals(resource, getResource(processingEnv, testType.asType()));
assertEquals(resource, getResource(processingEnv, "org.apache.dubbo.metadata.tools.TestServiceImpl"));
assertThrows(RuntimeException.class, () -> getResource(processingEnv, "NotFound"));
}
@Test
void testGetResourceName() {
assertEquals("java/lang/String.class", getResourceName("java.lang.String"));
assertNull(getResourceName(null));
}
}
|
TypeUtilsTest
|
java
|
apache__kafka
|
connect/runtime/src/test/java/org/apache/kafka/connect/runtime/WorkerConfigTransformerTest.java
|
{
"start": 5388,
"end": 6364
}
|
class ____ implements ConfigProvider {
@Override
public void configure(Map<String, ?> configs) {
}
@Override
public ConfigData get(String path) {
return null;
}
@Override
public ConfigData get(String path, Set<String> keys) {
if (path.equals(TEST_PATH)) {
if (keys.contains(TEST_KEY)) {
return new ConfigData(Map.of(TEST_KEY, TEST_RESULT));
} else if (keys.contains(TEST_KEY_WITH_TTL)) {
return new ConfigData(Map.of(TEST_KEY_WITH_TTL, TEST_RESULT_WITH_TTL), 1L);
} else if (keys.contains(TEST_KEY_WITH_LONGER_TTL)) {
return new ConfigData(Map.of(TEST_KEY_WITH_LONGER_TTL, TEST_RESULT_WITH_LONGER_TTL), 10L);
}
}
return new ConfigData(Map.of());
}
@Override
public void close() {
}
}
}
|
TestConfigProvider
|
java
|
apache__camel
|
components/camel-xslt/src/main/java/org/apache/camel/component/xslt/XsltOutput.java
|
{
"start": 852,
"end": 915
}
|
enum ____ {
string,
bytes,
DOM,
file
}
|
XsltOutput
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.