language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
quarkusio__quarkus
|
integration-tests/maven/src/test/resources-filtered/projects/multimodule-pom-reference/rest/src/test/java/org/acme/test/SimpleTest.java
|
{
"start": 108,
"end": 215
}
|
class ____ {
@Test
public void testNothing() {
Assertions.assertTrue(true);
}
}
|
SimpleTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/IterableAndIteratorTest.java
|
{
"start": 3980,
"end": 4158
}
|
class ____ {
String tag;
MyNode next;
}
/** Test List that implements only Iterator */
public static
|
MyNode
|
java
|
apache__flink
|
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/inference/strategies/ArrayElementTypeStrategy.java
|
{
"start": 1324,
"end": 1979
}
|
class ____ implements TypeStrategy {
@Override
public Optional<DataType> inferType(CallContext callContext) {
DataType arrayType = callContext.getArgumentDataTypes().get(0);
final Optional<DataType> legacyArrayElement =
StrategyUtils.extractLegacyArrayElement(arrayType);
if (legacyArrayElement.isPresent()) {
return legacyArrayElement;
}
if (!arrayType.getLogicalType().is(LogicalTypeFamily.COLLECTION)) {
return Optional.empty();
}
return Optional.of(((CollectionDataType) arrayType).getElementDataType().nullable());
}
}
|
ArrayElementTypeStrategy
|
java
|
spring-projects__spring-boot
|
build-plugin/spring-boot-maven-plugin/src/test/java/org/springframework/boot/maven/DependencyFilterTests.java
|
{
"start": 1216,
"end": 1949
}
|
class ____ {
@Test
void excludeFiltersBasedOnPredicate() throws ArtifactFilterException {
DependencyFilter filter = DependencyFilter.exclude(Artifact::isOptional);
ArtifactHandler ah = new DefaultArtifactHandler();
VersionRange v = VersionRange.createFromVersion("1.0.0");
DefaultArtifact a1 = new DefaultArtifact("com.example", "a1", v, "compile", "jar", null, ah, false);
DefaultArtifact a2 = new DefaultArtifact("com.example", "a2", v, "compile", "jar", null, ah, true);
DefaultArtifact a3 = new DefaultArtifact("com.example", "a3", v, "compile", "jar", null, ah, false);
Set<Artifact> filtered = filter.filter(Set.of(a1, a2, a3));
assertThat(filtered).containsExactlyInAnyOrder(a1, a3);
}
}
|
DependencyFilterTests
|
java
|
google__dagger
|
javatests/dagger/functional/scope/GreenModule.java
|
{
"start": 736,
"end": 851
}
|
class ____ {
@Provides
@IntoSet
@GreenScope
static Object green() {
return new Object();
}
}
|
GreenModule
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/join/lookup/keyordered/KeyAccountingUnit.java
|
{
"start": 1100,
"end": 2367
}
|
class ____<K> {
/** The in-flight records that are being processed, their keys are different from each other. */
private final Map<K, Object> noConflictInFlightRecords;
public KeyAccountingUnit() {
this.noConflictInFlightRecords = new ConcurrentHashMap<>();
}
/**
* Occupy a key for processing, the subsequent records with the same key would be blocked until
* the previous key release.
*
* @return true if no one is occupying this key, and this record succeeds to take it.
*/
public boolean occupy(Object record, K key) {
return noConflictInFlightRecords.putIfAbsent(key, record) == null;
}
/**
* Check if a key is occupied for processing.
*
* @return true if no one is occupying this key.
*/
public boolean ifOccupy(K key) {
return !noConflictInFlightRecords.containsKey(key);
}
public void release(Object record, K key) {
if (noConflictInFlightRecords.remove(key) != record) {
throw new IllegalStateException(
String.format(
"The record %s(%s) is trying to release key which it actually does not hold.",
record, key));
}
}
}
|
KeyAccountingUnit
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/select/MySqlSelectTest_264.java
|
{
"start": 896,
"end": 1328
}
|
class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "select json from t1";
SQLSelectStatement stmt = (SQLSelectStatement) SQLUtils
.parseSingleStatement(sql, DbType.mysql, SQLParserFeature.KeepSourceLocation, SQLParserFeature.EnableSQLBinaryOpExprGroup);
assertEquals("SELECT json\n" +
"FROM t1", stmt.toString());
}
}
|
MySqlSelectTest_264
|
java
|
google__guava
|
android/guava/src/com/google/common/graph/GraphConstants.java
|
{
"start": 724,
"end": 3146
}
|
class ____ {
private GraphConstants() {}
static final int EXPECTED_DEGREE = 2;
static final int DEFAULT_NODE_COUNT = 10;
static final int DEFAULT_EDGE_COUNT = DEFAULT_NODE_COUNT * EXPECTED_DEGREE;
// Load factor and capacity for "inner" (i.e. per node/edge element) hash sets or maps
static final float INNER_LOAD_FACTOR = 1.0f;
static final int INNER_CAPACITY = 2; // ceiling(EXPECTED_DEGREE / INNER_LOAD_FACTOR)
// Error messages
static final String NODE_NOT_IN_GRAPH = "Node %s is not an element of this graph.";
static final String EDGE_NOT_IN_GRAPH = "Edge %s is not an element of this graph.";
static final String NODE_REMOVED_FROM_GRAPH =
"Node %s that was used to generate this set is no longer in the graph.";
static final String NODE_PAIR_REMOVED_FROM_GRAPH =
"Node %s or node %s that were used to generate this set are no longer in the graph.";
static final String EDGE_REMOVED_FROM_GRAPH =
"Edge %s that was used to generate this set is no longer in the graph.";
static final String REUSING_EDGE =
"Edge %s already exists between the following nodes: %s, "
+ "so it cannot be reused to connect the following nodes: %s.";
static final String MULTIPLE_EDGES_CONNECTING =
"Cannot call edgeConnecting() when parallel edges exist between %s and %s. Consider calling "
+ "edgesConnecting() instead.";
static final String PARALLEL_EDGES_NOT_ALLOWED =
"Nodes %s and %s are already connected by a different edge. To construct a graph "
+ "that allows parallel edges, call allowsParallelEdges(true) on the Builder.";
static final String SELF_LOOPS_NOT_ALLOWED =
"Cannot add self-loop edge on node %s, as self-loops are not allowed. To construct a graph "
+ "that allows self-loops, call allowsSelfLoops(true) on the Builder.";
static final String NOT_AVAILABLE_ON_UNDIRECTED =
"Cannot call source()/target() on a EndpointPair from an undirected graph. Consider calling "
+ "adjacentNode(node) if you already have a node, or nodeU()/nodeV() if you don't.";
static final String EDGE_ALREADY_EXISTS = "Edge %s already exists in the graph.";
static final String ENDPOINTS_MISMATCH =
"Mismatch: endpoints' ordering is not compatible with directionality of the graph";
/** Singleton edge value for {@link Graph} implementations backed by {@link ValueGraph}s. */
|
GraphConstants
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/api/AbstractIterableAssert.java
|
{
"start": 4825,
"end": 103928
}
|
class ____<SELF extends AbstractIterableAssert<SELF, ACTUAL, ELEMENT, ELEMENT_ASSERT>,
ACTUAL extends Iterable<? extends ELEMENT>,
ELEMENT,
ELEMENT_ASSERT extends AbstractAssert<ELEMENT_ASSERT, ELEMENT>>
extends AbstractAssertWithComparator<SELF, ACTUAL>
implements ObjectEnumerableAssert<SELF, ELEMENT> {
//@format:on
private static final String ASSERT = "Assert";
private TypeComparators comparatorsByType;
private Map<String, Comparator<?>> comparatorsForElementPropertyOrFieldNames = new TreeMap<>();
private TypeComparators comparatorsForElementPropertyOrFieldTypes;
protected Iterables iterables = Iterables.instance();
protected AbstractIterableAssert(ACTUAL actual, Class<?> selfType) {
super(actual, selfType);
if (actual instanceof SortedSet) {
SortedSet<ELEMENT> sortedSet = (SortedSet<ELEMENT>) actual;
Comparator<? super ELEMENT> comparator = sortedSet.comparator();
if (comparator != null) usingElementComparator(sortedSet.comparator());
}
}
/**
* {@inheritDoc}
*/
@Override
public void isNullOrEmpty() {
iterables.assertNullOrEmpty(info, actual);
}
/**
* {@inheritDoc}
*/
@Override
public void isEmpty() {
iterables.assertEmpty(info, actual);
}
/**
* {@inheritDoc}
*/
@Override
public SELF isNotEmpty() {
iterables.assertNotEmpty(info, actual);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
public SELF hasSize(int expected) {
iterables.assertHasSize(info, actual, expected);
return myself;
}
/**
* Verifies that the number of values in the actual iterable is greater than the given boundary.
* <p>
* Example:
* <pre><code class='java'> // assertion will pass
* assertThat(Arrays.asList(1, 2, 3)).hasSizeGreaterThan(2);
*
* // assertion will fail
* assertThat(Arrays.asList(1, 2, 3)).hasSizeGreaterThan(3);</code></pre>
*
* @param boundary the given value to compare the actual size to.
* @return {@code this} assertion object.
* @throws AssertionError if the number of values of the actual iterable is not greater than the boundary.
* @since 3.12.0
*/
@Override
public SELF hasSizeGreaterThan(int boundary) {
iterables.assertHasSizeGreaterThan(info, actual, boundary);
return myself;
}
/**
* Verifies that the number of values in the actual iterable is greater than or equal to the given boundary.
* <p>
* Example:
* <pre><code class='java'> // assertions will pass
* assertThat(Arrays.asList(1, 2, 3)).hasSizeGreaterThanOrEqualTo(1)
* .hasSizeGreaterThanOrEqualTo(3);
*
* // assertion will fail
* assertThat(Arrays.asList(1, 2, 3)).hasSizeGreaterThanOrEqualTo(4);</code></pre>
*
* @param boundary the given value to compare the actual size to.
* @return {@code this} assertion object.
* @throws AssertionError if the number of values of the actual iterable is not greater than or equal to the boundary.
* @since 3.12.0
*/
@Override
public SELF hasSizeGreaterThanOrEqualTo(int boundary) {
iterables.assertHasSizeGreaterThanOrEqualTo(info, actual, boundary);
return myself;
}
/**
* Verifies that the number of values in the actual iterable is less than the given boundary.
* <p>
* Example:
* <pre><code class='java'> // assertion will pass
* assertThat(Arrays.asList(1, 2, 3)).hasSizeLessThan(4);
*
* // assertion will fail
* assertThat(Arrays.asList(1, 2, 3)).hasSizeLessThan(3);</code></pre>
*
* @param boundary the given value to compare the actual size to.
* @return {@code this} assertion object.
* @throws AssertionError if the number of values of the actual iterable is not less than the boundary.
* @since 3.12.0
*/
@Override
public SELF hasSizeLessThan(int boundary) {
iterables.assertHasSizeLessThan(info, actual, boundary);
return myself;
}
/**
* Verifies that the number of values in the actual iterable is less than or equal to the given boundary.
* <p>
* Example:
* <pre><code class='java'> // assertions will pass
* assertThat(Arrays.asList(1, 2, 3)).hasSizeLessThanOrEqualTo(5)
* .hasSizeLessThanOrEqualTo(3);
*
* // assertion will fail
* assertThat(Arrays.asList(1, 2, 3)).hasSizeLessThanOrEqualTo(2);</code></pre>
*
* @param boundary the given value to compare the actual size to.
* @return {@code this} assertion object.
* @throws AssertionError if the number of values of the actual iterable is not less than or equal to the boundary.
* @since 3.12.0
*/
@Override
public SELF hasSizeLessThanOrEqualTo(int boundary) {
iterables.assertHasSizeLessThanOrEqualTo(info, actual, boundary);
return myself;
}
/**
* Verifies that the number of values in the actual iterable is between the given boundaries (inclusive).
* <p>
* Example:
* <pre><code class='java'> // assertions will pass
* assertThat(Arrays.asList(1, 2, 3)).hasSizeBetween(2, 3)
* .hasSizeBetween(3, 4)
* .hasSizeBetween(3, 3);
*
* // assertion will fail
* assertThat(Arrays.asList(1, 2, 3)).hasSizeBetween(4, 6);</code></pre>
*
* @param lowerBoundary the lower boundary compared to which actual size should be greater than or equal to.
* @param higherBoundary the higher boundary compared to which actual size should be less than or equal to.
* @return {@code this} assertion object.
* @throws AssertionError if the number of values of the actual iterable is not between the boundaries.
* @since 3.12.0
*/
@Override
public SELF hasSizeBetween(int lowerBoundary, int higherBoundary) {
iterables.assertHasSizeBetween(info, actual, lowerBoundary, higherBoundary);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
public SELF hasSameSizeAs(Object other) {
iterables.assertHasSameSizeAs(info, actual, other);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
public SELF hasSameSizeAs(Iterable<?> other) {
iterables.assertHasSameSizeAs(info, actual, other);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
@SafeVarargs
public final SELF contains(ELEMENT... values) {
return containsForProxy(values);
}
// This method is protected in order to be proxied for SoftAssertions / Assumptions.
// The public method for it (the one not ending with "ForProxy") is marked as final and annotated with @SafeVarargs
// in order to avoid compiler warning in user code
protected SELF containsForProxy(ELEMENT[] values) {
iterables.assertContains(info, actual, values);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
@SafeVarargs
public final SELF containsOnly(ELEMENT... values) {
return containsOnlyForProxy(values);
}
// This method is protected in order to be proxied for SoftAssertions / Assumptions.
// The public method for it (the one not ending with "ForProxy") is marked as final and annotated with @SafeVarargs
// in order to avoid compiler warning in user code
protected SELF containsOnlyForProxy(ELEMENT[] values) {
iterables.assertContainsOnly(info, actual, values);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
@SafeVarargs
public final SELF containsOnlyOnce(ELEMENT... values) {
return containsOnlyOnceForProxy(values);
}
// This method is protected in order to be proxied for SoftAssertions / Assumptions.
// The public method for it (the one not ending with "ForProxy") is marked as final and annotated with @SafeVarargs
// in order to avoid compiler warning in user code
protected SELF containsOnlyOnceForProxy(ELEMENT[] values) {
iterables.assertContainsOnlyOnce(info, actual, values);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
public SELF containsOnlyNulls() {
iterables.assertContainsOnlyNulls(info, actual);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
@SafeVarargs
public final SELF containsExactly(ELEMENT... values) {
return containsExactlyForProxy(values);
}
// This method is protected in order to be proxied for SoftAssertions / Assumptions.
// The public method for it (the one not ending with "ForProxy") is marked as final and annotated with @SafeVarargs
// in order to avoid compiler warning in user code
protected SELF containsExactlyForProxy(ELEMENT[] values) {
iterables.assertContainsExactly(info, actual, values);
return myself;
}
/** {@inheritDoc} */
@Override
@SafeVarargs
public final SELF containsExactlyInAnyOrder(ELEMENT... values) {
return containsExactlyInAnyOrderForProxy(values);
}
// This method is protected in order to be proxied for SoftAssertions / Assumptions.
// The public method for it (the one not ending with "ForProxy") is marked as final and annotated with @SafeVarargs
// in order to avoid compiler warning in user code
protected SELF containsExactlyInAnyOrderForProxy(ELEMENT[] values) {
iterables.assertContainsExactlyInAnyOrder(info, actual, values);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
public SELF containsExactlyInAnyOrderElementsOf(Iterable<? extends ELEMENT> values) {
return containsExactlyInAnyOrder(toArray(values));
}
/**
* {@inheritDoc}
*/
@Override
public SELF isSubsetOf(Iterable<? extends ELEMENT> values) {
iterables.assertIsSubsetOf(info, actual, values);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
@SafeVarargs
public final SELF isSubsetOf(ELEMENT... values) {
return isSubsetOfForProxy(values);
}
// This method is protected in order to be proxied for SoftAssertions / Assumptions.
// The public method for it (the one not ending with "ForProxy") is marked as final and annotated with @SafeVarargs
// in order to avoid compiler warning in user code
protected SELF isSubsetOfForProxy(ELEMENT[] values) {
iterables.assertIsSubsetOf(info, actual, Arrays.asList(values));
return myself;
}
/**
* {@inheritDoc}
*/
@Override
@SafeVarargs
public final SELF containsSequence(ELEMENT... sequence) {
return containsSequenceForProxy(sequence);
}
// This method is protected in order to be proxied for SoftAssertions / Assumptions.
// The public method for it (the one not ending with "ForProxy") is marked as final and annotated with @SafeVarargs
// in order to avoid compiler warning in user code
protected SELF containsSequenceForProxy(ELEMENT[] sequence) {
iterables.assertContainsSequence(info, actual, sequence);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
public SELF containsSequence(Iterable<? extends ELEMENT> sequence) {
checkSequenceIsNotNull(sequence);
iterables.assertContainsSequence(info, actual, toArray(sequence));
return myself;
}
/**
* {@inheritDoc}
*/
@Override
@SafeVarargs
public final SELF doesNotContainSequence(ELEMENT... sequence) {
return doesNotContainSequenceForProxy(sequence);
}
// This method is protected in order to be proxied for SoftAssertions / Assumptions.
// The public method for it (the one not ending with "ForProxy") is marked as final and annotated with @SafeVarargs
// in order to avoid compiler warning in user code
protected SELF doesNotContainSequenceForProxy(ELEMENT[] sequence) {
iterables.assertDoesNotContainSequence(info, actual, sequence);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
public SELF doesNotContainSequence(Iterable<? extends ELEMENT> sequence) {
checkSequenceIsNotNull(sequence);
iterables.assertDoesNotContainSequence(info, actual, toArray(sequence));
return myself;
}
/**
* {@inheritDoc}
*/
@Override
@SafeVarargs
public final SELF containsSubsequence(ELEMENT... subsequence) {
return containsSubsequenceForProxy(subsequence);
}
// This method is protected in order to be proxied for SoftAssertions / Assumptions.
// The public method for it (the one not ending with "ForProxy") is marked as final and annotated with @SafeVarargs
// in order to avoid compiler warning in user code
protected SELF containsSubsequenceForProxy(ELEMENT[] subsequence) {
iterables.assertContainsSubsequence(info, actual, subsequence);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
public SELF containsSubsequence(Iterable<? extends ELEMENT> subsequence) {
checkSubsequenceIsNotNull(subsequence);
iterables.assertContainsSubsequence(info, actual, toArray(subsequence));
return myself;
}
/**
* {@inheritDoc}
*/
@Override
@SafeVarargs
public final SELF doesNotContainSubsequence(ELEMENT... subsequence) {
return doesNotContainSubsequenceForProxy(subsequence);
}
// This method is protected in order to be proxied for SoftAssertions / Assumptions.
// The public method for it (the one not ending with "ForProxy") is marked as final and annotated with @SafeVarargs
// in order to avoid compiler warning in user code
protected SELF doesNotContainSubsequenceForProxy(ELEMENT[] subsequence) {
iterables.assertDoesNotContainSubsequence(info, actual, subsequence);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
public SELF doesNotContainSubsequence(Iterable<? extends ELEMENT> subsequence) {
checkSubsequenceIsNotNull(subsequence);
iterables.assertDoesNotContainSubsequence(info, actual, toArray(subsequence));
return myself;
}
@Override
@SafeVarargs
public final SELF doesNotContain(ELEMENT... values) {
return doesNotContainForProxy(values);
}
// This method is protected in order to be proxied for SoftAssertions / Assumptions.
// The public method for it (the one not ending with "ForProxy") is marked as final and annotated with @SafeVarargs
// in order to avoid compiler warning in user code
protected SELF doesNotContainForProxy(ELEMENT[] values) {
iterables.assertDoesNotContain(info, actual, values);
return myself;
}
@Override
public SELF doesNotContainAnyElementsOf(Iterable<? extends ELEMENT> iterable) {
iterables.assertDoesNotContainAnyElementsOf(info, actual, iterable);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
public SELF doesNotHaveDuplicates() {
iterables.assertDoesNotHaveDuplicates(info, actual);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
@SafeVarargs
public final SELF startsWith(ELEMENT... sequence) {
return startsWithForProxy(sequence);
}
// This method is protected in order to be proxied for SoftAssertions / Assumptions.
// The public method for it (the one not ending with "ForProxy") is marked as final and annotated with @SafeVarargs
// in order to avoid compiler warning in user code
protected SELF startsWithForProxy(ELEMENT[] sequence) {
iterables.assertStartsWith(info, actual, sequence);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
@SafeVarargs
public final SELF endsWith(ELEMENT first, ELEMENT... rest) {
return endsWithForProxy(first, rest);
}
// This method is protected in order to be proxied for SoftAssertions / Assumptions.
// The public method for it (the one not ending with "ForProxy") is marked as final and annotated with @SafeVarargs
// in order to avoid compiler warning in user code
protected SELF endsWithForProxy(ELEMENT first, ELEMENT[] rest) {
iterables.assertEndsWith(info, actual, first, rest);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
public SELF endsWith(ELEMENT[] sequence) {
iterables.assertEndsWith(info, actual, sequence);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
public SELF containsNull() {
iterables.assertContainsNull(info, actual);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
public SELF doesNotContainNull() {
iterables.assertDoesNotContainNull(info, actual);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
public SELF are(Condition<? super ELEMENT> condition) {
iterables.assertAre(info, actual, condition);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
public SELF areNot(Condition<? super ELEMENT> condition) {
iterables.assertAreNot(info, actual, condition);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
public SELF have(Condition<? super ELEMENT> condition) {
iterables.assertHave(info, actual, condition);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
public SELF doNotHave(Condition<? super ELEMENT> condition) {
iterables.assertDoNotHave(info, actual, condition);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
public SELF areAtLeastOne(Condition<? super ELEMENT> condition) {
areAtLeast(1, condition);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
public SELF areAtLeast(int times, Condition<? super ELEMENT> condition) {
iterables.assertAreAtLeast(info, actual, times, condition);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
public SELF areAtMost(int times, Condition<? super ELEMENT> condition) {
iterables.assertAreAtMost(info, actual, times, condition);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
public SELF areExactly(int times, Condition<? super ELEMENT> condition) {
iterables.assertAreExactly(info, actual, times, condition);
return myself;
}
/** {@inheritDoc} */
@Override
public SELF haveAtLeastOne(Condition<? super ELEMENT> condition) {
return haveAtLeast(1, condition);
}
/**
* {@inheritDoc}
*/
@Override
public SELF haveAtLeast(int times, Condition<? super ELEMENT> condition) {
iterables.assertHaveAtLeast(info, actual, times, condition);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
public SELF haveAtMost(int times, Condition<? super ELEMENT> condition) {
iterables.assertHaveAtMost(info, actual, times, condition);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
public SELF haveExactly(int times, Condition<? super ELEMENT> condition) {
iterables.assertHaveExactly(info, actual, times, condition);
return myself;
}
/**
* Verifies that at least one element in the actual {@code Iterable} has the specified type (matching includes
* subclasses of the given type).
* <p>
* Example:
* <pre><code class='java'> List<Number> numbers = new ArrayList<Number>();
* numbers.add(1);
* numbers.add(2L);
*
* // successful assertion:
* assertThat(numbers).hasAtLeastOneElementOfType(Long.class);
*
* // assertion failure:
* assertThat(numbers).hasAtLeastOneElementOfType(Float.class);</code></pre>
*
* @param expectedType the expected type.
* @return this assertion object.
* @throws NullPointerException if the given type is {@code null}.
* @throws AssertionError if the actual {@code Object} group does not have any elements of the given type.
*/
@Override
public SELF hasAtLeastOneElementOfType(Class<?> expectedType) {
// reuse code from object arrays as the logic is the same
// (ok since this assertion doesn't rely on a comparison strategy)
ObjectArrays.instance().assertHasAtLeastOneElementOfType(info, toArray(actual), expectedType);
return myself;
}
/**
* Verifies that all elements in the actual {@code Iterable} have the specified type (matching includes
* subclasses of the given type).
* <p>
* Example:
* <pre><code class='java'> List<Number> numbers = new ArrayList<Number>();
* numbers.add(1);
* numbers.add(2);
* numbers.add(3);
*
* // successful assertions:
* assertThat(numbers).hasOnlyElementsOfType(Number.class);
* assertThat(numbers).hasOnlyElementsOfType(Integer.class);
*
* // assertion failure:
* assertThat(numbers).hasOnlyElementsOfType(Long.class);</code></pre>
*
* @param expectedType the expected type.
* @return this assertion object.
* @throws NullPointerException if the given type is {@code null}.
* @throws AssertionError if one element is not of the expected type.
*/
@Override
public SELF hasOnlyElementsOfType(Class<?> expectedType) {
// reuse code from object arrays as the logic is the same
// (ok since this assertion doesn't rely on a comparison strategy)
ObjectArrays.instance().assertHasOnlyElementsOfType(info, toArray(actual), expectedType);
return myself;
}
/**
* Verifies that all elements in the actual {@code Iterable} do not have the specified types (including subclasses).
* <p>
* Example:
* <pre><code class='java'> List<Number> numbers = new ArrayList<>();
* numbers.add(1);
* numbers.add(2);
* numbers.add(3.0);
*
* // successful assertions:
* assertThat(numbers).doesNotHaveAnyElementsOfTypes(Long.class, Float.class);
*
* // assertion failure:
* assertThat(numbers).doesNotHaveAnyElementsOfTypes(Long.class, Integer.class);</code></pre>
*
* @param unexpectedTypes the not expected types.
* @return this assertion object.
* @throws NullPointerException if the given type is {@code null}.
* @throws AssertionError if one element's type matches the given types.
* @since 2.9.0 / 3.9.0
*/
@Override
public SELF doesNotHaveAnyElementsOfTypes(Class<?>... unexpectedTypes) {
ObjectArrays.instance().assertDoesNotHaveAnyElementsOfTypes(info, toArray(actual), unexpectedTypes);
return myself;
}
/** {@inheritDoc} */
@Override
public SELF hasOnlyElementsOfTypes(Class<?>... types) {
ObjectArrays.instance().assertHasOnlyElementsOfTypes(info, toArray(actual), types);
return myself;
}
/** {@inheritDoc} */
@Override
public SELF hasExactlyElementsOfTypes(Class<?>... types) {
ObjectArrays.instance().assertHasExactlyElementsOfTypes(info, toArray(actual), types);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
public SELF containsAll(Iterable<? extends ELEMENT> iterable) {
iterables.assertContainsAll(info, actual, iterable);
return myself;
}
/**
* {@inheritDoc}
*/
@Override
@CheckReturnValue
public SELF usingElementComparator(Comparator<? super ELEMENT> elementComparator) {
this.iterables = new Iterables(new ComparatorBasedComparisonStrategy(elementComparator));
// to have the same semantics on base assertions like isEqualTo, we need to use an iterable comparator comparing
// elements with elementComparator parameter
objects = new Objects(new IterableElementComparisonStrategy<>(elementComparator));
return myself;
}
/**
* {@inheritDoc}
*/
@Override
@CheckReturnValue
public SELF usingDefaultElementComparator() {
this.iterables = Iterables.instance();
return usingDefaultComparator();
}
/**
* Verifies that the actual {@link Iterable} contains at least one of the given values.
* <p>
* Example:
* <pre><code class='java'> Iterable<String> abc = Arrays.asList("a", "b", "c");
*
* // assertions will pass
* assertThat(abc).containsAnyOf("b")
* .containsAnyOf("b", "c")
* .containsAnyOf("a", "b", "c")
* .containsAnyOf("a", "b", "c", "d")
* .containsAnyOf("e", "f", "g", "b");
*
* // assertions will fail
* assertThat(abc).containsAnyOf("d");
* assertThat(abc).containsAnyOf("d", "e", "f", "g");</code></pre>
*
* @param values the values whose at least one which is expected to be in the {@code Iterable} under test.
* @return {@code this} assertion object.
* @throws NullPointerException if the array of values is {@code null}.
* @throws IllegalArgumentException if the array of values is empty and the {@code Iterable} under test is not empty.
* @throws AssertionError if the {@code Iterable} under test is {@code null}.
* @throws AssertionError if the {@code Iterable} under test does not contain any of the given {@code values}.
* @since 2.9.0 / 3.9.0
*/
@Override
@SafeVarargs
public final SELF containsAnyOf(ELEMENT... values) {
return containsAnyOfForProxy(values);
}
// This method is protected in order to be proxied for SoftAssertions / Assumptions.
// The public method for it (the one not ending with "ForProxy") is marked as final and annotated with @SafeVarargs
// in order to avoid compiler warning in user code
protected SELF containsAnyOfForProxy(ELEMENT[] values) {
iterables.assertContainsAnyOf(info, actual, values);
return myself;
}
/**
* Verifies that the {@link Iterable} under test contains at least one of the given {@link Iterable} elements.
* <p>
* Example:
* <pre><code class='java'> Iterable<String> abc = Arrays.asList("a", "b", "c");
*
* // assertions will pass
* assertThat(abc).containsAnyElementsOf(Arrays.asList("b"))
* .containsAnyElementsOf(Arrays.asList("b", "c"))
* .containsAnyElementsOf(Arrays.asList("a", "b", "c"))
* .containsAnyElementsOf(Arrays.asList("a", "b", "c", "d"))
* .containsAnyElementsOf(Arrays.asList("e", "f", "g", "b"));
*
* // assertions will fail
* assertThat(abc).containsAnyElementsOf(Arrays.asList("d"));
* assertThat(abc).containsAnyElementsOf(Arrays.asList("d", "e", "f", "g"));</code></pre>
*
* @param iterable the iterable whose at least one element is expected to be in the {@code Iterable} under test.
* @return {@code this} assertion object.
* @throws NullPointerException if the iterable of expected values is {@code null}.
* @throws IllegalArgumentException if the iterable of expected values is empty and the {@code Iterable} under test is not empty.
* @throws AssertionError if the {@code Iterable} under test is {@code null}.
* @throws AssertionError if the {@code Iterable} under test does not contain any of elements from the given {@code Iterable}.
* @since 2.9.0 / 3.9.0
*/
@Override
public SELF containsAnyElementsOf(Iterable<? extends ELEMENT> iterable) {
// Converts the expected iterable to an array and reuses the varargs implementation.
return containsAnyOf(toArray(iterable));
}
/**
* Extract the values of the given field or property from the Iterable's elements under test into a new Iterable, this new
* Iterable becoming the Iterable under test.
* <p>
* It allows you to test a property/field of the Iterable's elements instead of testing the elements themselves, which
* can be much less work!
* <p>
* Let's take a look at an example to make things clearer:
* <pre><code class='java'> // build a list of TolkienCharacters: a TolkienCharacter has a name, and age and a Race (a specific class)
* // they can be public field or properties, both can be extracted.
* List<TolkienCharacter> fellowshipOfTheRing = new ArrayList<TolkienCharacter>();
*
* fellowshipOfTheRing.add(new TolkienCharacter("Frodo", 33, HOBBIT));
* fellowshipOfTheRing.add(new TolkienCharacter("Sam", 38, HOBBIT));
* fellowshipOfTheRing.add(new TolkienCharacter("Gandalf", 2020, MAIA));
* fellowshipOfTheRing.add(new TolkienCharacter("Legolas", 1000, ELF));
* fellowshipOfTheRing.add(new TolkienCharacter("Pippin", 28, HOBBIT));
* fellowshipOfTheRing.add(new TolkienCharacter("Gimli", 139, DWARF));
 * fellowshipOfTheRing.add(new TolkienCharacter("Aragorn", 87, MAN));
* fellowshipOfTheRing.add(new TolkienCharacter("Boromir", 37, MAN));
*
* // let's verify the names of the TolkienCharacters in fellowshipOfTheRing:
*
* assertThat(fellowshipOfTheRing).extracting("name")
* .contains("Boromir", "Gandalf", "Frodo")
* .doesNotContain("Sauron", "Elrond");
*
* // you can extract nested properties/fields like the name of the race:
*
* assertThat(fellowshipOfTheRing).extracting("race.name")
* .contains("Hobbit", "Elf")
* .doesNotContain("Orc");</code></pre>
* <p>
* A property with the given name is searched for first. If it doesn't exist a field with the given name is looked
* for. If the field does not exist an {@link IntrospectionError} is thrown. By default, private fields are read but,
* you can change this with {@link Assertions#setAllowComparingPrivateFields(boolean)}. Trying to read a private field
* when it's not allowed leads to an {@link IntrospectionError}.
* <p>
* Note that the order of extracted property/field values is consistent with the iteration order of the Iterable under
* test, for example if it's a {@link HashSet}, you won't be able to make any assumptions on the extracted values
* order.
* <hr>
* <p>
* Extracting also support maps, that is, instead of extracting values from an Object, it extracts maps values
* corresponding to the given keys.
* <p>
* Example:
* <pre><code class='java'> Employee yoda = new Employee(1L, new Name("Yoda"), 800);
* Employee luke = new Employee(2L, new Name("Luke"), 22);
* Employee han = new Employee(3L, new Name("Han"), 31);
*
* // build two maps
* Map<String, Employee> map1 = new HashMap<>();
* map1.put("key1", yoda);
* map1.put("key2", luke);
*
* Map<String, Employee> map2 = new HashMap<>();
* map2.put("key1", yoda);
* map2.put("key2", han);
*
* // instead of a list of objects, we have a list of maps
* List<Map<String, Employee>> maps = asList(map1, map2);
*
* // extracting a property in that case = get values from maps using the property as a key
* assertThat(maps).extracting("key2").containsExactly(luke, han);
* assertThat(maps).extracting("key1").containsExactly(yoda, yoda);
*
* // type safe version
* assertThat(maps).extracting(key2, Employee.class).containsExactly(luke, han);
*
* // it works with several keys, extracted values being wrapped in a Tuple
* assertThat(maps).extracting("key1", "key2").containsExactly(tuple(yoda, luke), tuple(yoda, han));
*
* // unknown keys leads to null (map behavior)
* assertThat(maps).extracting("bad key").containsExactly(null, null);</code></pre>
*
* @param propertyOrField the property/field to extract from the elements of the Iterable under test
* @return a new assertion object whose object under test is the list of extracted property/field values.
* @throws IntrospectionError if no field or property exists with the given name in one of the initial
* Iterable's element.
*/
@CheckReturnValue
public AbstractListAssert<?, List<?>, Object, ObjectAssert<Object>> extracting(String propertyOrField) {
  // Pull the named property/field out of each element; the extracted list becomes the object under test.
  List<Object> extractedValues = FieldsOrPropertiesExtractor.extract(actual, byName(propertyOrField));
  // Keep any user-supplied description, otherwise describe what was extracted.
  String newDescription = mostRelevantDescription(info.description(), extractedDescriptionOf(propertyOrField));
  return newListAssertInstanceForMethodsChangingElementType(extractedValues).as(newDescription);
}
/**
* Extract the result of given method invocation on the Iterable's elements under test into a new Iterable, this new
* Iterable becoming the Iterable under test.
* <p>
* It allows you to test the method results of the Iterable's elements instead of testing the elements themselves. This
* is especially useful for classes that do not conform to the Java Bean's getter specification (i.e. public String
* toString() or public String status() instead of public String getStatus()).
* <p>
* Let's take a look at an example to make things clearer:
* <pre><code class='java'> // Build an array of WesterosHouse, a WesterosHouse has a method: public String sayTheWords()
*
* List<WesterosHouse> greatHouses = new ArrayList<WesterosHouse>();
* greatHouses.add(new WesterosHouse("Stark", "Winter is Coming"));
* greatHouses.add(new WesterosHouse("Lannister", "Hear Me Roar!"));
* greatHouses.add(new WesterosHouse("Greyjoy", "We Do Not Sow"));
 * greatHouses.add(new WesterosHouse("Baratheon", "Ours is the Fury"));
* greatHouses.add(new WesterosHouse("Martell", "Unbowed, Unbent, Unbroken"));
* greatHouses.add(new WesterosHouse("Tyrell", "Growing Strong"));
*
* // let's verify the words of the great houses of Westeros:
* assertThat(greatHouses).extractingResultOf("sayTheWords")
 *                                    .contains("Winter is Coming", "We Do Not Sow", "Hear Me Roar!")
* .doesNotContain("Lannisters always pay their debts");</code></pre>
*
* Following requirements have to be met to extract method results:
* <ul>
* <li>method has to be public,</li>
* <li>method cannot accept any arguments,</li>
* <li>method cannot return void.</li>
* </ul>
* <p>
* Note that the order of extracted results is consistent with the iteration order of the Iterable under test, for
* example if it's a {@link HashSet}, you won't be able to make any assumptions on the extracted results order.
*
* @param method the name of the method which result is to be extracted from the array under test
* @return a new assertion object whose object under test is the Iterable of extracted values.
* @throws IllegalArgumentException if no method exists with the given name, or method is not public, or method does
* return void, or method accepts arguments.
*/
@CheckReturnValue
public AbstractListAssert<?, List<?>, Object, ObjectAssert<Object>> extractingResultOf(String method) {
  // NOTE: deliberately NOT implemented as extractingResultOf(method, Object.class):
  // that refactoring would break SoftAssertions proxying.
  List<Object> methodResults = FieldsOrPropertiesExtractor.extract(actual, resultOf(method));
  // Keep any user-supplied description, otherwise describe the invoked method.
  String newDescription = mostRelevantDescription(info.description(), extractedDescriptionOfMethod(method));
  return newListAssertInstanceForMethodsChangingElementType(methodResults).as(newDescription);
}
/**
* Extract the result of given method invocation on the Iterable's elements under test into a new list of the given
* class, this new List becoming the object under test.
* <p>
* It allows you to test the method results of the Iterable's elements instead of testing the elements themselves, it
* is especially useful for classes that do not conform to the Java Bean's getter specification (i.e. public String
* toString() or public String status() instead of public String getStatus()).
* <p>
* Let's take an example to make things clearer:
* <pre><code class='java'> // Build an array of WesterosHouse, a WesterosHouse has a method: public String sayTheWords()
* List<WesterosHouse> greatHouses = new ArrayList<WesterosHouse>();
* greatHouses.add(new WesterosHouse("Stark", "Winter is Coming"));
* greatHouses.add(new WesterosHouse("Lannister", "Hear Me Roar!"));
* greatHouses.add(new WesterosHouse("Greyjoy", "We Do Not Sow"));
 * greatHouses.add(new WesterosHouse("Baratheon", "Ours is the Fury"));
* greatHouses.add(new WesterosHouse("Martell", "Unbowed, Unbent, Unbroken"));
* greatHouses.add(new WesterosHouse("Tyrell", "Growing Strong"));
*
* // let's verify the words of the great houses of Westeros:
* assertThat(greatHouses).extractingResultOf("sayTheWords", String.class)
 *                                    .contains("Winter is Coming", "We Do Not Sow", "Hear Me Roar!")
* .doesNotContain("Lannisters always pay their debts");</code></pre>
*
* Following requirements have to be met to extract method results:
* <ul>
* <li>method has to be public,</li>
* <li>method cannot accept any arguments,</li>
* <li>method cannot return void.</li>
* </ul>
* <p>
* Note that the order of extracted property/field values is consistent with the iteration order of the Iterable under
* test, for example if it's a {@link HashSet}, you won't be able to make any assumptions of the extracted values
* order.
*
* @param <P> the type of elements extracted.
* @param method the name of the method which result is to be extracted from the array under test
* @param extractedType type of element of the extracted List
* @return a new assertion object whose object under test is the Iterable of extracted values.
* @throws IllegalArgumentException if no method exists with the given name, or method is not public, or method does
* return void or method accepts arguments.
*/
@CheckReturnValue
public <P> AbstractListAssert<?, List<? extends P>, P, ObjectAssert<P>> extractingResultOf(String method,
    Class<P> extractedType) {
  // extractedType only pins the compile-time element type; the runtime extraction itself is untyped,
  // hence the unchecked cast.
  List<P> methodResults = (List<P>) FieldsOrPropertiesExtractor.extract(actual, resultOf(method));
  String newDescription = mostRelevantDescription(info.description(), extractedDescriptionOfMethod(method));
  return newListAssertInstanceForMethodsChangingElementType(methodResults).as(newDescription);
}
/**
* Extract the values of given field or property from the Iterable's elements under test into a new Iterable, this new
* Iterable becoming the Iterable under test.
* <p>
* It allows you to test a property/field of the Iterable's elements instead of testing the elements themselves,
* which can be much less work!
* <p>
* Let's take an example to make things clearer:
* <pre><code class='java'> // Build a list of TolkienCharacter, a TolkienCharacter has a name, and age and a Race (a specific class)
* // they can be public field or properties, both can be extracted.
* List<TolkienCharacter> fellowshipOfTheRing = new ArrayList<TolkienCharacter>();
*
* fellowshipOfTheRing.add(new TolkienCharacter("Frodo", 33, HOBBIT));
* fellowshipOfTheRing.add(new TolkienCharacter("Sam", 38, HOBBIT));
* fellowshipOfTheRing.add(new TolkienCharacter("Gandalf", 2020, MAIA));
* fellowshipOfTheRing.add(new TolkienCharacter("Legolas", 1000, ELF));
* fellowshipOfTheRing.add(new TolkienCharacter("Pippin", 28, HOBBIT));
* fellowshipOfTheRing.add(new TolkienCharacter("Gimli", 139, DWARF));
 * fellowshipOfTheRing.add(new TolkienCharacter("Aragorn", 87, MAN));
* fellowshipOfTheRing.add(new TolkienCharacter("Boromir", 37, MAN));
*
* // let's verify the names of TolkienCharacter in fellowshipOfTheRing:
* assertThat(fellowshipOfTheRing).extracting("name", String.class)
* .contains("Boromir", "Gandalf", "Frodo")
* .doesNotContain("Sauron", "Elrond");
*
* // you can extract nested property/field like the name of Race:
* assertThat(fellowshipOfTheRing).extracting("race.name", String.class)
* .contains("Hobbit", "Elf")
* .doesNotContain("Orc");</code></pre>
*
* A property with the given name is looked for first, if it doesn't exist then a field with the given name is looked
* for, if the field does not exist an {@link IntrospectionError} is thrown, by default private fields are read but,
* you can change this with {@link Assertions#setAllowComparingPrivateFields(boolean)}, trying to read a private field
* when it's not allowed leads to an {@link IntrospectionError}.
* <p>
* Note that the order of extracted property/field values is consistent with the iteration order of the Iterable under
* test, for example if it's a {@link HashSet}, you won't be able to make any assumptions on the extracted values
* order.
* <hr>
* <p>
* Extracting also support maps, that is, instead of extracting values from an Object, it extract maps values
* corresponding to the given keys.
* <p>
* Example:
* <pre><code class='java'> Employee yoda = new Employee(1L, new Name("Yoda"), 800);
* Employee luke = new Employee(2L, new Name("Luke"), 22);
* Employee han = new Employee(3L, new Name("Han"), 31);
*
* // build two maps
* Map<String, Employee> map1 = new HashMap<>();
* map1.put("key1", yoda);
* map1.put("key2", luke);
*
* Map<String, Employee> map2 = new HashMap<>();
* map2.put("key1", yoda);
* map2.put("key2", han);
*
* // instead of a list of objects, we have a list of maps
* List<Map<String, Employee>> maps = asList(map1, map2);
*
* // extracting a property in that case = get values from maps using property as a key
* assertThat(maps).extracting(key2, Employee.class).containsExactly(luke, han);
*
* // non type safe version
* assertThat(maps).extracting("key2").containsExactly(luke, han);
* assertThat(maps).extracting("key1").containsExactly(yoda, yoda);
*
* // it works with several keys, extracted values being wrapped in a Tuple
* assertThat(maps).extracting("key1", "key2").containsExactly(tuple(yoda, luke), tuple(yoda, han));
*
* // unknown keys leads to null (map behavior)
* assertThat(maps).extracting("bad key").containsExactly(null, null);</code></pre>
*
* @param <P> the type of elements extracted.
* @param propertyOrField the property/field to extract from the Iterable under test
* @param extractingType type to return
* @return a new assertion object whose object under test is the list of extracted property/field values.
* @throws IntrospectionError if no field or property exists with the given name in one of the initial
* Iterable's element.
*/
@CheckReturnValue
public <P> AbstractListAssert<?, List<? extends P>, P, ObjectAssert<P>> extracting(String propertyOrField,
    Class<P> extractingType) {
  // extractingType only pins the compile-time element type; byName extraction is untyped,
  // hence the unchecked cast.
  List<P> extractedValues = (List<P>) FieldsOrPropertiesExtractor.extract(actual, byName(propertyOrField));
  String newDescription = mostRelevantDescription(info.description(), extractedDescriptionOf(propertyOrField));
  return newListAssertInstanceForMethodsChangingElementType(extractedValues).as(newDescription);
}
/**
* Extract the values of the given fields/properties from the Iterable's elements under test into a new Iterable composed
* of Tuples (a simple data structure), this new Iterable becoming the Iterable under test.
* <p>
* It allows you to test fields/properties of the Iterable's elements instead of testing the elements themselves,
* which can be much less work!
* <p>
* The Tuple data corresponds to the extracted values of the given fields/properties, for instance if you ask to
* extract "id", "name" and "email" then each Tuple data will be composed of id, name and email extracted from the
* element of the initial Iterable (the Tuple's data order is the same as the given fields/properties order).
* <p>
* Let's take an example to make things clearer:
* <pre><code class='java'> // Build a list of TolkienCharacter, a TolkienCharacter has a name, and age and a Race (a specific class)
* // they can be public field or properties, both can be extracted.
* List<TolkienCharacter> fellowshipOfTheRing = new ArrayList<TolkienCharacter>();
*
* fellowshipOfTheRing.add(new TolkienCharacter("Frodo", 33, HOBBIT));
* fellowshipOfTheRing.add(new TolkienCharacter("Sam", 38, HOBBIT));
* fellowshipOfTheRing.add(new TolkienCharacter("Gandalf", 2020, MAIA));
* fellowshipOfTheRing.add(new TolkienCharacter("Legolas", 1000, ELF));
* fellowshipOfTheRing.add(new TolkienCharacter("Pippin", 28, HOBBIT));
* fellowshipOfTheRing.add(new TolkienCharacter("Gimli", 139, DWARF));
 * fellowshipOfTheRing.add(new TolkienCharacter("Aragorn", 87, MAN));
* fellowshipOfTheRing.add(new TolkienCharacter("Boromir", 37, MAN));
*
* // let's verify 'name' and 'age' of some TolkienCharacter in fellowshipOfTheRing:
* assertThat(fellowshipOfTheRing).extracting("name", "age")
* .contains(tuple("Boromir", 37),
* tuple("Sam", 38),
* tuple("Legolas", 1000));
*
*
* // extract 'name', 'age' and Race name values:
* assertThat(fellowshipOfTheRing).extracting("name", "age", "race.name")
* .contains(tuple("Boromir", 37, "Man"),
* tuple("Sam", 38, "Hobbit"),
* tuple("Legolas", 1000, "Elf"));</code></pre>
*
* A property with the given name is looked for first, if it doesn't exist then a field with the given name is looked
* for, if the field does not exist an {@link IntrospectionError} is thrown, by default private fields are read but,
* you can change this with {@link Assertions#setAllowComparingPrivateFields(boolean)}, trying to read a private field
* when it's not allowed leads to an {@link IntrospectionError}.
* <p>
* Note that the order of extracted property/field values is consistent with the iteration order of the Iterable under
* test, for example if it's a {@link HashSet}, you won't be able to make any assumptions on the extracted values
* order.
* <hr>
* <p>
* Extracting also support maps, that is, instead of extracting values from an Object, it extract maps values
* corresponding to the given keys.
* <p>
* Example:
* <pre><code class='java'> Employee yoda = new Employee(1L, new Name("Yoda"), 800);
* Employee luke = new Employee(2L, new Name("Luke"), 22);
* Employee han = new Employee(3L, new Name("Han"), 31);
*
* // build two maps
* Map<String, Employee> map1 = new HashMap<>();
* map1.put("key1", yoda);
* map1.put("key2", luke);
*
* Map<String, Employee> map2 = new HashMap<>();
* map2.put("key1", yoda);
* map2.put("key2", han);
*
* // instead of a list of objects, we have a list of maps
* List<Map<String, Employee>> maps = asList(map1, map2);
*
* // extracting a property in that case = get values from maps using property as a key
* assertThat(maps).extracting("key2").containsExactly(luke, han);
* assertThat(maps).extracting("key1").containsExactly(yoda, yoda);
*
* // it works with several keys, extracted values being wrapped in a Tuple
* assertThat(maps).extracting("key1", "key2").containsExactly(tuple(yoda, luke), tuple(yoda, han));
*
* // unknown keys leads to null (map behavior)
* assertThat(maps).extracting("bad key").containsExactly(null, null);</code></pre>
*
* @param propertiesOrFields the properties/fields to extract from the elements of the Iterable under test
* @return a new assertion object whose object under test is the list of Tuple with extracted properties/fields values
* as data.
* @throws IntrospectionError if one of the given name does not match a field or property in one of the initial
* Iterable's element.
*/
@CheckReturnValue
public AbstractListAssert<?, List<? extends Tuple>, Tuple, ObjectAssert<Tuple>> extracting(String... propertiesOrFields) {
  // Each element yields one Tuple whose data follow the order of the requested properties/fields.
  List<Tuple> extractedTuples = FieldsOrPropertiesExtractor.extract(actual, byName(propertiesOrFields));
  String newDescription = mostRelevantDescription(info.description(), extractedDescriptionOf(propertiesOrFields));
  return newListAssertInstanceForMethodsChangingElementType(extractedTuples).as(newDescription);
}
/**
* Extract the values from Iterable's elements under test by applying an extracting function on them. The returned
* iterable becomes the instance under test.
* <p>
* It allows to test values from the elements more safely than by using {@link #extracting(String)}, as it
* doesn't utilize introspection.
* <p>
* Let's have a look at an example:
* <pre><code class='java'> // Build a list of TolkienCharacter, a TolkienCharacter has a name, and age and a Race (a specific class)
* // they can be public field or properties, both can be extracted.
* List<TolkienCharacter> fellowshipOfTheRing = new ArrayList<TolkienCharacter>();
*
* fellowshipOfTheRing.add(new TolkienCharacter("Frodo", 33, HOBBIT));
* fellowshipOfTheRing.add(new TolkienCharacter("Sam", 38, HOBBIT));
* fellowshipOfTheRing.add(new TolkienCharacter("Gandalf", 2020, MAIA));
* fellowshipOfTheRing.add(new TolkienCharacter("Legolas", 1000, ELF));
* fellowshipOfTheRing.add(new TolkienCharacter("Pippin", 28, HOBBIT));
* fellowshipOfTheRing.add(new TolkienCharacter("Gimli", 139, DWARF));
 * fellowshipOfTheRing.add(new TolkienCharacter("Aragorn", 87, MAN));
* fellowshipOfTheRing.add(new TolkienCharacter("Boromir", 37, MAN));
*
* // fellowship has hobbitses, right, my presioussss?
* assertThat(fellowshipOfTheRing).extracting(TolkienCharacter::getRace).contains(HOBBIT);</code></pre>
*
* Note that the order of extracted property/field values is consistent with the iteration order of the Iterable under
* test, for example if it's a {@link HashSet}, you won't be able to make any assumptions on the extracted values order.
*
* @param <V> the type of elements extracted.
* @param extractor the object transforming input object to desired one
* @return a new assertion object whose object under test is the list of values extracted
*/
@CheckReturnValue
public <V> AbstractListAssert<?, List<? extends V>, V, ObjectAssert<V>> extracting(Function<? super ELEMENT, V> extractor) {
// Type-safe counterpart of extracting(String): delegates to the shared internal implementation.
return internalExtracting(extractor);
}
// Shared implementation behind extracting(Function) / map(Function) and their throwing variants.
private <V> AbstractListAssert<?, List<? extends V>, V, ObjectAssert<V>> internalExtracting(Function<? super ELEMENT, V> extractor) {
  // Fail fast with the standard "should not be null" error instead of a raw NPE.
  if (actual == null) {
    throwAssertionError(shouldNotBeNull());
  }
  List<V> extractedValues = FieldsOrPropertiesExtractor.extract(actual, extractor);
  return newListAssertInstanceForMethodsChangingElementType(extractedValues);
}
/**
* Maps the Iterable's elements under test by applying a mapping function, the resulting list becomes the instance under test.
* <p>
* This allows to test values from the elements more safely than by using {@link #extracting(String)}.
* <p>
* Let's have a look at an example:
* <pre><code class='java'> // Build a list of TolkienCharacter, a TolkienCharacter has a name, and age and a Race (a specific class)
* List<TolkienCharacter> fellowshipOfTheRing = new ArrayList<TolkienCharacter>();
*
* fellowshipOfTheRing.add(new TolkienCharacter("Frodo", 33, HOBBIT));
* fellowshipOfTheRing.add(new TolkienCharacter("Sam", 38, HOBBIT));
* fellowshipOfTheRing.add(new TolkienCharacter("Gandalf", 2020, MAIA));
* fellowshipOfTheRing.add(new TolkienCharacter("Legolas", 1000, ELF));
* fellowshipOfTheRing.add(new TolkienCharacter("Pippin", 28, HOBBIT));
* fellowshipOfTheRing.add(new TolkienCharacter("Gimli", 139, DWARF));
 * fellowshipOfTheRing.add(new TolkienCharacter("Aragorn", 87, MAN));
* fellowshipOfTheRing.add(new TolkienCharacter("Boromir", 37, MAN));
*
* // fellowship has hobbitses, right, my precioussss?
* assertThat(fellowshipOfTheRing).map(TolkienCharacter::getRace)
* .contains(HOBBIT);</code></pre>
*
* Note that the order of mapped values is consistent with the order of the Iterable under test, for example if
* it's a {@link HashSet}, you won't be able to make any assumptions on the extracted values order.
*
* @param <V> the type of elements resulting of the map operation.
* @param mapper the {@link Function} transforming input object to desired one
* @return a new assertion object whose object under test is the list of values extracted
* @since 3.19.0
*/
// Stream-style alias of extracting(Function): same implementation, different name.
public <V> AbstractListAssert<?, List<? extends V>, V, ObjectAssert<V>> map(Function<? super ELEMENT, V> mapper) {
return internalExtracting(mapper);
}
/**
* Extract the values from Iterable's elements under test by applying an extracting function (which might throw an
* exception) on them. The returned iterable becomes the instance under test.
* <p>
* Any checked exception raised in the extractor is rethrown wrapped in a {@link RuntimeException}.
* <p>
* It allows to test values from the elements more safely than by using {@link #extracting(String)}, as it
* doesn't utilize introspection.
* <p>
* Let's have a look at an example:
* <pre><code class='java'> // Build a list of TolkienCharacter, a TolkienCharacter has a name, and age and a Race (a specific class)
* // they can be public field or properties, both can be extracted.
* List<TolkienCharacter> fellowshipOfTheRing = new ArrayList<TolkienCharacter>();
*
* fellowshipOfTheRing.add(new TolkienCharacter("Frodo", 33, HOBBIT));
* fellowshipOfTheRing.add(new TolkienCharacter("Sam", 38, HOBBIT));
* fellowshipOfTheRing.add(new TolkienCharacter("Gandalf", 2020, MAIA));
* fellowshipOfTheRing.add(new TolkienCharacter("Legolas", 1000, ELF));
* fellowshipOfTheRing.add(new TolkienCharacter("Pippin", 28, HOBBIT));
* fellowshipOfTheRing.add(new TolkienCharacter("Gimli", 139, DWARF));
 * fellowshipOfTheRing.add(new TolkienCharacter("Aragorn", 87, MAN));
* fellowshipOfTheRing.add(new TolkienCharacter("Boromir", 37, MAN));
*
* assertThat(fellowshipOfTheRing).extracting(input -> {
* if (input.getAge() < 20) {
* throw new Exception("age < 20");
* }
* return input.getName();
* }).contains("Frodo");</code></pre>
*
* Note that the order of extracted property/field values is consistent with the iteration order of the Iterable under
* test, for example if it's a {@link HashSet}, you won't be able to make any assumptions on the extracted values
* order.
*
* @param <EXCEPTION> the exception type of {@link ThrowingExtractor}
* @param <V> the type of elements extracted.
* @param extractor the object transforming input object to desired one
* @return a new assertion object whose object under test is the list of values extracted
* @since 3.7.0
*/
@CheckReturnValue
public <V, EXCEPTION extends Exception> AbstractListAssert<?, List<? extends V>, V, ObjectAssert<V>> extracting(ThrowingExtractor<? super ELEMENT, V, EXCEPTION> extractor) {
// ThrowingExtractor is a Function that rethrows checked exceptions wrapped in a RuntimeException
// (per its contract), so the plain internal extraction path can be reused as-is.
return internalExtracting(extractor);
}
/**
* Maps the Iterable's elements by applying the given mapping function (which might throw an exception), the returned list
* becomes the instance under test.
* <p>
* Any checked exception raised in the function is rethrown wrapped in a {@link RuntimeException}.
* <p>
* This allows to test values from the elements more safely than by using {@link #extracting(String)}.
* <p>
* Let's have a look at an example:
* <pre><code class='java'> // Build a list of TolkienCharacter, a TolkienCharacter has a name, and age and a Race (a specific class)
* List<TolkienCharacter> fellowshipOfTheRing = new ArrayList<TolkienCharacter>();
*
* fellowshipOfTheRing.add(new TolkienCharacter("Frodo", 33, HOBBIT));
* fellowshipOfTheRing.add(new TolkienCharacter("Sam", 38, HOBBIT));
* fellowshipOfTheRing.add(new TolkienCharacter("Gandalf", 2020, MAIA));
* fellowshipOfTheRing.add(new TolkienCharacter("Legolas", 1000, ELF));
* fellowshipOfTheRing.add(new TolkienCharacter("Pippin", 28, HOBBIT));
* fellowshipOfTheRing.add(new TolkienCharacter("Gimli", 139, DWARF));
 * fellowshipOfTheRing.add(new TolkienCharacter("Aragorn", 87, MAN));
* fellowshipOfTheRing.add(new TolkienCharacter("Boromir", 37, MAN));
*
* assertThat(fellowshipOfTheRing).map(input -> {
* if (input.getAge() < 20) {
* throw new Exception("age < 20");
* }
* return input.getName();
* }).contains("Frodo");</code></pre>
*
* Note that the order of mapped values is consistent with the order of the Iterable under test, for example if it's a
* {@link HashSet}, you won't be able to make any assumptions on the extracted values order.
*
* @param <EXCEPTION> the exception type of {@link ThrowingExtractor}
* @param <V> the type of elements extracted.
* @param mapper the function transforming input object to desired one
* @return a new assertion object whose object under test is the list of values extracted
* @since 3.19.0
*/
@CheckReturnValue
public <V, EXCEPTION extends Exception> AbstractListAssert<?, List<? extends V>, V, ObjectAssert<V>> map(ThrowingExtractor<? super ELEMENT, V, EXCEPTION> mapper) {
// Stream-style alias of extracting(ThrowingExtractor): same implementation, different name.
return internalExtracting(mapper);
}
/*
 * Must be used by any method that changes the element type (e.g. {@link #extracting(Function)}): it propagates
 * the whole assertion state to the new assert instance EXCEPT the element comparator, which is tied to the
 * previous element type.
 */
private <V> AbstractListAssert<?, List<? extends V>, V, ObjectAssert<V>> newListAssertInstanceForMethodsChangingElementType(List<V> values) {
  // A SortedSet carries an element comparator (e.g. a SortedSet<Person> ordered by age) that is unlikely to be
  // compatible with the extracted values' type (e.g. a List<String> of names), so drop it and fall back to the
  // default element comparator.
  if (actual instanceof SortedSet) {
    usingDefaultElementComparator();
  }
  return newListAssertInstance(values).withAssertionState(myself);
}
/**
* Extracts Iterable elements values by applying a function and concatenates the result into a list that becomes the instance
* under test.
* <p>
* Example:
* <pre><code class='java'> CartoonCharacter bart = new CartoonCharacter("Bart Simpson");
* CartoonCharacter lisa = new CartoonCharacter("Lisa Simpson");
* CartoonCharacter maggie = new CartoonCharacter("Maggie Simpson");
*
* CartoonCharacter homer = new CartoonCharacter("Homer Simpson");
* homer.getChildren().add(bart);
* homer.getChildren().add(lisa);
* homer.getChildren().add(maggie);
*
* CartoonCharacter pebbles = new CartoonCharacter("Pebbles Flintstone");
* CartoonCharacter fred = new CartoonCharacter("Fred Flintstone");
* fred.getChildren().add(pebbles);
*
* List<CartoonCharacter> parents = list(homer, fred);
*
* // check children property which is a List<CartoonCharacter>
* assertThat(parents).flatExtracting(CartoonCharacter::getChildren)
* .containsOnly(bart, lisa, maggie, pebbles);</code></pre>
*
* The extracted values order is consistent with both the order of the iterable itself and the extracted collections.
*
* @param <V> the type of extracted elements.
* @param extractor the {@link Function} transforming input object to an {@code Iterable} of desired ones
* @return a new assertion object whose object under test is the list of values extracted
*/
@CheckReturnValue
public <V> AbstractListAssert<?, List<? extends V>, V, ObjectAssert<V>> flatExtracting(Function<? super ELEMENT, ? extends Collection<V>> extractor) {
// Delegates to the shared flat-extraction implementation (extract per element, then flatten).
return doFlatExtracting(extractor);
}
/**
* Maps the Iterable's elements under test by applying the given {@link Function} and flattens the resulting collections in a
* list becoming the object under test.
* <p>
* Example:
* <pre><code class='java'> CartoonCharacter bart = new CartoonCharacter("Bart Simpson");
* CartoonCharacter lisa = new CartoonCharacter("Lisa Simpson");
* CartoonCharacter maggie = new CartoonCharacter("Maggie Simpson");
*
* CartoonCharacter homer = new CartoonCharacter("Homer Simpson");
* homer.getChildren().add(bart);
* homer.getChildren().add(lisa);
* homer.getChildren().add(maggie);
*
* CartoonCharacter pebbles = new CartoonCharacter("Pebbles Flintstone");
* CartoonCharacter fred = new CartoonCharacter("Fred Flintstone");
* fred.getChildren().add(pebbles);
*
* List<CartoonCharacter> parents = list(homer, fred);
*
* // check children property which is a List<CartoonCharacter>
* assertThat(parents).flatMap(CartoonCharacter::getChildren)
* .containsOnly(bart, lisa, maggie, pebbles);</code></pre>
*
* The mapped values order is consistent with both the order of the iterable itself and the mapped collections.
*
* @param <V> the type of mapped elements.
* @param mapper the {@link Function} transforming input object to an {@code Iterable} of desired ones
* @return a new assertion object whose object under test is the list of values extracted
* @since 3.19.0
*/
@CheckReturnValue
public <V> AbstractListAssert<?, List<? extends V>, V, ObjectAssert<V>> flatMap(Function<? super ELEMENT, ? extends Collection<V>> mapper) {
// Stream-style alias of flatExtracting(Function): same implementation, different name.
return doFlatExtracting(mapper);
}
/**
* Extracts Iterable elements values by applying a function (which might throw a checked exception) on them and
* concatenates/flattens the result into a single list that becomes the instance under test.
* <p>
* Example:
* <pre><code class='java'> CartoonCharacter bart = new CartoonCharacter("Bart Simpson");
* CartoonCharacter lisa = new CartoonCharacter("Lisa Simpson");
* CartoonCharacter maggie = new CartoonCharacter("Maggie Simpson");
*
* CartoonCharacter homer = new CartoonCharacter("Homer Simpson");
* homer.getChildren().add(bart);
* homer.getChildren().add(lisa);
* homer.getChildren().add(maggie);
*
* CartoonCharacter pebbles = new CartoonCharacter("Pebbles Flintstone");
* CartoonCharacter fred = new CartoonCharacter("Fred Flintstone");
* fred.getChildren().add(pebbles);
*
* List<CartoonCharacter> parents = list(homer, fred);
*
* // check children property where getChildren() can throw an Exception!
* assertThat(parents).flatExtracting(CartoonCharacter::getChildren)
* .containsOnly(bart, lisa, maggie, pebbles);</code></pre>
*
* The extracted values order is consistent with both the order of the iterable itself and the extracted collections.
*
* @param <V> the type of extracted values.
* @param <EXCEPTION> the exception type of {@link ThrowingExtractor}
* @param extractor the object transforming input object to an {@code Iterable} of desired ones
* @return a new assertion object whose object under test is the list of values extracted
* @since 3.7.0
*/
@CheckReturnValue
public <V, EXCEPTION extends Exception> AbstractListAssert<?, List<? extends V>, V, ObjectAssert<V>> flatExtracting(ThrowingExtractor<? super ELEMENT, ? extends Collection<V>, EXCEPTION> extractor) {
  // ThrowingExtractor is accepted by the same shared implementation used for plain Functions.
  return doFlatExtracting(extractor);
}
/**
* Maps the Iterable's elements under test by applying a mapping function (which might throw a checked exception) and
* concatenates/flattens the result into a single list that becomes the instance under test.
* <p>
* Example:
* <pre><code class='java'> CartoonCharacter bart = new CartoonCharacter("Bart Simpson");
* CartoonCharacter lisa = new CartoonCharacter("Lisa Simpson");
* CartoonCharacter maggie = new CartoonCharacter("Maggie Simpson");
*
* CartoonCharacter homer = new CartoonCharacter("Homer Simpson");
* homer.getChildren().add(bart);
* homer.getChildren().add(lisa);
* homer.getChildren().add(maggie);
*
* CartoonCharacter pebbles = new CartoonCharacter("Pebbles Flintstone");
* CartoonCharacter fred = new CartoonCharacter("Fred Flintstone");
* fred.getChildren().add(pebbles);
*
* List<CartoonCharacter> parents = list(homer, fred);
*
* // check children property where getChildren() can throw an Exception!
* assertThat(parents).flatMap(CartoonCharacter::getChildren)
* .containsOnly(bart, lisa, maggie, pebbles);</code></pre>
*
* The mapped values order is consistent with both the order of the iterable itself and the mapped collections.
*
* @param <V> the type of mapped values.
* @param <EXCEPTION> the exception type of {@link ThrowingExtractor}
* @param mapper the object transforming input object to an {@code Iterable} of desired ones
* @return a new assertion object whose object under test is the list of values extracted
* @since 3.19.0
*/
@CheckReturnValue
public <V, EXCEPTION extends Exception> AbstractListAssert<?, List<? extends V>, V, ObjectAssert<V>> flatMap(ThrowingExtractor<? super ELEMENT, ? extends Collection<V>, EXCEPTION> mapper) {
  // alias of flatExtracting(ThrowingExtractor): both delegate to the shared doFlatExtracting implementation.
  return doFlatExtracting(mapper);
}
// Shared implementation behind flatExtracting/flatMap: extracts a collection from each element
// and flattens the extracted collections, preserving element order, into the new object under test.
private <V> AbstractListAssert<?, List<? extends V>, V, ObjectAssert<V>> doFlatExtracting(Function<? super ELEMENT, ? extends Collection<V>> extractor) {
  List<? extends Collection<V>> extractedGroups = FieldsOrPropertiesExtractor.extract(actual, extractor);
  List<V> flattenedValues = newArrayList();
  for (Collection<V> group : extractedGroups) {
    flattenedValues.addAll(group);
  }
  return newListAssertInstanceForMethodsChangingElementType(flattenedValues);
}
/**
* Extracts multiple values from each {@code Iterable}'s element according to the given {@code Function}s and
* concatenates/flattens them in a list that becomes the instance under test.
* <p>
* If extracted values were not flattened, instead of a simple list like (given 2 extractors):
* <pre> element1.value1, element1.value2, element2.value1, element2.value2, ... </pre>
* we would get a list of list like:
* <pre> list(element1.value1, element1.value2), list(element2.value1, element2.value2), ... </pre>
* <p>
* Example:
* <pre><code class='java'> // fellowshipOfTheRing is a List<TolkienCharacter>
*
* // values are extracted in order and flattened: age1, name1, age2, name2, age3 ...
* assertThat(fellowshipOfTheRing).flatExtracting(TolkienCharacter::getAge,
* TolkienCharacter::getName)
* .contains(33 ,"Frodo",
* 1000, "Legolas",
* 87, "Aragorn");</code></pre>
*
* The resulting extracted values list is ordered by {@code Iterable}'s element first and then extracted values,
* this is why is in the example age values come before names.
*
* @param extractors all the extractors to apply on each actual {@code Iterable}'s elements
* @return a new assertion object whose object under test is a flattened list of all extracted values.
*/
@CheckReturnValue
@SafeVarargs
public final AbstractListAssert<?, List<?>, Object, ObjectAssert<Object>> flatExtracting(Function<? super ELEMENT, ?>... extractors) {
  // final + @SafeVarargs avoids unchecked varargs warnings in user code; the proxyable logic lives in flatExtractingForProxy.
  return flatExtractingForProxy(extractors);
}
/**
* Maps multiple values from each {@code Iterable}'s element according to the given {@code Function}s
* and concatenates/flattens them in a list that becomes the instance under test.
* <p>
* If mapped values were not flattened, instead of a simple list like (given 2 extractors):
* <pre> element1.value1, element1.value2, element2.value1, element2.value2, ... </pre>
* we would get a list of list like:
* <pre> list(element1.value1, element1.value2), list(element2.value1, element2.value2), ... </pre>
* <p>
* Example:
* <pre><code class='java'> // fellowshipOfTheRing is a List<TolkienCharacter>
*
* // values are extracted in order and flattened: age1, name1, age2, name2, age3 ...
* assertThat(fellowshipOfTheRing).flatMap(TolkienCharacter::getAge,
* TolkienCharacter::getName)
* .contains(33 ,"Frodo",
* 1000, "Legolas",
* 87, "Aragorn");</code></pre>
*
* The resulting mapped values list is ordered by {@code Iterable}'s element first and then mapped values, this is why is
* in the example age values come before names.
*
* @param mappers all the mappers to apply on each actual {@code Iterable}'s elements
* @return a new assertion object whose object under test is a flattened list of all mapped values.
* @since 3.19.0
*/
@CheckReturnValue
@SafeVarargs
public final AbstractListAssert<?, List<?>, Object, ObjectAssert<Object>> flatMap(Function<? super ELEMENT, ?>... mappers) {
  // alias of flatExtracting(Function...): both delegate to the proxyable flatExtractingForProxy.
  return flatExtractingForProxy(mappers);
}
// This method is protected in order to be proxied for SoftAssertions / Assumptions.
// The public method for it (the one not ending with "ForProxy") is marked as final and annotated with @SafeVarargs
// in order to avoid compiler warning in user code
protected AbstractListAssert<?, List<?>, Object, ObjectAssert<Object>> flatExtractingForProxy(Function<? super ELEMENT, ?>[] extractors) {
  if (actual == null) throwAssertionError(shouldNotBeNull());
  // element-major order: every extractor is applied to element 1, then to element 2, and so on
  List<Object> extractedValues = newArrayList();
  for (ELEMENT element : actual) {
    for (Function<? super ELEMENT, ?> extractor : extractors) {
      extractedValues.add(extractor.apply(element));
    }
  }
  return newListAssertInstanceForMethodsChangingElementType(extractedValues);
}
/**
* Extracts multiple values from each {@code Iterable}'s element according to the given {@link ThrowingExtractor}s
* and concatenates/flattens them in a list that becomes the object under test.
* <p>
* If extracted values were not flattened, instead of a simple list like (given 2 extractors):
* <pre> element1.value1, element1.value2, element2.value1, element2.value2, ... </pre>
* we would get a list of list like:
* <pre> list(element1.value1, element1.value2), list(element2.value1, element2.value2), ... </pre>
* <p>
* Example:
* <pre><code class='java'> // fellowshipOfTheRing is a List<TolkienCharacter>
*
* // values are extracted in order and flattened: age1, name1, age2, name2, age3 ...
* assertThat(fellowshipOfTheRing).flatExtracting(input -> {
* if (input.getAge() < 20) {
* throw new Exception("age < 20");
* }
* return input.getName();
* }, input2 -> {
* if (input2.getAge() < 20) {
* throw new Exception("age < 20");
* }
* return input2.getAge();
* }).contains(33 ,"Frodo",
* 1000, "Legolas",
* 87, "Aragorn");</code></pre>
*
* The resulting extracted values list is ordered by {@code Iterable}'s element first and then extracted values,
* this is why is in the example age values come before names.
*
* @param <EXCEPTION> the exception type of {@link ThrowingExtractor}
* @param extractors all the extractors to apply on each actual {@code Iterable}'s elements
* @return a new assertion object whose object under test is a flattened list of all extracted values.
* @since 3.7.0
*/
@CheckReturnValue
@SafeVarargs
public final <EXCEPTION extends Exception> AbstractListAssert<?, List<?>, Object, ObjectAssert<Object>> flatExtracting(ThrowingExtractor<? super ELEMENT, ?, EXCEPTION>... extractors) {
  // ThrowingExtractors are handled by the same proxyable implementation as plain Functions.
  return flatExtractingForProxy(extractors);
}
/**
* Maps multiple values from each {@code Iterable}'s element according to the given {@link ThrowingExtractor}s and
* concatenates/flattens them in a list that becomes the object under test.
* <p>
* If mapped values were not flattened, instead of a simple list like (given 2 mappers):
* <pre> element1.value1, element1.value2, element2.value1, element2.value2, ... </pre>
* we would get a list of list like:
* <pre> list(element1.value1, element1.value2), list(element2.value1, element2.value2), ... </pre>
* <p>
* Example:
* <pre><code class='java'> // fellowshipOfTheRing is a List<TolkienCharacter>
*
* // values are extracted in order and flattened: age1, name1, age2, name2, age3 ...
* assertThat(fellowshipOfTheRing).flatMap(input -> {
* if (input.getAge() < 20) {
* throw new Exception("age < 20");
* }
* return input.getName();
* }, input2 -> {
* if (input2.getAge() < 20) {
* throw new Exception("age < 20");
* }
* return input2.getAge();
* }).contains(33 ,"Frodo",
* 1000, "Legolas",
* 87, "Aragorn");</code></pre>
*
* The resulting mapped values list is ordered by {@code Iterable}'s element first and then mapped values, this is why is in
* the example age values come before names.
*
* @param <EXCEPTION> the exception type of {@link ThrowingExtractor}
* @param mappers all the mappers to apply on each actual {@code Iterable}'s elements
* @return a new assertion object whose object under test is a flattened list of all extracted values.
* @since 3.19.0
*/
@CheckReturnValue
@SafeVarargs
public final <EXCEPTION extends Exception> AbstractListAssert<?, List<?>, Object, ObjectAssert<Object>> flatMap(ThrowingExtractor<? super ELEMENT, ?, EXCEPTION>... mappers) {
  // alias of flatExtracting(ThrowingExtractor...): both delegate to the proxyable flatExtractingForProxy.
  return flatExtractingForProxy(mappers);
}
/**
* Extract Iterable's elements values corresponding to the given property/field name and concatenates them into a list becoming
* the new instance under test.
* <p>
* This allows testing the elements extracted values that are iterables or arrays.
* <p>
* For example:
* <pre><code class='java'> CartoonCharacter bart = new CartoonCharacter("Bart Simpson");
* CartoonCharacter lisa = new CartoonCharacter("Lisa Simpson");
* CartoonCharacter maggie = new CartoonCharacter("Maggie Simpson");
* CartoonCharacter homer = new CartoonCharacter("Homer Simpson");
* homer.getChildren().add(bart);
* homer.getChildren().add(lisa);
* homer.getChildren().add(maggie);
*
* CartoonCharacter pebbles = new CartoonCharacter("Pebbles Flintstone");
* CartoonCharacter fred = new CartoonCharacter("Fred Flintstone");
* fred.getChildren().add(pebbles);
*
* List<CartoonCharacter> parents = list(homer, fred);
*
* // check children which is a List<CartoonCharacter>
* assertThat(parents).flatExtracting("children")
* .containsOnly(bart, lisa, maggie, pebbles);</code></pre>
*
* The order of extracted values is consisted with both the order of the iterable itself and the extracted collections.
*
* @param fieldOrPropertyName the object transforming input object to an Iterable of desired ones
* @return a new assertion object whose object under test is the list of values extracted
* @throws IllegalArgumentException if one of the extracted property value was not an array or an iterable.
*/
@CheckReturnValue
public AbstractListAssert<?, List<?>, Object, ObjectAssert<Object>> flatExtracting(String fieldOrPropertyName) {
  List<?> extractedGroups = FieldsOrPropertiesExtractor.extract(actual, byName(fieldOrPropertyName));
  List<Object> flattenedValues = newArrayList();
  for (Object group : extractedGroups) {
    // each extracted value must itself be an array or an iterable for flattening to make sense
    if (isArray(group)) {
      int length = Array.getLength(group);
      for (int index = 0; index < length; index++) {
        flattenedValues.add(Array.get(group, index));
      }
    } else if (group instanceof Iterable<?> iterable) {
      iterable.forEach(flattenedValues::add);
    } else {
      // not an array nor an iterable: report the unsupported element type
      CommonErrors.wrongElementTypeForFlatExtracting(group);
    }
  }
  return newListAssertInstanceForMethodsChangingElementType(flattenedValues);
}
/**
* Use the given {@link Function}s to extract the values from the {@link Iterable}'s elements into a new {@link Iterable}
* composed of {@link Tuple}s (a simple data structure containing the extracted values), this new {@link Iterable} becoming the
* object under test.
* <p>
* It allows you to test values from the {@link Iterable}'s elements instead of testing the elements themselves, which sometimes can be
* much less work!
* <p>
* The {@link Tuple} data correspond to the extracted values from the Iterable's elements, for instance if you pass functions
* extracting "id", "name" and "email" values then each Tuple data will be composed of an id, a name and an email
* extracted from the element of the initial Iterable (the Tuple's data order is the same as the given functions order).
* <p>
* Let's take a look at an example to make things clearer:
* <pre><code class='java'> // Build a list of TolkienCharacter, a TolkienCharacter has a name, and age and a Race (a specific class)
* // they can be public field or properties, both can be extracted.
* List<TolkienCharacter> fellowshipOfTheRing = new ArrayList<TolkienCharacter>();
*
* fellowshipOfTheRing.add(new TolkienCharacter("Frodo", 33, HOBBIT));
* fellowshipOfTheRing.add(new TolkienCharacter("Sam", 38, HOBBIT));
* fellowshipOfTheRing.add(new TolkienCharacter("Gandalf", 2020, MAIA));
* fellowshipOfTheRing.add(new TolkienCharacter("Legolas", 1000, ELF));
* fellowshipOfTheRing.add(new TolkienCharacter("Pippin", 28, HOBBIT));
* fellowshipOfTheRing.add(new TolkienCharacter("Gimli", 139, DWARF));
* fellowshipOfTheRing.add(new TolkienCharacter("Aragorn", 87, MAN);
* fellowshipOfTheRing.add(new TolkienCharacter("Boromir", 37, MAN));
*
* // let's verify 'name', 'age' and Race of some TolkienCharacter in fellowshipOfTheRing:
* assertThat(fellowshipOfTheRing).extracting(TolkienCharacter::getName,
* character -> character.getAge(),
* TolkienCharacter::getRace)
* .containsOnly(tuple("Frodo", 33, HOBBIT),
* tuple("Sam", 38, HOBBIT),
* tuple("Gandalf", 2020, MAIA),
* tuple("Legolas", 1000, ELF),
* tuple("Pippin", 28, HOBBIT),
* tuple("Gimli", 139, DWARF),
* tuple("Aragorn", 87, MAN),
* tuple("Boromir", 37, MAN));</code></pre>
* You can use lambda expression or a method reference to extract the expected values.
* <p>
* Use {@link Tuple#tuple(Object...)} to initialize the expected values.
* <p>
* Note that the order of the extracted tuples list is consistent with the iteration order of the Iterable under test,
* for example if it's a {@link HashSet}, you won't be able to make any assumptions on the extracted tuples order.
*
* @param extractors the extractor functions to extract a value from an element of the Iterable under test.
* @return a new assertion object whose object under test is the list of Tuples containing the extracted values.
*/
@CheckReturnValue
@SafeVarargs
public final AbstractListAssert<?, List<? extends Tuple>, Tuple, ObjectAssert<Tuple>> extracting(Function<? super ELEMENT, ?>... extractors) {
  // final + @SafeVarargs avoids unchecked varargs warnings in user code; the proxyable logic lives in extractingForProxy.
  return extractingForProxy(extractors);
}
// This method is protected in order to be proxied for SoftAssertions / Assumptions.
// The public method for it (the one not ending with "ForProxy") is marked as final and annotated with @SafeVarargs
// in order to avoid compiler warning in user code
protected AbstractListAssert<?, List<? extends Tuple>, Tuple, ObjectAssert<Tuple>> extractingForProxy(Function<? super ELEMENT, ?>[] extractors) {
  if (actual == null) throwAssertionError(shouldNotBeNull());
  List<Tuple> tuples = newArrayList();
  for (ELEMENT element : actual) {
    // one Tuple per element, holding the values produced by each extractor in the given order
    Object[] extractedValues = new Object[extractors.length];
    for (int i = 0; i < extractors.length; i++) {
      extractedValues[i] = extractors[i].apply(element);
    }
    tuples.add(new Tuple(extractedValues));
  }
  return newListAssertInstanceForMethodsChangingElementType(tuples);
}
/**
* Use the given {@link Function}s to map the {@link Iterable}'s elements into a {@link List} of {@link Tuple}s
* (a simple data structure containing the mapped values), this new list becoming the object under test.
* <p>
* This allows you to test values from the {@link Iterable}'s elements instead of testing the elements themselves, which
* sometimes can be much less work!
* <p>
* The {@link Tuple} data correspond to the extracted values from the Iterable's elements, for instance if you pass functions
* mapping "id", "name" and "email" values then each {@code Tuple} data will be composed of an id, a name and an email
* mapped from the element of the initial Iterable (the Tuple's data order is the same as the given functions order).
* <p>
* Let's take a look at an example to make things clearer:
* <pre><code class='java'> // Build a list of TolkienCharacter, a TolkienCharacter has a name, and age and a Race (a specific class)
* // they can be public field or properties, both can be extracted.
* List<TolkienCharacter> fellowshipOfTheRing = new ArrayList<TolkienCharacter>();
*
* fellowshipOfTheRing.add(new TolkienCharacter("Frodo", 33, HOBBIT));
* fellowshipOfTheRing.add(new TolkienCharacter("Sam", 38, HOBBIT));
* fellowshipOfTheRing.add(new TolkienCharacter("Gandalf", 2020, MAIA));
* fellowshipOfTheRing.add(new TolkienCharacter("Legolas", 1000, ELF));
* fellowshipOfTheRing.add(new TolkienCharacter("Pippin", 28, HOBBIT));
* fellowshipOfTheRing.add(new TolkienCharacter("Gimli", 139, DWARF));
* fellowshipOfTheRing.add(new TolkienCharacter("Aragorn", 87, MAN);
* fellowshipOfTheRing.add(new TolkienCharacter("Boromir", 37, MAN));
*
* // let's verify 'name', 'age' and Race of some TolkienCharacter in fellowshipOfTheRing:
* assertThat(fellowshipOfTheRing).map(TolkienCharacter::getName,
* character -> character.getAge(),
* TolkienCharacter::getRace)
* .containsOnly(tuple("Frodo", 33, HOBBIT),
* tuple("Sam", 38, HOBBIT),
* tuple("Gandalf", 2020, MAIA),
* tuple("Legolas", 1000, ELF),
* tuple("Pippin", 28, HOBBIT),
* tuple("Gimli", 139, DWARF),
* tuple("Aragorn", 87, MAN),
* tuple("Boromir", 37, MAN));</code></pre>
* You can use lambda expression or a method reference to extract the expected values.
* <p>
* Use {@link Tuple#tuple(Object...)} to initialize the expected values.
* <p>
* Note that the order of the extracted tuples list is consistent with the iteration order of the Iterable under test,
* for example if it's a {@link HashSet}, you won't be able to make any assumptions on the extracted tuples order.
*
* @param mappers the mapper functions to extract a value from an element of the Iterable under test.
* @return a new assertion object whose object under test is the list of Tuples containing the extracted values.
* @since 3.19.0
*/
@CheckReturnValue
@SafeVarargs
public final AbstractListAssert<?, List<? extends Tuple>, Tuple, ObjectAssert<Tuple>> map(Function<? super ELEMENT, ?>... mappers) {
  // alias of extracting(Function...): both delegate to the proxyable extractingForProxy.
  return extractingForProxy(mappers);
}
/**
* Extract the given property/field values from each {@code Iterable}'s element and
* flatten the extracted values in a list that is used as the new object under test.
* <p>
* Given 2 properties, if the extracted values were not flattened, instead having a simple list like:
* <pre> element1.value1, element1.value2, element2.value1, element2.value2, ... </pre>
* ... we would get a list of list:
* <pre> list(element1.value1, element1.value2), list(element2.value1, element2.value2), ... </pre>
* <p>
* Example:
* <pre><code class='java'> // fellowshipOfTheRing is a List<TolkienCharacter>
*
* // values are extracted in order and flattened: age1, name1, age2, name2, age3 ...
* assertThat(fellowshipOfTheRing).flatExtracting("age", "name")
* .contains(33 ,"Frodo",
* 1000, "Legolas",
* 87, "Aragorn");</code></pre>
*
* @param fieldOrPropertyNames the field and/or property names to extract from each actual {@code Iterable}'s element
* @return a new assertion object whose object under test is a flattened list of all extracted values.
* @throws IllegalArgumentException if fieldOrPropertyNames vararg is null or empty
* @since 2.5.0 / 3.5.0
*/
@CheckReturnValue
public AbstractListAssert<?, List<?>, Object, ObjectAssert<Object>> flatExtracting(String... fieldOrPropertyNames) {
  // byName(String...) extracts one Tuple per element; flatten each tuple so values stay in element order
  List<Object> flattenedValues = newArrayList();
  for (Tuple tuple : FieldsOrPropertiesExtractor.extract(actual, byName(fieldOrPropertyNames))) {
    flattenedValues.addAll(tuple.toList());
  }
  return newListAssertInstanceForMethodsChangingElementType(flattenedValues);
}
/**
* {@inheritDoc}
*/
@Override
public SELF containsExactlyElementsOf(Iterable<? extends ELEMENT> iterable) {
  // same contract as containsExactly, with the expected values supplied as an Iterable
  return containsExactly(toArray(iterable));
}
/**
* {@inheritDoc}
*/
@Override
public SELF containsOnlyOnceElementsOf(Iterable<? extends ELEMENT> iterable) {
  // same contract as containsOnlyOnce, with the expected values supplied as an Iterable
  return containsOnlyOnce(toArray(iterable));
}
/**
* {@inheritDoc}
*/
@Override
public SELF hasSameElementsAs(Iterable<? extends ELEMENT> iterable) {
  // same-elements check ignoring order and duplicates:
  // containsOnlyElementsOf is deprecated so we use its implementation
  return containsOnly(toArray(iterable));
}
/**
* Allows to set a specific comparator for the given type of elements or their fields.
* <p>
* Example:
* <pre><code class='java'>
* // assertion will pass
* assertThat(asList("some", new BigDecimal("4.2")))
* .usingComparatorForType(BIG_DECIMAL_COMPARATOR, BigDecimal.class)
* .contains(new BigDecimal("4.20"));
* </code></pre>
*
* @param <T> the type of elements to compare.
* @param comparator the {@link java.util.Comparator} to use
* @param type the {@link java.lang.Class} of the type of the element or element fields the comparator should be used for
* @return {@code this} assertions object
* @since 2.9.0 / 3.9.0
*/
@CheckReturnValue
public <T> SELF usingComparatorForType(Comparator<T> comparator, Class<T> type) {
  // Install the by-type element comparator only once (first registration); it is backed by
  // getComparatorsByType(), so later registrations below are picked up automatically.
  if (iterables.getComparator() == null) {
    usingElementComparator(new ExtendedByTypesComparator(getComparatorsByType()));
  }
  // register the comparator both for element fields/properties and for whole elements of this type
  getComparatorsForElementPropertyOrFieldTypes().registerComparator(type, comparator);
  getComparatorsByType().registerComparator(type, comparator);
  return myself;
}
/**
* Enable using a recursive field by field comparison strategy similar to {@link #usingRecursiveComparison()} but contrary to the latter <b>you can chain any iterable assertions after this method</b> (this is why this method exists).
* <p>
* This method uses the default {@link RecursiveComparisonConfiguration}, if you need to customize it use {@link #usingRecursiveFieldByFieldElementComparator(RecursiveComparisonConfiguration)} instead.
* <p>
* There are differences between this approach and {@link #usingRecursiveComparison()}:
* <ul>
* <li>contrary to {@link RecursiveComparisonAssert}, you can chain any iterable assertions after this method.</li>
* <li><b>no</b> comparators registered with {@link AbstractIterableAssert#usingComparatorForType(Comparator, Class)} will be used, you need to register them in the configuration object.</li>
* <li>the assertion errors won't be as detailed as {@link RecursiveComparisonAssert#isEqualTo(Object)} which shows the field differences.</li>
* </ul>
* <p>
* This last point makes sense, take the {@link #contains(Object...)} assertion, it would not be relevant to report the differences of all the iterable's elements differing from the values to look for.
* <p>
* Example:
* <pre><code class='java'> public record Person(String name, boolean hasPhd) {}
*
* Person drSheldon = new Person("Sheldon Cooper", true);
* Person drLeonard = new Person("Leonard Hofstadter", true);
* Person drRaj = new Person("Raj Koothrappali", true);
*
* Person sheldon = new Person("Sheldon Cooper", true);
* Person leonard = new Person("Leonard Hofstadter", true);
* Person raj = new Person("Raj Koothrappali", true);
* Person howard = new Person("Howard Wolowitz", true);
*
* List<Person> doctors = List.of(drSheldon, drLeonard, drRaj);
*
* // assertion succeeds as both lists contains equivalent items in order.
* assertThat(doctors).usingRecursiveFieldByFieldElementComparator()
* .containsExactlyElementsOf(List.of(sheldon, leonard, raj));
*
* // assertion fails because drLeonard and misspelledLeonard names are different.
* Person misspelledLeonard = new Person("Leonard Ofstater", true);
* assertThat(doctors).usingRecursiveFieldByFieldElementComparator()
* .contains(misspelledLeonard);
*
* // assertion fails because howard is missing and leonard is not expected.
* assertThat(doctors).usingRecursiveFieldByFieldElementComparator()
* .containsExactlyInAnyOrderElementsOf(List.of(howard, sheldon, raj));</code></pre>
* <p>
* Another point worth mentioning: <b>elements order does matter if the expected iterable is ordered</b>, for example comparing a {@code Set<Person>} to a {@code List<Person>} fails as {@code List} is ordered and {@code Set} is not.<br>
* The ordering can be ignored by calling {@link RecursiveComparisonAssert#ignoringCollectionOrder ignoringCollectionOrder} allowing ordered/unordered iterable comparison, note that {@link RecursiveComparisonAssert#ignoringCollectionOrder ignoringCollectionOrder} is applied recursively on any nested iterable fields, if this behavior is too generic,
* use the more fine-grained {@link RecursiveComparisonAssert#ignoringCollectionOrderInFields(String...) ignoringCollectionOrderInFields} or
* {@link RecursiveComparisonAssert#ignoringCollectionOrderInFieldsMatchingRegexes(String...) ignoringCollectionOrderInFieldsMatchingRegexes}.
*
* @return {@code this} assertion object.
* @since 2.5.0 / 3.5.0 - breaking change in 3.20.0
* @see RecursiveComparisonConfiguration
* @see #usingRecursiveFieldByFieldElementComparator(RecursiveComparisonConfiguration)
*/
@CheckReturnValue
public SELF usingRecursiveFieldByFieldElementComparator() {
  // default configuration, built with the current representation for consistent error messages
  return usingRecursiveFieldByFieldElementComparator(new RecursiveComparisonConfiguration(info.representation()));
}
/**
* Enable using a recursive field by field comparison strategy similar to {@link #usingRecursiveComparison()} but contrary to the latter <b>you can chain any iterable assertions after this method</b> (this is why this method exists).
* <p>
* The given {@link RecursiveComparisonConfiguration} is used to tweak the comparison behavior, for example by {@link RecursiveComparisonConfiguration#ignoreCollectionOrder(boolean) ignoring collection order}.
* <p>
* RecursiveComparisonConfiguration exposes a {@link RecursiveComparisonConfiguration.Builder builder} to ease setting the comparison behaviour,
* call {@link RecursiveComparisonConfiguration#builder() RecursiveComparisonConfiguration.builder()} to start building your configuration.
* <p>
* There are differences between this approach and {@link #usingRecursiveComparison()}:
* <ul>
* <li>contrary to {@link RecursiveComparisonAssert}, you can chain any iterable assertions after this method.</li>
* <li><b>no</b> comparators registered with {@link AbstractIterableAssert#usingComparatorForType(Comparator, Class)} will be used, you need to register them in the configuration object.</li>
* <li>the assertion errors won't be as detailed as {@link RecursiveComparisonAssert#isEqualTo(Object)} which shows the field differences.</li>
* </ul>
* <p>
* This last point makes sense, take the {@link #contains(Object...)} assertion, it would not be relevant to report the differences of all the iterable's elements differing from the values to look for.
* <p>
* Example:
* <pre><code class='java'> public record Person(String name, boolean hasPhd) {}
*
* Person drSheldon = new Person("Sheldon Cooper", true);
* Person drLeonard = new Person("Leonard Hofstadter", true);
* Person drRaj = new Person("Raj Koothrappali", true);
*
* Person sheldon = new Person("Sheldon Cooper", false);
* Person leonard = new Person("Leonard Hofstadter", false);
* Person raj = new Person("Raj Koothrappali", false);
* Person howard = new Person("Howard Wolowitz", false);
*
* List<Person> doctors = List.of(drSheldon, drLeonard, drRaj);
*
* var configuration = RecursiveComparisonConfiguration.builder().withIgnoredFields("hasPhd").build();
*
* // assertion succeeds: as both lists contains equivalent items in order since we ignored the hasPhd field
* assertThat(doctors).usingRecursiveFieldByFieldElementComparator(configuration)
* .containsExactlyElementsOf(List.of(sheldon, leonard, raj));
*
* // assertion fails because leonard names are different.
* Person misspelledLeonard = new Person("Leonard Ofstater", true);
* assertThat(doctors).usingRecursiveFieldByFieldElementComparator(configuration)
* .contains(leonard);
*
* // assertion fails because howard is missing and leonard is not expected.
* assertThat(doctors).usingRecursiveFieldByFieldElementComparator(configuration)
* .containsExactlyInAnyOrderElementsOf(List.of(howard, sheldon, raj));</code></pre>
*
* A detailed documentation for the recursive comparison is available here: <a href="https://assertj.github.io/doc/#assertj-core-recursive-comparison">https://assertj.github.io/doc/#assertj-core-recursive-comparison</a>.
* <p>
* A point worth mentioning: <b>elements order does matter if the expected iterable is ordered</b>, for example comparing a {@code Set<Person>} to a {@code List<Person>} fails as {@code List} is ordered and {@code Set} is not.<br>
* The ordering can be ignored by calling {@link RecursiveComparisonAssert#ignoringCollectionOrder ignoringCollectionOrder} allowing ordered/unordered iterable comparison, note that {@link RecursiveComparisonAssert#ignoringCollectionOrder ignoringCollectionOrder} is applied recursively on any nested iterable fields, if this behavior is too generic,
* use the more fine-grained {@link RecursiveComparisonAssert#ignoringCollectionOrderInFields(String...) ignoringCollectionOrderInFields} or
* {@link RecursiveComparisonAssert#ignoringCollectionOrderInFieldsMatchingRegexes(String...) ignoringCollectionOrderInFieldsMatchingRegexes}.
*
* @param configuration the recursive comparison configuration.
*
* @return {@code this} assertion object.
* @since 3.17.0
* @see RecursiveComparisonConfiguration
*/
public SELF usingRecursiveFieldByFieldElementComparator(RecursiveComparisonConfiguration configuration) {
return usingElementComparator(new ConfigurableRecursiveFieldByFieldComparator(configuration));
}
/**
* Enable using a recursive field by field comparison strategy when calling the chained {@link RecursiveComparisonAssert},
* <p>
* There are differences between this approach and {@link #usingRecursiveFieldByFieldElementComparator(RecursiveComparisonConfiguration)}:
* <ul>
* <li>you can only chain {@link RecursiveComparisonAssert} assertions (basically {@link RecursiveComparisonAssert#isEqualTo(Object) isEqualTo}) and (basically {@link RecursiveComparisonAssert#isNotEqualTo(Object) isNotEqualTo}), no iterable assertions.</li>
* <li>{@link RecursiveComparisonAssert#isEqualTo(Object) isEqualTo} assertion error will report all field differences (very detailed).</li>
* <li>no comparators registered with {@link AbstractIterableAssert#usingComparatorForType(Comparator, Class)} will be used, you need to register them in chained call like {@link RecursiveComparisonAssert#withComparatorForType(Comparator, Class)}.</li>
* </ul>
* <p>
* If you need to chain iterable assertions using recursive comparisons call {@link #usingRecursiveFieldByFieldElementComparator(RecursiveComparisonConfiguration)} instead.
* <p>
* Example:
* <pre><code class='java'> public
|
AbstractIterableAssert
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsBlock.java
|
{
"start": 2729,
"end": 4514
}
|
class ____ the state but does no writing.
*
* @param buffer the buffer containing the bytes to write.
* @param offset the offset in the buffer to start writing from.
* @param length the number of bytes to write.
* @return the number of bytes written.
* @throws IOException if an I/O error occurs.
*/
public int write(byte[] buffer, int offset, int length) throws IOException {
return activeBlock.write(buffer, offset, length);
}
/**
* Returns remainingCapacity.
* @return remainingCapacity.
*/
public int remainingCapacity() {
return activeBlock.remainingCapacity();
}
/**
* Returns the offset of the block.
*
* @return the offset of the block.
*/
public Long getOffset() {
return offset;
}
@Override
public void close() throws IOException {
if (activeBlock != null) {
activeBlock.close();
}
}
/**
* Returns blockId for the block.
* @return blockId.
*/
public String getBlockId() {
throw new IllegalArgumentException("DFS client does not support blockId");
}
/**
* Gets the AbfsOutputStream.
*
* @return the AbfsOutputStream.
*/
public AbfsOutputStream getOutputStream() {
return outputStream;
}
/**
* Sets the AbfsOutputStream.
*
* @param outputStream the AbfsOutputStream to set.
*/
public void setOutputStream(final AbfsOutputStream outputStream) {
this.outputStream = outputStream;
}
/**
* Returns the block entry.
*
* @return the block entry.
*/
public BlockEntry getBlockEntry() {
return blockEntry;
}
/**
* Sets the block entry.
*
* @param blockEntry the block entry to set.
*/
public void setBlockEntry(final BlockEntry blockEntry) {
this.blockEntry = blockEntry;
}
}
|
verifies
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/InaccessibleTypeBindsTest.java
|
{
"start": 3092,
"end": 3963
}
|
interface ____ {",
" Foo getFoo();",
"}");
CompilerTests.daggerCompiler(foo, fooImpl, module, component)
.withProcessingOptions(compilerMode.processorOptions())
.compile(
subject -> {
subject.hasErrorCount(0);
subject.generatedSource(goldenFileRule.goldenSource("test/DaggerTestComponent"));
});
}
// Interface is accessible, but the impl is not. Used with a binds in a loop to see if there are
// type issues from doing an assignment to the delegate factory e.g.
// DelegateFactory.setDelegate(provider, new SwitchingProvider<FooImpl>(...));
@Test
public void inaccessibleTypeBoundInALoop() throws Exception {
Source foo =
CompilerTests.javaSource(
"test.Foo",
"package test;",
"",
"public
|
TestComponent
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/NamenodeProtocolPB.java
|
{
"start": 1701,
"end": 1788
}
|
interface ____ extends
NamenodeProtocolService.BlockingInterface {
}
|
NamenodeProtocolPB
|
java
|
apache__camel
|
components/camel-aws/camel-aws2-ddb/src/main/java/org/apache/camel/component/aws2/ddb/Ddb2Operations.java
|
{
"start": 856,
"end": 1030
}
|
enum ____ {
BatchGetItems,
DeleteItem,
DeleteTable,
DescribeTable,
GetItem,
PutItem,
Query,
Scan,
UpdateItem,
UpdateTable
}
|
Ddb2Operations
|
java
|
apache__camel
|
components/camel-spring-parent/camel-spring-rabbitmq/src/test/java/org/apache/camel/component/springrabbit/SpringRabbitMQSendDynamicAwareTest.java
|
{
"start": 1127,
"end": 2757
}
|
class ____ extends CamelTestSupport {
SpringRabbitMQSendDynamicAware springRabbitMQSendDynamicAware;
@Override
public void doPostSetup() {
this.springRabbitMQSendDynamicAware = new SpringRabbitMQSendDynamicAware();
}
@Test
public void testUriParsing() throws Exception {
this.springRabbitMQSendDynamicAware.setScheme("spring-rabbitmq");
Exchange exchange = createExchangeWithBody("The Body");
SendDynamicAware.DynamicAwareEntry entry = new SendDynamicAware.DynamicAwareEntry(
"spring-rabbitmq:destination", "spring-rabbitmq:${header.test}", null, null);
Processor processor = this.springRabbitMQSendDynamicAware.createPreProcessor(createExchangeWithBody("Body"), entry);
processor.process(exchange);
assertEquals("destination", exchange.getMessage().getHeader(SpringRabbitMQConstants.EXCHANGE_OVERRIDE_NAME));
}
@Test
public void testSlashedUriParsing() throws Exception {
this.springRabbitMQSendDynamicAware.setScheme("spring-rabbitmq");
Exchange exchange = createExchangeWithBody("The Body");
SendDynamicAware.DynamicAwareEntry entry = new SendDynamicAware.DynamicAwareEntry(
"spring-rabbitmq://destination", "spring-rabbitmq://${header.test}", null, null);
Processor processor = this.springRabbitMQSendDynamicAware.createPreProcessor(createExchangeWithBody("Body"), entry);
processor.process(exchange);
assertEquals("destination", exchange.getMessage().getHeader(SpringRabbitMQConstants.EXCHANGE_OVERRIDE_NAME));
}
}
|
SpringRabbitMQSendDynamicAwareTest
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/sps/ExternalStoragePolicySatisfier.java
|
{
"start": 1986,
"end": 2191
}
|
class ____ {
public static final Logger LOG = LoggerFactory.getLogger(ExternalStoragePolicySatisfier.class);
private ExternalStoragePolicySatisfier() {
// This is just a
|
ExternalStoragePolicySatisfier
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/state/internals/StoreSerdeInitializer.java
|
{
"start": 2961,
"end": 3053
}
|
interface ____<T> {
Serde<T> prepareSerde(Serde<T> serde, SerdeGetter getter);
}
|
PrepareFunc
|
java
|
google__guava
|
android/guava/src/com/google/common/collect/Ordering.java
|
{
"start": 6176,
"end": 7163
}
|
class ____<T extends @Nullable Object> implements Comparator<T> {
// Natural order
/**
* Returns a serializable ordering that uses the natural order of the values. The ordering throws
* a {@link NullPointerException} when passed a null parameter.
*
* <p>The type specification is {@code <C extends Comparable>}, instead of the technically correct
* {@code <C extends Comparable<? super C>>}, to support legacy types from before Java 5.
*
* <p><b>Java 8+ users:</b> use {@link Comparator#naturalOrder} instead.
*/
@SuppressWarnings({"unchecked", "rawtypes"})
// TODO(kevinb): right way to explain this??
// plus https://github.com/google/guava/issues/989
public static <C extends Comparable> Ordering<C> natural() {
return (Ordering<C>) NaturalOrdering.INSTANCE;
}
// Static factories
/**
* Returns an ordering based on an <i>existing</i> comparator instance. Note that it is
* unnecessary to create a <i>new</i> anonymous inner
|
Ordering
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/util/xml/AbstractStaxXMLReaderTests.java
|
{
"start": 9260,
"end": 9620
}
|
class ____ implements InvocationArgumentsAdapter {
@Override
public Object[] adaptArguments(Object[] arguments) {
for (int i = 0; i < arguments.length; i++) {
if (arguments[i] instanceof Attributes) {
arguments[i] = new PartialAttributes((Attributes) arguments[i]);
}
}
return arguments;
}
}
private static
|
PartialAttributesAdapter
|
java
|
jhy__jsoup
|
src/main/java/org/jsoup/nodes/Range.java
|
{
"start": 4168,
"end": 6778
}
|
class ____ {
private final int pos, lineNumber, columnNumber;
/**
Create a new Position object. Called by the TreeBuilder if source position tracking is on.
* @param pos position index
* @param lineNumber line number
* @param columnNumber column number
*/
public Position(int pos, int lineNumber, int columnNumber) {
this.pos = pos;
this.lineNumber = lineNumber;
this.columnNumber = columnNumber;
}
/**
Gets the position index (0-based) of the original input source that this Position was read at. This tracks the
total number of characters read into the source at this position, regardless of the number of preceding lines.
* @return the position, or {@code -1} if untracked.
*/
public int pos() {
return pos;
}
/**
Gets the line number (1-based) of the original input source that this Position was read at.
* @return the line number, or {@code -1} if untracked.
*/
public int lineNumber() {
return lineNumber;
}
/**
Gets the cursor number (1-based) of the original input source that this Position was read at. The cursor number
resets to 1 on every new line.
* @return the cursor number, or {@code -1} if untracked.
*/
public int columnNumber() {
return columnNumber;
}
/**
Test if this position was tracked during parsing.
* @return true if this was tracked during parsing, false otherwise (and all fields will be {@code -1}).
*/
public boolean isTracked() {
return this != UntrackedPos;
}
/**
Gets a String presentation of this Position, in the format {@code line,column:pos}.
* @return a String
*/
@Override
public String toString() {
return lineNumber + "," + columnNumber + ":" + pos;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Position position = (Position) o;
if (pos != position.pos) return false;
if (lineNumber != position.lineNumber) return false;
return columnNumber == position.columnNumber;
}
@Override
public int hashCode() {
return Objects.hash(pos, lineNumber, columnNumber);
}
}
public static
|
Position
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/expressions/converter/converters/ArrayConverter.java
|
{
"start": 1501,
"end": 2179
}
|
class ____ extends CustomizedConverter {
@Override
public RexNode convert(CallExpression call, CallExpressionConvertRule.ConvertContext context) {
List<RexNode> childrenRexNode = toRexNodes(context, call.getChildren());
RelDataType relDataType =
context.getTypeFactory()
.createFieldTypeFromLogicalType(call.getOutputDataType().getLogicalType());
return context.getRelBuilder()
.getRexBuilder()
.makeCall(
relDataType,
FlinkSqlOperatorTable.ARRAY_VALUE_CONSTRUCTOR,
childrenRexNode);
}
}
|
ArrayConverter
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/rerank/IbmWatsonxRerankModelTests.java
|
{
"start": 579,
"end": 1121
}
|
class ____ extends ESTestCase {
public static IbmWatsonxRerankModel createModel(String model, String projectId, URI uri, String apiVersion, String apiKey) {
return new IbmWatsonxRerankModel(
"id",
TaskType.RERANK,
"service",
new IbmWatsonxRerankServiceSettings(uri, apiVersion, model, projectId, null),
new IbmWatsonxRerankTaskSettings(2, true, 100),
new DefaultSecretSettings(new SecureString(apiKey.toCharArray()))
);
}
}
|
IbmWatsonxRerankModelTests
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/state/filesystem/FsSegmentDataInputStream.java
|
{
"start": 1061,
"end": 1375
}
|
class ____ a {@link org.apache.flink.util.WrappingProxy} for {@link FSDataInputStream} that is
* used to read from a file segment. It is opened with a starting position of the file. It treats
* the argument of seek(long) as an offset relative to the starting position, rather than an
* absolute value.
*/
public
|
is
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/initializers/annotation/BarConfig.java
|
{
"start": 893,
"end": 957
}
|
class ____ {
@Bean
String bar() {
return "bar";
}
}
|
BarConfig
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/test/java/io/vertx/tests/pool/ConnectionPoolTest.java
|
{
"start": 1253,
"end": 38662
}
|
class ____ extends VertxTestBase {
VertxInternal vertx;
@Override
public void setUp() throws Exception {
super.setUp();
this.vertx = (VertxInternal) super.vertx;
}
@Test
public void testConnect() {
ContextInternal context = vertx.createEventLoopContext();
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 10 }, 10);
Connection expected = new Connection();
pool
.acquire(context, 0, onSuccess2(lease -> {
assertSame(expected, lease.get());
assertEquals(0, pool.requests());
testComplete();
}));
assertEquals(1, pool.requests());
ConnectionRequest request = mgr.assertRequest();
assertSame(context.nettyEventLoop(), request.context.nettyEventLoop());
request.connect(expected, 0);
await();
}
@Test
public void testAcquireRecycledConnection() throws Exception {
ContextInternal context = vertx.createEventLoopContext();
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 10 });
Connection expected = new Connection();
CountDownLatch latch = new CountDownLatch(1);
pool
.acquire(context, 0, onSuccess2(lease -> {
lease.recycle();
latch.countDown();
}));
ConnectionRequest request = mgr.assertRequest();
assertSame(context.nettyEventLoop(), request.context.nettyEventLoop());
request.connect(expected, 0);
awaitLatch(latch);
pool.acquire(context, 0, onSuccess2(lease -> {
assertSame(expected, lease.get());
testComplete();
}));
await();
}
@Test
public void testRecycleRemovedConnection() throws Exception {
ContextInternal context = vertx.createEventLoopContext();
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 10 }, 10);
Connection expected1 = new Connection();
Future<Lease<Connection>> fut = Future.future(p -> pool.acquire(context, 0, p));
ConnectionRequest request1 = mgr.assertRequest();
request1.connect(expected1, 0);
CountDownLatch latch = new CountDownLatch(1);
fut.onComplete(onSuccess(lease -> {
request1.listener.onRemove();
lease.recycle();
latch.countDown();
}));
awaitLatch(latch);
Connection expected2 = new Connection();
pool.acquire(context, 0, onSuccess2(lease -> {
assertSame(expected2, lease.get());
testComplete();
}));
ConnectionRequest request2 = mgr.assertRequest();
request2.connect(expected2, 0);
await();
}
@Test
public void testConcurrency() throws Exception {
ContextInternal context = vertx.createEventLoopContext();
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 10 }, 10);
Connection expected = new Connection();
CountDownLatch latch = new CountDownLatch(1);
pool
.acquire(context, 0, onSuccess2(conn -> {
latch.countDown();
}));
ConnectionRequest request = mgr.assertRequest();
request.concurrency(2).connect(expected, 0);
awaitLatch(latch);
pool.acquire(context, 0, onSuccess2(lease -> {
assertSame(lease.get(), expected);
testComplete();
}));
await();
}
@Test
public void testIncreaseConcurrency() throws Exception {
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 1 });
ContextInternal ctx = vertx.createEventLoopContext();
Connection conn1 = new Connection();
CountDownLatch l1 = new CountDownLatch(1);
pool.acquire(ctx, 0, onSuccess2(lease -> l1.countDown()));
CountDownLatch l2 = new CountDownLatch(1);
pool
.acquire(ctx, 0, onSuccess2(lease -> {
l2.countDown();
}));
CountDownLatch l3 = new CountDownLatch(1);
pool.acquire(ctx, 0, onSuccess2(lease -> l3.countDown()));
ConnectionRequest request = mgr.assertRequest();
request.connect(conn1, 0);
awaitLatch(l1);
assertEquals(1, l2.getCount());
request.listener.onConcurrencyChange(2);
awaitLatch(l2);
request.listener.onConcurrencyChange(3);
awaitLatch(l3);
}
@Test
public void testSatisfyPendingWaitersWithExtraConcurrency() throws Exception {
ContextInternal context = vertx.createEventLoopContext();
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 1 }, 2);
Connection expected = new Connection();
AtomicInteger seq = new AtomicInteger();
pool
.acquire(context, 0, onSuccess2(lease -> {
assertSame(lease.get(), expected);
assertEquals(0, seq.getAndIncrement());
}));
pool
.acquire(context, 0, onSuccess2(lease -> {
assertSame(lease.get(), expected);
assertEquals(1, seq.getAndIncrement());
testComplete();
}));
ConnectionRequest request = mgr.assertRequest();
request.concurrency(2).connect(expected, 0);
await();
}
@Test
public void testEmptyConcurrency() {
ContextInternal context = vertx.createEventLoopContext();
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 1 }, 2);
Connection expected = new Connection();
AtomicInteger seq = new AtomicInteger();
pool
.acquire(context, 0, onSuccess2(lease -> {
assertSame(lease.get(), expected);
assertEquals(1, seq.getAndIncrement());
}));
pool
.acquire(context, 0, onSuccess2(lease -> {
assertSame(lease.get(), expected);
assertEquals(2, seq.getAndIncrement());
testComplete();
}));
ConnectionRequest request = mgr.assertRequest();
request.concurrency(0).connect(expected, 0);
assertEquals(0, seq.getAndIncrement());
request.concurrency(2);
await();
}
@Test
public void testDecreaseConcurrency() throws Exception {
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 1 });
ContextInternal ctx = vertx.createEventLoopContext();
Connection conn1 = new Connection();
CountDownLatch l1 = new CountDownLatch(2);
CountDownLatch l2 = new CountDownLatch(1);
Lease<Connection>[] leases = new Lease[3];
pool
.acquire(ctx, 0, onSuccess2(lease -> {
leases[0] = lease;
l1.countDown();
}));
pool
.acquire(ctx, 0, onSuccess2(lease -> {
leases[1] = lease;
l1.countDown();
}));
pool
.acquire(ctx, 0, onSuccess2(lease -> {
leases[2] = lease;
l2.countDown();
}));
ConnectionRequest request = mgr.assertRequest();
request.concurrency(2).connect(conn1, 0);
awaitLatch(l1);
assertEquals(1, l2.getCount());
request.listener.onConcurrencyChange(1);
ctx.runOnContext(v -> {
leases[0].recycle();
assertEquals(1, l2.getCount());
leases[1].recycle();
assertEquals(0, l2.getCount());
testComplete();
});
await();
}
@Test
public void testWaiter() throws Exception {
ContextInternal ctx1 = vertx.createEventLoopContext();
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 1 });
Connection expected = new Connection();
CompletableFuture<Lease<Connection>> latch = new CompletableFuture<>();
pool.acquire(ctx1, 0, onSuccess2(latch::complete));
ConnectionRequest request = mgr.assertRequest();
request.connect(expected, 0);
Lease<Connection> lease1 = latch.get(10, TimeUnit.SECONDS);
AtomicBoolean recycled = new AtomicBoolean();
ContextInternal ctx2 = vertx.createEventLoopContext();
pool
.acquire(ctx2, 0, onSuccess2(lease2 -> {
assertSame(ctx1.nettyEventLoop(), ((ContextInternal) Vertx.currentContext()).nettyEventLoop());
assertTrue(recycled.get());
testComplete();
}));
assertEquals(1, pool.waiters());
recycled.set(true);
lease1.recycle();
await();
}
@Test
public void testRemoveSingleConnection() throws Exception {
ContextInternal ctx1 = vertx.createEventLoopContext();
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 1 }, 1);
Connection conn = new Connection();
CompletableFuture<Lease<Connection>> latch = new CompletableFuture<>();
pool.acquire(ctx1, 0, onSuccess2(latch::complete));
ConnectionRequest request = mgr.assertRequest();
request.connect(conn, 0);
latch.get(10, TimeUnit.SECONDS);
request.listener.onRemove();
assertEquals(0, pool.size());
assertEquals(0, pool.capacity());
}
@Test
public void testRemoveFirstConnection() throws Exception {
ContextInternal ctx = vertx.createEventLoopContext();
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 2 }, 2);
Connection conn1 = new Connection();
CompletableFuture<Lease<Connection>> latch1 = new CompletableFuture<>();
pool.acquire(ctx, 0, onSuccess2(latch1::complete));
Connection conn2 = new Connection();
CompletableFuture<Lease<Connection>> latch2 = new CompletableFuture<>();
pool.acquire(ctx, 0, onSuccess2(latch2::complete));
ConnectionRequest request1 = mgr.assertRequest();
request1.connect(conn1, 0);
ConnectionRequest request2 = mgr.assertRequest();
request2.connect(conn2, 0);
latch1.get(10, TimeUnit.SECONDS);
request1.listener.onRemove();
assertEquals(1, pool.size());
assertEquals(1, pool.capacity());
}
@Test
public void testRemoveSingleConnectionWithWaiter() throws Exception {
ContextInternal ctx1 = vertx.createEventLoopContext();
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 1 });
Connection connection1 = new Connection();
CompletableFuture<Lease<Connection>> latch = new CompletableFuture<>();
pool
.acquire(ctx1, 0, onSuccess2(latch::complete));
ConnectionRequest request1 = mgr.assertRequest();
request1.connect(connection1, 0);
Lease<Connection> lease1 = latch.get(10, TimeUnit.SECONDS);
assertSame(connection1, lease1.get());
AtomicBoolean evicted = new AtomicBoolean();
Connection conn2 = new Connection();
ContextInternal ctx2 = vertx.createEventLoopContext();
pool
.acquire(ctx2, 0, onSuccess2(lease2 -> {
assertSame(ctx2.nettyEventLoop(), ((ContextInternal) Vertx.currentContext()).nettyEventLoop());
assertTrue(evicted.get());
assertSame(conn2, lease2.get());
testComplete();
}));
assertEquals(1, pool.waiters());
evicted.set(true);
request1.listener.onRemove();
ConnectionRequest request2 = mgr.assertRequest();
request2.connect(conn2, 0);
await();
}
@Test
public void testConnectFailureWithPendingWaiter() throws Exception {
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 1, 2 }, 2);
Throwable failure = new Throwable();
Connection expected = new Connection();
CountDownLatch latch = new CountDownLatch(1);
ContextInternal ctx1 = vertx.createEventLoopContext();
pool
.acquire(ctx1, 0, onFailure2(cause -> {
assertSame(failure, cause);
assertEquals(1, pool.requests());
latch.countDown();
}));
ContextInternal ctx2 = vertx.createEventLoopContext();
pool.acquire(ctx2, 1, onSuccess2(lease -> {
assertSame(expected, lease.get());
testComplete();
}));
ConnectionRequest request1 = mgr.assertRequest();
assertEquals(2, pool.capacity());
request1.fail(failure);
awaitLatch(latch);
assertEquals(1, pool.capacity());
ConnectionRequest request2 = mgr.assertRequest();
request2.connect(expected, 0);
await();
}
@Test
public void testExpireFirst() throws Exception {
assertEquals(Arrays.asList(0), testExpire(1, 10, 0));
assertEquals(Arrays.asList(0), testExpire(2, 10, 0));
assertEquals(Arrays.asList(0), testExpire(3, 10, 0));
}
@Test
public void testExpireLast() throws Exception {
assertEquals(Arrays.asList(0), testExpire(1, 10, 0));
assertEquals(Arrays.asList(1), testExpire(2, 10, 1));
assertEquals(Arrays.asList(2), testExpire(3, 10, 2));
}
@Test
public void testExpireMiddle() throws Exception {
assertEquals(Arrays.asList(1), testExpire(3, 10, 1));
}
@Test
public void testExpireSome() throws Exception {
assertEquals(Arrays.asList(2, 1), testExpire(3, 10, 1, 2));
assertEquals(Arrays.asList(2, 1, 0), testExpire(3, 10, 0, 1, 2));
assertEquals(Arrays.asList(1, 0), testExpire(3, 10, 0, 1));
}
private List<Integer> testExpire(int num, int max, int... recycled) throws Exception {
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { max }, max);
CountDownLatch latch = new CountDownLatch(num);
List<Lease<Connection>> leases = new ArrayList<>();
ContextInternal ctx = vertx.createEventLoopContext();
for (int i = 0;i < num;i++) {
Connection expected = new Connection();
pool
.acquire(ctx, 0, onSuccess2(lease -> {
assertSame(expected, lease.get());
leases.add(lease);
latch.countDown();
}));
mgr.assertRequest().connect(expected, 0);
}
awaitLatch(latch);
for (int i = 0;i < recycled.length;i++) {
leases.get(recycled[i]).recycle();
}
CompletableFuture<List<Integer>> cf = new CompletableFuture<>();
pool
.evict(c -> true, (res, err) -> {
if (err == null) {
// assertEquals(num - recycled.length, pool.capacity());
List<Integer> lst = new ArrayList<>();
List<Connection> all = leases.stream().map(Lease::get).collect(Collectors.toList());
res.forEach(c -> lst.add(all.indexOf(c)));
cf.complete(lst);
} else {
cf.completeExceptionally(err);
}
});
return cf.get();
}
@Test
public void testRemoveEvicted() throws Exception {
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 1 }, 1);
// List<Lease<Connection>> leases = new ArrayList<>();
ContextInternal ctx = vertx.createEventLoopContext();
CountDownLatch latch1 = new CountDownLatch(1);
pool
.acquire(ctx, 0, onSuccess2(lease -> {
lease.recycle();
latch1.countDown();
}));
ConnectionRequest request = mgr.assertRequest();
Connection conn = new Connection();
request.connect(conn, 0);
awaitLatch(latch1);
CountDownLatch latch2 = new CountDownLatch(1);
pool.evict(c -> c == conn, onSuccess2(l -> latch2.countDown()));
awaitLatch(latch2);
request.listener.onRemove();
assertEquals(0, pool.size());
}
@Test
public void testSynchronousEviction() throws Exception {
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 1 }, 1);
ContextInternal ctx = vertx.createEventLoopContext();
CountDownLatch latch1 = new CountDownLatch(1);
CountDownLatch latch2 = new CountDownLatch(1);
CountDownLatch latch3 = new CountDownLatch(1);
pool
.acquire(ctx, 0, onSuccess2(lease -> {
lease.recycle();
latch1.countDown();
}));
ConnectionRequest request = mgr.assertRequest();
Connection conn1 = new Connection();
request.connect(conn1, 0);
awaitLatch(latch1);
Connection conn2 = new Connection();
pool.evict(candidate -> {
assertSame(candidate, conn1);
pool
.acquire(ctx, 0, onSuccess2(lease -> {
Connection c2 = lease.get();
assertSame(conn2, c2);
latch3.countDown();
}));
return true;
}, onSuccess2(list -> {
latch2.countDown();
}));
awaitLatch(latch2);
request = mgr.assertRequest();
request.connect(conn2, 0);
awaitLatch(latch3);
}
@Test
public void testConnectionInProgressShouldNotBeEvicted() {
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 1 }, 5);
ContextInternal ctx = vertx.createEventLoopContext();
pool.acquire(ctx, 0, (res, err) -> {});
mgr.assertRequest();
pool
.evict(c -> {
fail();
return false;
}, onSuccess2(v -> testComplete()));
await();
}
@Test
public void testRecycleRemoveConnection() throws Exception {
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 1 }, 1);
Connection expected = new Connection();
CompletableFuture<Lease<Connection>> latch = new CompletableFuture<>();
ContextInternal ctx1 = vertx.createEventLoopContext();
pool.acquire(ctx1, 0, onSuccess2(latch::complete));
ConnectionRequest request = mgr.assertRequest();
request.connect(expected, 0);
Lease<Connection> lease = latch.get();
request.listener.onRemove();
assertEquals(0, pool.size());
lease.recycle();
assertEquals(0, pool.size());
}
@Test
public void testRecycleMultiple() throws Exception {
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 1 }, 1);
Connection expected = new Connection();
CompletableFuture<Lease<Connection>> latch = new CompletableFuture<>();
ContextInternal ctx1 = vertx.createEventLoopContext();
pool.acquire(ctx1, 0, onSuccess2(latch::complete));
ConnectionRequest request = mgr.assertRequest();
request.connect(expected, 0);
Lease<Connection> lease = latch.get();
lease.recycle();
try {
lease.recycle();
fail();
} catch (IllegalStateException ignore) {
}
}
@Test
public void testMaxWaiters() {
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 1 }, 5);
ContextInternal ctx = vertx.createEventLoopContext();
for (int i = 0;i < (5);i++) {
pool
.acquire(ctx, 0, (res, err) -> fail());
}
pool
.acquire(ctx, 0, onFailure2(err -> {
assertTrue(err instanceof ConnectionPoolTooBusyException);
testComplete();
}));
await();
}
@Test
public void testHeterogeneousSizes() throws Exception {
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 5, 2 });
ContextInternal ctx = vertx.createEventLoopContext();
CountDownLatch latch = new CountDownLatch(5);
for (int i = 0;i < 5;i++) {
pool.acquire(ctx, 0, onSuccess2(lease -> latch.countDown()));
Connection conn = new Connection();
mgr.assertRequest().connect(conn, 0);
}
awaitLatch(latch);
assertEquals(10, pool.capacity());
pool
.acquire(ctx, 1, onSuccess2(lease -> {
}));
assertEquals(1, pool.waiters());
}
@Test
public void testClose() throws Exception {
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 2 }, 2);
ContextInternal ctx = vertx.createEventLoopContext();
Connection conn1 = new Connection();
pool
.acquire(ctx, 0, onSuccess2(lease -> {
}));
waitFor(3);
pool.acquire(ctx, 0, onFailure2(err -> complete()));
pool.acquire(ctx, 0, onFailure2(err -> complete()));
mgr.assertRequest().connect(conn1, 0);
mgr.assertRequest();
pool
.close(onSuccess2(lst -> {
assertEquals(2, lst.size());
assertEquals(0, pool.size());
complete();
}));
await();
}
@Test
public void testCloseTwice() throws Exception {
AtomicBoolean isReentrant = new AtomicBoolean();
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 2 }, 2);
CountDownLatch latch = new CountDownLatch(1);
pool
.close(onSuccess2(lst -> {
AtomicBoolean inCallback = new AtomicBoolean();
pool
.close(onFailure2(err -> {
isReentrant.set(inCallback.get());
latch.countDown();
}));
}));
awaitLatch(latch);
assertFalse(isReentrant.get());
}
@Test
public void testUseAfterClose() throws Exception {
waitFor(3);
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 1 });
ContextInternal ctx = vertx.createEventLoopContext();
CompletableFuture<PoolWaiter<Connection>> waiterFut = new CompletableFuture<>();
pool.acquire(ctx, new PoolWaiter.Listener<>() {
@Override
public void onConnect(PoolWaiter<Connection> waiter) {
waiterFut.complete(waiter);
}
}, 0, (res, err) -> {});
PoolWaiter<Connection> waiter = waiterFut.get(20, TimeUnit.SECONDS);
ConnectionRequest request = mgr.assertRequest();
CountDownLatch latch = new CountDownLatch(1);
pool
.close(onSuccess2(lst -> {
latch.countDown();
}));
awaitLatch(latch);
pool.evict(c -> true, onFailure2(err -> complete()));
pool.acquire(ctx, 0, onFailure2(err -> complete()));
pool.cancel(waiter, onFailure2(err -> complete()));
request.connect(new Connection(), 0);
await();
}
@Test
public void testAcquireClosedConnection() throws Exception {
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 1 });
ContextInternal context = vertx.createEventLoopContext();
pool.acquire(context, 0, onSuccess2(Lease::recycle));
Connection expected = new Connection();
ConnectionRequest request = mgr.assertRequest();
request.connect(expected, 0);
CountDownLatch latch1 = new CountDownLatch(1);
CountDownLatch latch2 = new CountDownLatch(1);
context.runOnContext(v -> {
pool.evict(conn -> {
// Make sure that the event-loop thread is busy and pool lock are borrowed
latch1.countDown();
try {
// Wait until the acquisition and removal tasks are enqueued
latch2.await();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
// When we return, the tasks will be executed by this thread
// but the acquisition callback is a pool post action executed after the removal task is executed
return false;
}, (res, err) -> {});
});
awaitLatch(latch1);
AtomicBoolean closed = new AtomicBoolean();
pool
.acquire(context, 0, onSuccess2(lease -> {
// Get not null closed connection
assertNotNull(lease.get());
assertTrue(closed.get());
testComplete();
}));
request.listener.onRemove();
closed.set(true);
latch2.countDown();
await();
}
@Test
public void testConnectSuccessAfterClose() {
testConnectResultAfterClose(true);
}
@Test
public void testConnectFailureAfterClose() {
testConnectResultAfterClose(false);
}
private void testConnectResultAfterClose(boolean success) {
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 1 });
ContextInternal ctx = vertx.createEventLoopContext();
AtomicInteger acquired = new AtomicInteger();
pool
.acquire(ctx, 0, (res, err) -> {
assertEquals(0, acquired.getAndIncrement());
});
assertEquals(1, pool.size());
ConnectionRequest request = mgr.assertRequest();
Future<List<Future<Connection>>> closeResult = Future.future(p -> pool.close(p));
Throwable cause = new Throwable();
Connection expected = new Connection();
if (success) {
request.connect(expected, 0);
} else {
request.fail(cause);
}
assertTrue(closeResult.isComplete());
List<Future<Connection>> connections = closeResult.result();
assertEquals(1, connections.size());
assertEquals(success, connections.get(0).succeeded());
assertEquals(0, pool.size());
if (success) {
assertEquals(expected, connections.get(0).result());
} else {
assertEquals(cause, connections.get(0).cause());
}
waitUntil(() -> acquired.get() == 1);
}
@Test
public void testCancelQueuedWaiters() throws Exception {
waitFor(1);
ContextInternal context = vertx.createEventLoopContext();
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 1 });
CompletableFuture<PoolWaiter<Connection>> w = new CompletableFuture<>();
pool
.acquire(context, 0, onSuccess2(lease -> {
}));
pool.acquire(context, new PoolWaiter.Listener<>() {
@Override
public void onEnqueue(PoolWaiter<Connection> waiter) {
w.complete(waiter);
}
}, 0, (res, err) -> fail());
PoolWaiter<Connection> waiter = w.get(10, TimeUnit.SECONDS);
pool
.cancel(waiter, onSuccess2(removed1 -> {
assertTrue(removed1);
assertEquals(0, pool.waiters());
pool
.cancel(waiter, onSuccess2(removed2 -> {
assertFalse(removed2);
assertEquals(0, pool.waiters());
testComplete();
}));
}));
await();
}
@Test
public void testCancelWaiterBeforeConnectionSuccess() throws Exception {
testCancelWaiterBeforeConnection(true, 0);
}
@Test
public void testCancelWaiterBeforeConnectionSuccessWithExtraWaiters() throws Exception {
testCancelWaiterBeforeConnection(true, 2);
}
@Test
public void testCancelWaiterBeforeConnectionFailure() throws Exception {
testCancelWaiterBeforeConnection(false, 0);
}
public void testCancelWaiterBeforeConnection(boolean success, int extra) throws Exception {
if (!success && extra > 0) {
throw new IllegalArgumentException();
}
waitFor(1);
ContextInternal context = vertx.createEventLoopContext();
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 1 }, 1 + extra);
CompletableFuture<PoolWaiter<Connection>> waiterLatch = new CompletableFuture<>();
pool.acquire(context, new PoolWaiter.Listener<>() {
@Override
public void onConnect(PoolWaiter<Connection> waiter) {
waiterLatch.complete(waiter);
}
}, 0, (res, err) -> fail());
waiterLatch.get(10, TimeUnit.SECONDS);
CountDownLatch enqueuedLatch = new CountDownLatch(extra);
CountDownLatch recycledLatch = new CountDownLatch(extra);
for (int i = 0; i < extra; i++) {
pool.acquire(context, new PoolWaiter.Listener<>() {
@Override
public void onEnqueue(PoolWaiter<Connection> waiter) {
enqueuedLatch.countDown();
}
}, 0, onSuccess2(conn -> {
conn.recycle();
recycledLatch.countDown();
}));
}
awaitLatch(enqueuedLatch);
ConnectionRequest request = mgr.assertRequest();
CountDownLatch latch = new CountDownLatch(1);
pool
.cancel(waiterLatch.get(10, TimeUnit.SECONDS), onSuccess2(removed -> {
assertTrue(removed);
latch.countDown();
}));
awaitLatch(latch);
if (success) {
request.connect(new Connection(), 0);
} else {
request.fail(new Throwable());
}
awaitLatch(recycledLatch);
// Check we can acquire the same connection again
CountDownLatch doneLatch = new CountDownLatch(extra);
for (int i = 0;i < extra;i++) {
pool
.acquire(context, 0, onSuccess2(conn -> {
doneLatch.countDown();
conn.recycle();
}));
}
awaitLatch(doneLatch);
}
@Test
public void testCancelWaiterAfterConnectionSuccess() throws Exception {
testCancelWaiterAfterConnectionSuccess(true);
}
@Test
public void testCancelWaiterAfterConnectionFailure() throws Exception {
testCancelWaiterAfterConnectionSuccess(false);
}
public void testCancelWaiterAfterConnectionSuccess(boolean success) throws Exception {
waitFor(1);
ContextInternal context = vertx.createEventLoopContext();
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 1 }, 1);
CompletableFuture<PoolWaiter<Connection>> w = new CompletableFuture<>();
CountDownLatch latch = new CountDownLatch(1);
pool.acquire(context, new PoolWaiter.Listener<>() {
@Override
public void onConnect(PoolWaiter<Connection> waiter) {
w.complete(waiter);
}
}, 0, (res, err) -> {
latch.countDown();
});
w.get(10, TimeUnit.SECONDS);
ConnectionRequest request = mgr.assertRequest();
if (success) {
request.connect(new Connection(), 0);
} else {
request.fail(new Throwable());
}
awaitLatch(latch);
pool
.cancel(w.get(10, TimeUnit.SECONDS), onSuccess2(removed -> {
assertFalse(removed);
testComplete();
}));
await();
}
@Test
public void testConnectionSelector() throws Exception {
waitFor(1);
ContextInternal context = vertx.createEventLoopContext();
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 2 });
CountDownLatch latch1 = new CountDownLatch(1);
pool
.acquire(context, 0, onSuccess2(lease -> {
lease.recycle();
latch1.countDown();
}));
Connection conn1 = new Connection();
mgr.assertRequest().connect(conn1, 0);
awaitLatch(latch1);
pool.connectionSelector((waiter, list) -> {
assertEquals(1, list.size());
PoolConnection<Connection> pooled = list.get(0);
assertEquals(1, pooled.available());
assertEquals(1, pooled.concurrency());
assertSame(conn1, pooled.get());
assertSame(context.nettyEventLoop(), pooled.context().nettyEventLoop());
assertSame(context, waiter.context());
return pooled;
});
pool
.acquire(context, 0, onSuccess2(lease -> {
testComplete();
}));
await();
}
@Test
public void testDefaultSelector() throws Exception {
ContextImpl context1 = (ContextImpl) vertx.createEventLoopContext();
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 10 }, 10);
CountDownLatch latch1 = new CountDownLatch(1);
pool
.acquire(context1, 0, onSuccess2(lease -> {
lease.recycle();
latch1.countDown();
}));
Connection expected = new Connection();
assertEquals(1, pool.requests());
ConnectionRequest request = mgr.assertRequest();
request.connect(expected, 0);
awaitLatch(latch1);
CountDownLatch latch2 = new CountDownLatch(1);
pool
.acquire(context1, 0, onSuccess2(lease -> {
assertEquals(expected, lease.get());
lease.recycle();
latch2.countDown();
}));
awaitLatch(latch2);
CountDownLatch latch3 = new CountDownLatch(1);
ContextInternal context2 = vertx
.contextBuilder()
.withEventLoop(context1.nettyEventLoop())
.withWorkerPool(context1.workerPool())
.withClassLoader(context1.classLoader())
.build();
pool
.acquire(context2, 0, onSuccess2(lease -> {
assertEquals(expected, lease.get());
lease.recycle();
latch3.countDown();
}));
awaitLatch(latch3);
}
@Test
public void testDefaultContextProviderUnwrap() {
ContextInternal context = vertx.createEventLoopContext();
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 10 }, 10);
pool
.acquire(context.duplicate(), 0, onSuccess2(lease -> {
}));
assertEquals(1, pool.requests());
ConnectionRequest request = mgr.assertRequest();
assertSame(context.nettyEventLoop(), request.context.nettyEventLoop());
}
@Test
public void testDefaultContextProviderReusesSameEventLoop() {
ContextInternal context = vertx.createWorkerContext();
ConnectionManager mgr = new ConnectionManager();
ConnectionPool<Connection> pool = ConnectionPool.pool(mgr, new int[] { 10 }, 10);
pool
.acquire(context.duplicate(), 0, onSuccess2(lease -> {
}));
assertEquals(1, pool.requests());
ConnectionRequest request = mgr.assertRequest();
assertSame(context.nettyEventLoop(), request.context.nettyEventLoop());
}
@Test
public void testPostTasksTrampoline() throws Exception {
int numAcquires = 5;
AtomicReference<ConnectionPool<Connection>> ref = new AtomicReference<>();
ContextInternal ctx = vertx.createEventLoopContext();
List<Integer> res = Collections.synchronizedList(new LinkedList<>());
AtomicInteger seq = new AtomicInteger();
CountDownLatch latch = new CountDownLatch(1 + numAcquires);
int[] count = new int[1];
ConnectionPool<Connection> pool = ConnectionPool.pool(new PoolConnector<Connection>() {
int reentrancy = 0;
@Override
public Future<ConnectResult<Connection>> connect(ContextInternal context, Listener listener) {
assertEquals(0, reentrancy++);
try {
int val = count[0]++;
if (val == 0) {
// Queue extra requests
for (int i = 0;i < numAcquires;i++) {
int num = seq.getAndIncrement();
ref
.get()
.acquire(ctx, 0, onFailure2(err -> {
res.add(num);
latch.countDown();
}));
}
assertEquals(1, count[0]);
}
return Future.failedFuture("failure");
} finally {
reentrancy--;
}
}
@Override
public boolean isValid(Connection connection) {
return true;
}
}, new int[]{1}, 1 + numAcquires);
ref.set(pool);
ctx.runOnContext(v -> {
int num = seq.getAndIncrement();
pool
.acquire(ctx, 0, onFailure2(err -> {
res.add(num);
latch.countDown();
}));
});
awaitLatch(latch);
assertEquals(1 + numAcquires, count[0]);
List<Integer> expected = IntStream.range(0, numAcquires + 1).boxed().collect(Collectors.toList());
assertEquals(expected, res);
}
@Test
public void testConcurrentPostTasksTrampoline() throws Exception {
AtomicReference<ConnectionPool<Connection>> ref1 = new AtomicReference<>();
AtomicReference<ConnectionPool<Connection>> ref2 = new AtomicReference<>();
ContextInternal ctx = vertx.createEventLoopContext();
List<Integer> res = Collections.synchronizedList(new LinkedList<>());
CountDownLatch latch = new CountDownLatch(4);
ConnectionPool<Connection> pool1 = ConnectionPool.pool(new PoolConnector<>() {
int count = 0;
int reentrancy = 0;
@Override
public Future<ConnectResult<Connection>> connect(ContextInternal context, Listener listener) {
assertEquals(0, reentrancy++);
try {
int val = count++;
if (val == 0) {
ref1
.get()
.acquire(ctx, 0, onFailure2(err -> {
res.add(1);
latch.countDown();
}));
ref2
.get()
.acquire(ctx, 0, onFailure2(err -> {
res.add(2);
latch.countDown();
}));
}
return Future.failedFuture("failure");
} finally {
reentrancy--;
}
}
@Override
public boolean isValid(Connection connection) {
return true;
}
}, new int[]{1}, 2);
ConnectionPool<Connection> pool2 = ConnectionPool.pool(new PoolConnector<>() {
int count = 0;
int reentrancy = 0;
@Override
public Future<ConnectResult<Connection>> connect(ContextInternal context, Listener listener) {
assertEquals(0, reentrancy++);
try {
int val = count++;
if (val == 0) {
ref2
.get()
.acquire(ctx, 0, onFailure2(err -> {
res.add(3);
latch.countDown();
}));
ref1
.get()
.acquire(ctx, 0, onFailure2(err -> {
res.add(4);
latch.countDown();
}));
}
return Future.failedFuture("failure");
} finally {
reentrancy--;
}
}
@Override
public boolean isValid(Connection connection) {
return true;
}
}, new int[]{1}, 2);
ref1.set(pool1);
ref2.set(pool2);
pool1.acquire(ctx, 0, onFailure2(err -> res.add(0)));
awaitLatch(latch);
// assertEquals(Arrays.asList(0, 2, 1, 3, 4), res);
}
static
|
ConnectionPoolTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SpConfiguration.java
|
{
"start": 404,
"end": 459
}
|
interface ____ a SAML Service Provider (SP).
*/
public
|
for
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/error/ShouldNotContain_create_Test.java
|
{
"start": 1694,
"end": 5091
}
|
class ____ {
@Test
void should_create_error_message() {
// GIVEN
ErrorMessageFactory factory = shouldNotContain(list("Yoda"), list("Luke", "Yoda"),
newLinkedHashSet("Yoda"));
// WHEN
String message = factory.create(new TextDescription("Test"), new StandardRepresentation());
// THEN
then(message).isEqualTo(format("[Test] %n" +
"Expecting%n" +
" [\"Yoda\"]%n" +
"not to contain%n" +
" [\"Luke\", \"Yoda\"]%n" +
"but found%n" +
" [\"Yoda\"]%n"));
}
@Test
void should_create_error_message_with_custom_comparison_strategy() {
// GIVEN
ErrorMessageFactory factory = shouldNotContain(list("Yoda"),
list("Luke", "Yoda"),
newLinkedHashSet("Yoda"),
new ComparatorBasedComparisonStrategy(CaseInsensitiveStringComparator.INSTANCE));
// WHEN
String message = factory.create(new TextDescription("Test"), new StandardRepresentation());
// THEN
then(message).isEqualTo(format("[Test] %n" +
"Expecting%n" +
" [\"Yoda\"]%n" +
"not to contain%n" +
" [\"Luke\", \"Yoda\"]%n" +
"but found%n [\"Yoda\"]%n" +
"when comparing values using CaseInsensitiveStringComparator"));
}
@Test
void should_create_error_message_for_file_directory() {
// GIVEN
File directory = mock(File.class);
given(directory.getAbsolutePath()).willReturn("root");
List<File> matchingContent = list(new File("root", "foo.txt"), new File("root", "bar.txt"));
ErrorMessageFactory factory = directoryShouldNotContain(directory, matchingContent, "glob:**.java");
// WHEN
String message = factory.create(new TextDescription("Test"));
// THEN
then(message).isEqualTo(format("[Test] %n" +
"Expecting directory:%n" +
" root%n" +
"not to contain any files matching glob:**.java but found some:%n" +
" [foo.txt, bar.txt]"));
}
@Test
void should_create_error_message_for_path_directory() {
// GIVEN
Path directory = Path.of("root");
List<Path> matchingContent = list(directory.resolve("foo.txt"), directory.resolve("bar.txt"));
ErrorMessageFactory factory = directoryShouldNotContain(directory, matchingContent, "glob:**.java");
// WHEN
String message = factory.create(new TextDescription("Test"));
// THEN
then(message).isEqualTo(format("[Test] %n" +
"Expecting directory:%n" +
" root%n" +
"not to contain any files matching glob:**.java but found some:%n" +
" [%s, %s]",
directory.resolve("foo.txt"), directory.resolve("bar.txt")));
}
}
|
ShouldNotContain_create_Test
|
java
|
spring-projects__spring-framework
|
spring-tx/src/test/java/org/springframework/transaction/interceptor/PlatformTransactionManagerFacade.java
|
{
"start": 1211,
"end": 1750
}
|
class ____ implements PlatformTransactionManager {
/**
* This member can be changed to change behavior class-wide.
*/
public static PlatformTransactionManager delegate;
@Override
public TransactionStatus getTransaction(@Nullable TransactionDefinition definition) {
return delegate.getTransaction(definition);
}
@Override
public void commit(TransactionStatus status) {
delegate.commit(status);
}
@Override
public void rollback(TransactionStatus status) {
delegate.rollback(status);
}
}
|
PlatformTransactionManagerFacade
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_containsAnyOf_Test.java
|
{
"start": 859,
"end": 1212
}
|
class ____ extends LongArrayAssertBaseTest {
@Override
protected LongArrayAssert invoke_api_method() {
return assertions.containsAnyOf(1L, 2L);
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertContainsAnyOf(getInfo(assertions), getActual(assertions), arrayOf(1L, 2L));
}
}
|
LongArrayAssert_containsAnyOf_Test
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/JSONTest3.java
|
{
"start": 277,
"end": 875
}
|
class ____ extends TestCase {
public void test_json() throws Exception {
ExtraProcessor extraProcessor = new ExtraProcessor() {
public void processExtra(Object object, String key, Object value) {
Model model = (Model) object;
model.attributes.put(key, value);
}
};
Model model = JSON.parseObject("{\"id\":1001}", (Type) Model.class, extraProcessor);
Assert.assertEquals(1, model.attributes.size());
Assert.assertEquals(1001, model.attributes.get("id"));
}
public static
|
JSONTest3
|
java
|
spring-projects__spring-security
|
oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/jackson/OAuth2AuthorizationServerJacksonModule.java
|
{
"start": 2433,
"end": 3723
}
|
class ____ extends SecurityJacksonModule {
public OAuth2AuthorizationServerJacksonModule() {
super(OAuth2AuthorizationServerJacksonModule.class.getName(), new Version(1, 0, 0, null, null, null));
}
@Override
public void configurePolymorphicTypeValidator(BasicPolymorphicTypeValidator.Builder builder) {
builder.allowIfSubType(OAuth2TokenFormat.class)
.allowIfSubType(OAuth2TokenExchangeActor.class)
.allowIfSubType(OAuth2TokenExchangeCompositeAuthenticationToken.class)
.allowIfSubType(SignatureAlgorithm.class)
.allowIfSubType(MacAlgorithm.class)
.allowIfSubType(OAuth2AuthorizationRequest.class)
.allowIfSubType(URL.class);
}
@Override
public void setupModule(SetupContext context) {
context.setMixIn(OAuth2TokenExchangeActor.class, OAuth2TokenExchangeActorMixin.class);
context.setMixIn(OAuth2AuthorizationRequest.class, OAuth2AuthorizationRequestMixin.class);
context.setMixIn(OAuth2TokenExchangeCompositeAuthenticationToken.class,
OAuth2TokenExchangeCompositeAuthenticationTokenMixin.class);
context.setMixIn(SignatureAlgorithm.class, JwsAlgorithmMixin.class);
context.setMixIn(MacAlgorithm.class, JwsAlgorithmMixin.class);
context.setMixIn(OAuth2TokenFormat.class, OAuth2TokenFormatMixin.class);
}
}
|
OAuth2AuthorizationServerJacksonModule
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/UndefinedEqualsTest.java
|
{
"start": 14933,
"end": 15277
}
|
class ____ {
void f(CharSequence a, String b) {
assertThat(a).isEqualTo(b);
assertThat(b).isEqualTo(a);
}
}
""")
.addOutputLines(
"Test.java",
"""
import static com.google.common.truth.Truth.assertThat;
|
Test
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java
|
{
"start": 1860,
"end": 5064
}
|
class ____ extends InternalOrder {
static final byte ID = 0;
private final SortOrder order;
private final AggregationPath path;
/**
* Create a new ordering strategy to sort by a sub-aggregation.
*
* @param path path to the sub-aggregation to sort on.
* @param asc direction to sort by: {@code true} for ascending, {@code false} for descending.
* @see AggregationPath
*/
Aggregation(String path, boolean asc) {
order = asc ? SortOrder.ASC : SortOrder.DESC;
this.path = AggregationPath.parse(path);
}
public AggregationPath path() {
return path;
}
@Override
public <T extends Bucket> Comparator<BucketAndOrd<T>> partiallyBuiltBucketComparator(Aggregator aggregator) {
try {
BucketComparator bucketComparator = path.bucketComparator(aggregator, order);
return (lhs, rhs) -> bucketComparator.compare(lhs.ord, rhs.ord);
} catch (IllegalArgumentException e) {
throw new AggregationExecutionException.InvalidPath("Invalid aggregation order path [" + path + "]. " + e.getMessage(), e);
}
}
@Override
public Comparator<Bucket> comparator() {
return (lhs, rhs) -> {
final SortValue l = path.resolveValue(lhs.getAggregations());
final SortValue r = path.resolveValue(rhs.getAggregations());
int compareResult = l.compareTo(r);
return order == SortOrder.ASC ? compareResult : -compareResult;
};
}
@Override
<B extends InternalMultiBucketAggregation.InternalBucket> Comparator<DelayedBucket<B>> delayedBucketComparator(
BiFunction<List<B>, AggregationReduceContext, B> reduce,
AggregationReduceContext reduceContext
) {
Comparator<Bucket> comparator = comparator();
/*
* Reduce the buckets if we haven't already so we can get at the
* sub-aggregations. With enough code we could avoid this but
* we haven't written that code....
*/
return (lhs, rhs) -> comparator.compare(lhs.reduced(reduce, reduceContext), rhs.reduced(reduce, reduceContext));
}
@Override
byte id() {
return ID;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return builder.startObject().field(path.toString(), order.toString()).endObject();
}
@Override
public int hashCode() {
return Objects.hash(path, order);
}
@Override
public boolean equals(Object obj) {
if (obj == null || getClass() != obj.getClass()) {
return false;
}
Aggregation other = (Aggregation) obj;
return Objects.equals(path, other.path) && Objects.equals(order, other.order);
}
}
/**
* {@link Bucket} ordering strategy to sort by multiple criteria.
*/
public static
|
Aggregation
|
java
|
apache__camel
|
components/camel-aws/camel-aws-bedrock/src/test/java/org/apache/camel/component/aws2/bedrock/runtime/stream/ConverseStreamHandlerTest.java
|
{
"start": 1171,
"end": 3130
}
|
class ____ {
@Test
void testCreateCompleteHandler() {
ConverseStreamHandler.StreamMetadata metadata = new ConverseStreamHandler.StreamMetadata();
StringBuilder fullText = new StringBuilder();
ConverseStreamResponseHandler handler = ConverseStreamHandler.createCompleteHandler(metadata, fullText);
assertNotNull(handler, "Handler should not be null");
assertNotNull(metadata, "Metadata should not be null");
assertNotNull(fullText, "Full text builder should not be null");
}
@Test
void testCreateChunksHandler() {
ConverseStreamHandler.StreamMetadata metadata = new ConverseStreamHandler.StreamMetadata();
List<String> chunks = new ArrayList<>();
ConverseStreamResponseHandler handler
= ConverseStreamHandler.createChunksHandler(metadata, chunks, null);
assertNotNull(handler, "Handler should not be null");
assertNotNull(metadata, "Metadata should not be null");
assertNotNull(chunks, "Chunks list should not be null");
}
@Test
void testStreamMetadata() {
ConverseStreamHandler.StreamMetadata metadata = new ConverseStreamHandler.StreamMetadata();
// Test setting and getting fullText
metadata.setFullText("Test response");
assertEquals("Test response", metadata.getFullText());
// Test setting and getting chunks
List<String> chunks = List.of("chunk1", "chunk2");
metadata.setChunks(chunks);
assertEquals(chunks, metadata.getChunks());
// Test setting and getting stopReason
metadata.setStopReason("end_turn");
assertEquals("end_turn", metadata.getStopReason());
// Test setting and getting chunkCount
metadata.setChunkCount(5);
assertEquals(5, metadata.getChunkCount());
// Test usage is null initially
assertEquals(null, metadata.getUsage());
}
}
|
ConverseStreamHandlerTest
|
java
|
alibaba__nacos
|
console/src/test/java/com/alibaba/nacos/console/config/ConsoleFunctionEnabledConfigTest.java
|
{
"start": 861,
"end": 1239
}
|
class ____ {
ConsoleFunctionEnabledConfig consoleFunctionEnabledConfig;
@BeforeEach
void setUp() {
consoleFunctionEnabledConfig = new ConsoleFunctionEnabledConfig();
}
@Test
void selectorManager() {
assertInstanceOf(SelectorManager.class, consoleFunctionEnabledConfig.selectorManager());
}
}
|
ConsoleFunctionEnabledConfigTest
|
java
|
grpc__grpc-java
|
interop-testing/src/main/java/io/grpc/testing/integration/AbstractInteropTest.java
|
{
"start": 7458,
"end": 7757
}
|
class ____ {
final String fullMethodName;
final InteropServerStreamTracer tracer;
ServerStreamTracerInfo(String fullMethodName, InteropServerStreamTracer tracer) {
this.fullMethodName = fullMethodName;
this.tracer = tracer;
}
private static final
|
ServerStreamTracerInfo
|
java
|
quarkusio__quarkus
|
integration-tests/hibernate-orm-panache/src/main/java/io/quarkus/it/panache/defaultpu/AccessorEntity.java
|
{
"start": 131,
"end": 1185
}
|
class ____ extends GenericEntity<Integer> {
public String string;
public char c;
public boolean bool;
public byte b;
public short s;
public int i;
public long l;
public float f;
public double d;
@Transient
public Object trans;
@Transient
public Object trans2;
// FIXME: those appear to be mapped by hibernate
transient int getBCalls = 0;
transient int setICalls = 0;
transient int getTransCalls = 0;
transient int setTransCalls = 0;
public void method() {
// touch some fields
@SuppressWarnings("unused")
byte b2 = b;
i = 2;
t = 1;
t2 = 2;
}
// explicit getter or setter
public byte getB() {
getBCalls++;
return b;
}
public void setI(int i) {
setICalls++;
this.i = i;
}
public Object getTrans() {
getTransCalls++;
return trans;
}
public void setTrans(Object trans) {
setTransCalls++;
this.trans = trans;
}
}
|
AccessorEntity
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmnode/RMNodeImpl.java
|
{
"start": 34262,
"end": 37707
}
|
class ____ implements
MultipleArcTransition<RMNodeImpl, RMNodeEvent, NodeState> {
@Override
public NodeState transition(RMNodeImpl rmNode, RMNodeEvent event) {
// Inform the scheduler
RMNodeStartedEvent startEvent = (RMNodeStartedEvent) event;
List<NMContainerStatus> containers = null;
NodeId nodeId = rmNode.nodeId;
RMNode previousRMNode =
rmNode.context.getInactiveRMNodes().remove(nodeId);
if (previousRMNode != null) {
rmNode.updateMetricsForRejoinedNode(previousRMNode.getState());
} else {
NodeId unknownNodeId =
NodesListManager.createUnknownNodeId(nodeId.getHost());
previousRMNode =
rmNode.context.getInactiveRMNodes().remove(unknownNodeId);
if (previousRMNode != null) {
ClusterMetrics.getMetrics().decrDecommisionedNMs();
}
// Check if the node was lost before
NodeId lostNodeId = NodesListManager.createLostNodeId(nodeId.getHost());
RMNode previousRMLostNode = rmNode.context.getInactiveRMNodes().remove(lostNodeId);
if (previousRMLostNode != null) {
// Remove the record of the lost node and update the metrics
rmNode.context.getRMNodes().remove(lostNodeId);
ClusterMetrics.getMetrics().decrNumLostNMs();
}
containers = startEvent.getNMContainerStatuses();
final Resource allocatedResource = Resource.newInstance(
Resources.none());
if (containers != null && !containers.isEmpty()) {
for (NMContainerStatus container : containers) {
if (container.getContainerState() == ContainerState.NEW ||
container.getContainerState() == ContainerState.RUNNING) {
Resources.addTo(allocatedResource,
container.getAllocatedResource());
if (container.getContainerState() == ContainerState.RUNNING) {
rmNode.launchedContainers.add(container.getContainerId());
}
}
}
}
rmNode.allocatedContainerResource = allocatedResource;
}
if (null != startEvent.getRunningApplications()) {
for (ApplicationId appId : startEvent.getRunningApplications()) {
handleRunningAppOnNode(rmNode, rmNode.context, appId, rmNode.nodeId);
}
}
NodeState nodeState;
NodeStatus nodeStatus =
startEvent.getNodeStatus();
if (nodeStatus == null) {
nodeState = NodeState.RUNNING;
reportNodeRunning(rmNode, containers);
} else {
RMNodeStatusEvent rmNodeStatusEvent =
new RMNodeStatusEvent(nodeId, nodeStatus);
NodeHealthStatus nodeHealthStatus =
updateRMNodeFromStatusEvents(rmNode, rmNodeStatusEvent);
if (nodeHealthStatus.getIsNodeHealthy()) {
nodeState = NodeState.RUNNING;
reportNodeRunning(rmNode, containers);
} else {
nodeState = NodeState.UNHEALTHY;
reportNodeUnusable(rmNode, nodeState);
}
}
List<LogAggregationReport> logAggregationReportsForApps =
startEvent.getLogAggregationReportsForApps();
if (logAggregationReportsForApps != null
&& !logAggregationReportsForApps.isEmpty()) {
rmNode.handleLogAggregationStatus(logAggregationReportsForApps);
}
return nodeState;
}
}
public static
|
AddNodeTransition
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/inheritance/joined/relation/ParentIngEntity.java
|
{
"start": 563,
"end": 1846
}
|
class ____ {
@Id
private Integer id;
@Basic
private String data;
@ManyToOne
private ReferencedEntity referenced;
public ParentIngEntity() {
}
public ParentIngEntity(Integer id, String data) {
this.id = id;
this.data = data;
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getData() {
return data;
}
public void setData(String data) {
this.data = data;
}
public ReferencedEntity getReferenced() {
return referenced;
}
public void setReferenced(ReferencedEntity referenced) {
this.referenced = referenced;
}
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( !(o instanceof ParentIngEntity) ) {
return false;
}
ParentIngEntity that = (ParentIngEntity) o;
if ( data != null ? !data.equals( that.data ) : that.data != null ) {
return false;
}
if ( id != null ? !id.equals( that.id ) : that.id != null ) {
return false;
}
return true;
}
public int hashCode() {
int result;
result = (id != null ? id.hashCode() : 0);
result = 31 * result + (data != null ? data.hashCode() : 0);
return result;
}
public String toString() {
return "ParentIngEntity(id = " + getId() + ", data = " + getData() + ")";
}
}
|
ParentIngEntity
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Merger.java
|
{
"start": 15343,
"end": 21197
}
|
class ____<K extends Object, V extends Object>
extends PriorityQueue<Segment<K, V>> implements RawKeyValueIterator {
Configuration conf;
FileSystem fs;
CompressionCodec codec;
List<Segment<K, V>> segments = new ArrayList<Segment<K,V>>();
RawComparator<K> comparator;
private long totalBytesProcessed;
private float progPerByte;
private Progress mergeProgress = new Progress();
Progressable reporter;
DataInputBuffer key;
final DataInputBuffer value = new DataInputBuffer();
final DataInputBuffer diskIFileValue = new DataInputBuffer();
// Boolean variable for including/considering final merge as part of sort
// phase or not. This is true in map task, false in reduce task. It is
// used in calculating mergeProgress.
private boolean includeFinalMerge = false;
/**
* Sets the boolean variable includeFinalMerge to true. Called from
* map task before calling merge() so that final merge of map task
* is also considered as part of sort phase.
*/
private void considerFinalMergeForProgress() {
includeFinalMerge = true;
}
Segment<K, V> minSegment;
Comparator<Segment<K, V>> segmentComparator =
new Comparator<Segment<K, V>>() {
public int compare(Segment<K, V> o1, Segment<K, V> o2) {
if (o1.getLength() == o2.getLength()) {
return 0;
}
return o1.getLength() < o2.getLength() ? -1 : 1;
}
};
public MergeQueue(Configuration conf, FileSystem fs,
Path[] inputs, boolean deleteInputs,
CompressionCodec codec, RawComparator<K> comparator,
Progressable reporter)
throws IOException {
this(conf, fs, inputs, deleteInputs, codec, comparator, reporter, null,
TaskType.REDUCE);
}
public MergeQueue(Configuration conf, FileSystem fs,
Path[] inputs, boolean deleteInputs,
CompressionCodec codec, RawComparator<K> comparator,
Progressable reporter,
Counters.Counter mergedMapOutputsCounter,
TaskType taskType)
throws IOException {
this.conf = conf;
this.fs = fs;
this.codec = codec;
this.comparator = comparator;
this.reporter = reporter;
if (taskType == TaskType.MAP) {
considerFinalMergeForProgress();
}
for (Path file : inputs) {
LOG.debug("MergeQ: adding: " + file);
segments.add(new Segment<K, V>(conf, fs, file, codec, !deleteInputs,
(file.toString().endsWith(
Task.MERGED_OUTPUT_PREFIX) ?
null : mergedMapOutputsCounter)));
}
// Sort segments on file-lengths
Collections.sort(segments, segmentComparator);
}
public MergeQueue(Configuration conf, FileSystem fs,
List<Segment<K, V>> segments, RawComparator<K> comparator,
Progressable reporter) {
this(conf, fs, segments, comparator, reporter, false, TaskType.REDUCE);
}
public MergeQueue(Configuration conf, FileSystem fs,
List<Segment<K, V>> segments, RawComparator<K> comparator,
Progressable reporter, boolean sortSegments, TaskType taskType) {
this.conf = conf;
this.fs = fs;
this.comparator = comparator;
this.segments = segments;
this.reporter = reporter;
if (taskType == TaskType.MAP) {
considerFinalMergeForProgress();
}
if (sortSegments) {
Collections.sort(segments, segmentComparator);
}
}
public MergeQueue(Configuration conf, FileSystem fs,
List<Segment<K, V>> segments, RawComparator<K> comparator,
Progressable reporter, boolean sortSegments, CompressionCodec codec,
TaskType taskType) {
this(conf, fs, segments, comparator, reporter, sortSegments,
taskType);
this.codec = codec;
}
public void close() throws IOException {
Segment<K, V> segment;
while((segment = pop()) != null) {
segment.close();
}
}
public DataInputBuffer getKey() throws IOException {
return key;
}
public DataInputBuffer getValue() throws IOException {
return value;
}
private void adjustPriorityQueue(Segment<K, V> reader) throws IOException{
long startPos = reader.getReader().bytesRead;
boolean hasNext = reader.nextRawKey();
long endPos = reader.getReader().bytesRead;
totalBytesProcessed += endPos - startPos;
mergeProgress.set(Math.min(1.0f, totalBytesProcessed * progPerByte));
if (hasNext) {
adjustTop();
} else {
pop();
reader.close();
}
}
private void resetKeyValue() {
key = null;
value.reset(new byte[] {}, 0);
diskIFileValue.reset(new byte[] {}, 0);
}
public boolean next() throws IOException {
if (size() == 0) {
resetKeyValue();
return false;
}
if (minSegment != null) {
//minSegment is non-null for all invocations of next except the first
//one. For the first invocation, the priority queue is ready for use
//but for the subsequent invocations, first adjust the queue
adjustPriorityQueue(minSegment);
if (size() == 0) {
minSegment = null;
resetKeyValue();
return false;
}
}
minSegment = top();
long startPos = minSegment.getReader().bytesRead;
key = minSegment.getKey();
if (!minSegment.inMemory()) {
//When we load the value from an inmemory segment, we reset
//the "value" DIB in this
|
MergeQueue
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/ExecutionVertexVersioner.java
|
{
"start": 1512,
"end": 4079
}
|
class ____ {
private final Map<ExecutionVertexID, Long> executionVertexToVersion = new HashMap<>();
public ExecutionVertexVersion recordModification(final ExecutionVertexID executionVertexId) {
final Long newVersion = executionVertexToVersion.merge(executionVertexId, 1L, Long::sum);
return new ExecutionVertexVersion(executionVertexId, newVersion);
}
public Map<ExecutionVertexID, ExecutionVertexVersion> recordVertexModifications(
final Collection<ExecutionVertexID> vertices) {
return vertices.stream()
.map(this::recordModification)
.collect(
Collectors.toMap(
ExecutionVertexVersion::getExecutionVertexId, Function.identity()));
}
public boolean isModified(final ExecutionVertexVersion executionVertexVersion) {
final Long currentVersion =
getCurrentVersion(executionVertexVersion.getExecutionVertexId());
return currentVersion != executionVertexVersion.getVersion();
}
private Long getCurrentVersion(ExecutionVertexID executionVertexId) {
final Long currentVersion = executionVertexToVersion.get(executionVertexId);
Preconditions.checkState(
currentVersion != null,
"Execution vertex %s does not have a recorded version",
executionVertexId);
return currentVersion;
}
public Set<ExecutionVertexID> getUnmodifiedExecutionVertices(
final Set<ExecutionVertexVersion> executionVertexVersions) {
return executionVertexVersions.stream()
.filter(executionVertexVersion -> !isModified(executionVertexVersion))
.map(ExecutionVertexVersion::getExecutionVertexId)
.collect(Collectors.toSet());
}
public Map<ExecutionVertexID, ExecutionVertexVersion> getExecutionVertexVersions(
Collection<ExecutionVertexID> executionVertexIds) {
return executionVertexIds.stream()
.map(id -> new ExecutionVertexVersion(id, getCurrentVersion(id)))
.collect(
Collectors.toMap(
ExecutionVertexVersion::getExecutionVertexId, Function.identity()));
}
public ExecutionVertexVersion getExecutionVertexVersion(ExecutionVertexID executionVertexId) {
final long currentVersion = getCurrentVersion(executionVertexId);
return new ExecutionVertexVersion(executionVertexId, currentVersion);
}
}
|
ExecutionVertexVersioner
|
java
|
apache__maven
|
impl/maven-core/src/test/java/org/apache/maven/classrealm/DefaultClassRealmManagerTest.java
|
{
"start": 5047,
"end": 5363
}
|
class ____ {}", "maven.api");
verifier.verify(logger, calls(1)).debug(" Imported: {} < {}", "group1:artifact1", "test");
verifier.verify(logger, calls(1)).debug(" Excluded: {}", "group1:artifact2:ext:classifier1:null");
verifier.verify(logger, calls(1))
.debug("Populating
|
realm
|
java
|
FasterXML__jackson-databind
|
src/main/java/tools/jackson/databind/node/JsonNodeFactory.java
|
{
"start": 195,
"end": 967
}
|
class ____ specifies methods for getting access to
* Node instances (newly constructed, or shared, depending
* on type), as well as basic implementation of the methods.
* Designed to be sub-classed if extended functionality (additions
* to behavior of node types, mostly) is needed.
*<p>
* Note that behavior of "exact BigDecimal value" (aka
* "strip trailing zeroes of BigDecimal or not") changed in 3.0:
* new {@link tools.jackson.databind.cfg.JsonNodeFeature#STRIP_TRAILING_BIGDECIMAL_ZEROES}
* setting is used to externally configure this behavior.
* Note, too, that this factory will no longer handle this normalization
* (if enabled): caller (like {@link tools.jackson.databind.deser.jackson.JsonNodeDeserializer})
* is expected to handle it.
*/
public
|
that
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/resourcemanager/TestingResourceManagerService.java
|
{
"start": 2401,
"end": 5506
}
|
class ____ implements ResourceManagerService {
private static final Duration TIMEOUT = Duration.ofSeconds(10L);
private final ResourceManagerServiceImpl rmService;
private final TestingLeaderElection leaderElection;
private final TestingFatalErrorHandler fatalErrorHandler;
private final RpcService rpcService;
private final boolean needStopRpcService;
private TestingResourceManagerService(
ResourceManagerServiceImpl rmService,
TestingLeaderElection leaderElection,
TestingFatalErrorHandler fatalErrorHandler,
RpcService rpcService,
boolean needStopRpcService) {
this.rmService = rmService;
this.leaderElection = leaderElection;
this.fatalErrorHandler = fatalErrorHandler;
this.rpcService = rpcService;
this.needStopRpcService = needStopRpcService;
}
@Override
public void start() throws Exception {
rmService.start();
}
@Override
public CompletableFuture<Void> getTerminationFuture() {
return rmService.getTerminationFuture();
}
@Override
public CompletableFuture<Void> deregisterApplication(
ApplicationStatus applicationStatus, @Nullable String diagnostics) {
return rmService.deregisterApplication(applicationStatus, diagnostics);
}
@Override
public CompletableFuture<Void> closeAsync() {
return rmService.closeAsync();
}
public Optional<ResourceManagerGateway> getResourceManagerGateway() {
return getResourceManagerOpt().map(rm -> rm.getSelfGateway(ResourceManagerGateway.class));
}
public Optional<ResourceManagerId> getResourceManagerFencingToken() {
return getResourceManagerOpt().map(FencedRpcEndpoint::getFencingToken);
}
public Optional<CompletableFuture<Void>> getResourceManagerTerminationFuture() {
return getResourceManagerOpt().map(RpcEndpoint::getTerminationFuture);
}
private Optional<ResourceManager<?>> getResourceManagerOpt() {
return Optional.ofNullable(rmService.getLeaderResourceManager());
}
public CompletableFuture<LeaderInformation> isLeader(UUID uuid) {
return leaderElection.isLeader(uuid);
}
public void notLeader() {
leaderElection.notLeader();
}
public void rethrowFatalErrorIfAny() throws Exception {
if (fatalErrorHandler.hasExceptionOccurred()) {
fatalErrorHandler.rethrowError();
}
}
public void ignoreFatalErrors() {
fatalErrorHandler.clearError();
}
public void cleanUp() throws Exception {
rmService
.closeAsync()
.thenCompose((ignore) -> this.stopRpcServiceIfNeeded())
.get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
}
private CompletableFuture<Void> stopRpcServiceIfNeeded() {
return needStopRpcService ? rpcService.closeAsync() : FutureUtils.completedVoidFuture();
}
public static Builder newBuilder() {
return new Builder();
}
public static
|
TestingResourceManagerService
|
java
|
processing__processing4
|
java/libraries/io/src/processing/io/I2C.java
|
{
"start": 4355,
"end": 9753
}
|
interface ____ a
* time.
*
* @webref I2C
* @webBrief Closes the I2C device
*/
public void close() {
if (NativeInterface.isSimulated()) {
return;
}
NativeInterface.closeDevice(handle);
handle = 0;
}
protected void finalize() throws Throwable {
try {
close();
} finally {
super.finalize();
}
}
/**
* Ends the current transmissions<br/>
* <br/>
* This executes any queued writes. <a href="I2C_read_.html">Read()</a>
* implicitly ends the current transmission as well, hence calling
* <b>endTransmission()</b> afterwards is not necessary.
*
* @see beginTransmission
* @see write
* @webref I2C
* @webBrief Ends the current transmissions
*/
public void endTransmission() {
if (!transmitting) {
// silently ignore this case
return;
}
if (NativeInterface.isSimulated()) {
return;
}
// implement these flags if needed: https://github.com/raspberrypi/linux/blob/rpi-patches/Documentation/i2c/i2c-protocol
int ret = NativeInterface.transferI2c(handle, slave, out, null);
transmitting = false;
out = null;
if (ret < 0) {
if (ret == -5 | ret == -121) { // EIO | EREMOTEIO
System.err.println("The device did not respond. Check the cabling and whether you are using the correct address.");
}
throw new RuntimeException(NativeInterface.getError(ret));
}
}
/**
* Lists all available I2C interfaces
* @return String array
* @webref I2C
* @webBrief Lists all available I2C interfaces
*/
public static String[] list() {
if (NativeInterface.isSimulated()) {
// as on the Raspberry Pi
return new String[]{ "i2c-1" };
}
ArrayList<String> devs = new ArrayList<String>();
File dir = new File("/dev");
File[] files = dir.listFiles();
if (files != null) {
for (File file : files) {
if (file.getName().startsWith("i2c-")) {
devs.add(file.getName());
}
}
}
// listFiles() does not guarantee ordering
String[] tmp = devs.toArray(new String[devs.size()]);
Arrays.sort(tmp);
return tmp;
}
/**
* Read bytes from the attached device<br/>
* <br/>
* You must call <b>beginTransmission()</b> before calling this function. This function
* also ends the current transmission and sends any data that was queued using
* <b>write()</b> before. It is not necessary to call
* <a href="I2C_endTransmission_.html">endTransmission()</a> after <b>read()</b>.
*
* @param len number of bytes to read
* @return bytes read from device
* @see beginTransmission
* @see write
* @see endTransmission
* @webref I2C
* @webBrief Read bytes from the attached device
*/
public byte[] read(int len) {
if (!transmitting) {
throw new RuntimeException("beginTransmisson has not been called");
}
byte[] in = new byte[len];
if (NativeInterface.isSimulated()) {
return in;
}
int ret = NativeInterface.transferI2c(handle, slave, out, in);
transmitting = false;
out = null;
if (ret < 0) {
if (ret == -5 | ret == -121) { // EIO | EREMOTEIO
System.err.println("The device did not respond. Check the cabling and whether you are using the correct address.");
}
throw new RuntimeException(NativeInterface.getError(ret));
}
return in;
}
/**
* Add bytes to be written to the device<br/>
* <br/>
* You must call <b>beginTransmission()</b> before calling this function. The actual
* writing takes part when <b>read()</b> or <b>endTransmission()</b> is being called.
*
* @param out bytes to be written
* @see beginTransmission
* @see read
* @see endTransmission
* @webref I2C
* @webBrief Add bytes to be written to the device
*/
public void write(byte[] out) {
if (!transmitting) {
throw new RuntimeException("beginTransmisson has not been called");
}
if (this.out == null) {
this.out = out;
} else {
byte[] tmp = new byte[this.out.length + out.length];
System.arraycopy(this.out, 0, tmp, 0, this.out.length);
System.arraycopy(out, 0, tmp, this.out.length, out.length);
this.out = tmp;
}
}
/**
* Adds bytes to be written to the attached device
* @param out string to be written
* @see beginTransmission
* @see read
* @see endTransmission
*/
public void write(String out) {
write(out.getBytes());
}
/**
* Adds a byte to be written to the attached device
* @param out single byte to be written, e.g. numeric literal (0 to 255, or -128 to 127)
* @see beginTransmission
* @see read
* @see endTransmission
*/
public void write(int out) {
if (out < -128 || 255 < out) {
System.err.println("The write function can only operate on a single byte at a time. Call it with a value from 0 to 255, or -128 to 127.");
throw new RuntimeException("Argument does not fit into a single byte");
}
byte[] tmp = new byte[1];
tmp[0] = (byte)out;
write(tmp);
}
/**
* Adds a byte to be written to the attached device
* @param out single byte to be written
* @see beginTransmission
* @see read
* @see endTransmission
*/
public void write(byte out) {
// cast to (unsigned) int
write(out & 0xff);
}
}
|
at
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/rest/util/NoOpFatalErrorHandler.java
|
{
"start": 968,
"end": 1116
}
|
enum ____ implements FatalErrorHandler {
INSTANCE;
@Override
public void onFatalError(final Throwable exception) {}
}
|
NoOpFatalErrorHandler
|
java
|
spring-projects__spring-framework
|
spring-aop/src/main/java/org/springframework/aop/aspectj/annotation/InstantiationModelAwarePointcutAdvisorImpl.java
|
{
"start": 1720,
"end": 8137
}
|
class ____
implements InstantiationModelAwarePointcutAdvisor, AspectJPrecedenceInformation, Serializable {
private static final Advice EMPTY_ADVICE = new Advice() {};
private final AspectJExpressionPointcut declaredPointcut;
private final Class<?> declaringClass;
private final String methodName;
private final Class<?>[] parameterTypes;
private transient Method aspectJAdviceMethod;
private final AspectJAdvisorFactory aspectJAdvisorFactory;
private final MetadataAwareAspectInstanceFactory aspectInstanceFactory;
private final int declarationOrder;
private final String aspectName;
private final Pointcut pointcut;
private final boolean lazy;
private @Nullable Advice instantiatedAdvice;
@SuppressWarnings("NullAway.Init")
private Boolean isBeforeAdvice;
@SuppressWarnings("NullAway.Init")
private Boolean isAfterAdvice;
public InstantiationModelAwarePointcutAdvisorImpl(AspectJExpressionPointcut declaredPointcut,
Method aspectJAdviceMethod, AspectJAdvisorFactory aspectJAdvisorFactory,
MetadataAwareAspectInstanceFactory aspectInstanceFactory, int declarationOrder, String aspectName) {
this.declaredPointcut = declaredPointcut;
this.declaringClass = aspectJAdviceMethod.getDeclaringClass();
this.methodName = aspectJAdviceMethod.getName();
this.parameterTypes = aspectJAdviceMethod.getParameterTypes();
this.aspectJAdviceMethod = aspectJAdviceMethod;
this.aspectJAdvisorFactory = aspectJAdvisorFactory;
this.aspectInstanceFactory = aspectInstanceFactory;
this.declarationOrder = declarationOrder;
this.aspectName = aspectName;
if (aspectInstanceFactory.getAspectMetadata().isLazilyInstantiated()) {
// Static part of the pointcut is a lazy type.
Pointcut preInstantiationPointcut = Pointcuts.union(
aspectInstanceFactory.getAspectMetadata().getPerClausePointcut(), this.declaredPointcut);
// Make it dynamic: must mutate from pre-instantiation to post-instantiation state.
// If it's not a dynamic pointcut, it may be optimized out
// by the Spring AOP infrastructure after the first evaluation.
this.pointcut = new PerTargetInstantiationModelPointcut(
this.declaredPointcut, preInstantiationPointcut, aspectInstanceFactory);
this.lazy = true;
}
else {
// A singleton aspect.
this.pointcut = this.declaredPointcut;
this.lazy = false;
this.instantiatedAdvice = instantiateAdvice(this.declaredPointcut);
}
}
/**
* The pointcut for Spring AOP to use.
* Actual behaviour of the pointcut will change depending on the state of the advice.
*/
@Override
public Pointcut getPointcut() {
return this.pointcut;
}
@Override
public boolean isLazy() {
return this.lazy;
}
@Override
public synchronized boolean isAdviceInstantiated() {
return (this.instantiatedAdvice != null);
}
/**
* Lazily instantiate advice if necessary.
*/
@Override
public synchronized Advice getAdvice() {
if (this.instantiatedAdvice == null) {
this.instantiatedAdvice = instantiateAdvice(this.declaredPointcut);
}
return this.instantiatedAdvice;
}
private Advice instantiateAdvice(AspectJExpressionPointcut pointcut) {
Advice advice = this.aspectJAdvisorFactory.getAdvice(this.aspectJAdviceMethod, pointcut,
this.aspectInstanceFactory, this.declarationOrder, this.aspectName);
return (advice != null ? advice : EMPTY_ADVICE);
}
/**
* This is only of interest for Spring AOP: AspectJ instantiation semantics
* are much richer. In AspectJ terminology, all a return of {@code true}
* means here is that the aspect is not a SINGLETON.
*/
@Override
public boolean isPerInstance() {
return (getAspectMetadata().getAjType().getPerClause().getKind() != PerClauseKind.SINGLETON);
}
/**
* Return the AspectJ AspectMetadata for this advisor.
*/
public AspectMetadata getAspectMetadata() {
return this.aspectInstanceFactory.getAspectMetadata();
}
public MetadataAwareAspectInstanceFactory getAspectInstanceFactory() {
return this.aspectInstanceFactory;
}
public AspectJExpressionPointcut getDeclaredPointcut() {
return this.declaredPointcut;
}
@Override
public int getOrder() {
return this.aspectInstanceFactory.getOrder();
}
@Override
public String getAspectName() {
return this.aspectName;
}
@Override
public int getDeclarationOrder() {
return this.declarationOrder;
}
@Override
public boolean isBeforeAdvice() {
if (this.isBeforeAdvice == null) {
determineAdviceType();
}
return this.isBeforeAdvice;
}
@Override
public boolean isAfterAdvice() {
if (this.isAfterAdvice == null) {
determineAdviceType();
}
return this.isAfterAdvice;
}
/**
* Duplicates some logic from getAdvice, but importantly does not force
* creation of the advice.
*/
private void determineAdviceType() {
AspectJAnnotation aspectJAnnotation =
AbstractAspectJAdvisorFactory.findAspectJAnnotationOnMethod(this.aspectJAdviceMethod);
if (aspectJAnnotation == null) {
this.isBeforeAdvice = false;
this.isAfterAdvice = false;
}
else {
switch (aspectJAnnotation.getAnnotationType()) {
case AtPointcut, AtAround -> {
this.isBeforeAdvice = false;
this.isAfterAdvice = false;
}
case AtBefore -> {
this.isBeforeAdvice = true;
this.isAfterAdvice = false;
}
case AtAfter, AtAfterReturning, AtAfterThrowing -> {
this.isBeforeAdvice = false;
this.isAfterAdvice = true;
}
}
}
}
private void readObject(ObjectInputStream inputStream) throws IOException, ClassNotFoundException {
inputStream.defaultReadObject();
try {
this.aspectJAdviceMethod = this.declaringClass.getMethod(this.methodName, this.parameterTypes);
}
catch (NoSuchMethodException ex) {
throw new IllegalStateException("Failed to find advice method on deserialization", ex);
}
}
@Override
public String toString() {
return "InstantiationModelAwarePointcutAdvisor: expression [" + getDeclaredPointcut().getExpression() +
"]; advice method [" + this.aspectJAdviceMethod + "]; perClauseKind=" +
this.aspectInstanceFactory.getAspectMetadata().getAjType().getPerClause().getKind();
}
/**
* Pointcut implementation that changes its behaviour when the advice is instantiated.
* Note that this is a <i>dynamic</i> pointcut; otherwise it might be optimized out
* if it does not at first match statically.
*/
private static final
|
InstantiationModelAwarePointcutAdvisorImpl
|
java
|
quarkusio__quarkus
|
integration-tests/hibernate-orm-tenancy/connection-resolver-legacy-qualifiers/src/main/java/io/quarkus/it/hibernate/multitenancy/Producers.java
|
{
"start": 395,
"end": 1198
}
|
class ____ {
@Inject
ConnectionConfig config;
@Produces
@Unremovable
@ApplicationScoped
@Default
CustomTenantConnectionResolver defaultConnectionResolver() {
return new CustomTenantConnectionResolver(config, "default");
}
void disposeDefaultConnectionResolver(@Disposes @Default CustomTenantConnectionResolver resolver) {
resolver.close();
}
@Produces
@Unremovable
@ApplicationScoped
@PersistenceUnit("inventory")
CustomTenantConnectionResolver inventoryConnectionResolver() {
return new CustomTenantConnectionResolver(config, "inventory");
}
void disposeInventoryConnectionResolver(@Disposes @PersistenceUnit("inventory") CustomTenantConnectionResolver resolver) {
resolver.close();
}
}
|
Producers
|
java
|
apache__dubbo
|
dubbo-registry/dubbo-registry-api/src/test/java/org/apache/dubbo/registry/service/DemoService.java
|
{
"start": 853,
"end": 912
}
|
interface ____ {
String sayHello(String str);
}
|
DemoService
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/UnaryGeoFunction.java
|
{
"start": 1130,
"end": 1226
}
|
class ____ functions that get a single geo shape or geo point as an argument
*/
public abstract
|
for
|
java
|
mybatis__mybatis-3
|
src/main/java/org/apache/ibatis/type/EnumTypeHandler.java
|
{
"start": 851,
"end": 2015
}
|
class ____<E extends Enum<E>> extends BaseTypeHandler<E> {
private final Class<E> type;
public EnumTypeHandler(Class<E> type) {
if (type == null) {
throw new IllegalArgumentException("Type argument cannot be null");
}
this.type = type;
}
@Override
public void setNonNullParameter(PreparedStatement ps, int i, E parameter, JdbcType jdbcType) throws SQLException {
if (jdbcType == null) {
ps.setString(i, parameter.name());
} else {
ps.setObject(i, parameter.name(), jdbcType.TYPE_CODE); // see r3589
}
}
@Override
public E getNullableResult(ResultSet rs, String columnName) throws SQLException {
String s = rs.getString(columnName);
return s == null ? null : Enum.valueOf(type, s);
}
@Override
public E getNullableResult(ResultSet rs, int columnIndex) throws SQLException {
String s = rs.getString(columnIndex);
return s == null ? null : Enum.valueOf(type, s);
}
@Override
public E getNullableResult(CallableStatement cs, int columnIndex) throws SQLException {
String s = cs.getString(columnIndex);
return s == null ? null : Enum.valueOf(type, s);
}
}
|
EnumTypeHandler
|
java
|
apache__dubbo
|
dubbo-remoting/dubbo-remoting-http12/src/main/java/org/apache/dubbo/remoting/http12/LimitedByteBufOutputStream.java
|
{
"start": 1035,
"end": 1898
}
|
class ____ extends ByteBufOutputStream {
private final int capacity;
public LimitedByteBufOutputStream(ByteBuf byteBuf, int capacity) {
super(byteBuf);
this.capacity = capacity == 0 ? Integer.MAX_VALUE : capacity;
}
@Override
public void write(int b) throws IOException {
ensureCapacity(1);
super.write(b);
}
@Override
public void write(byte[] b) throws IOException {
ensureCapacity(b.length);
super.write(b);
}
@Override
public void write(byte[] b, int off, int len) throws IOException {
ensureCapacity(len);
super.write(b, off, len);
}
private void ensureCapacity(int len) {
if (writtenBytes() + len > capacity) {
throw new HttpOverPayloadException("Response Entity Too Large");
}
}
}
|
LimitedByteBufOutputStream
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/ArangoDbEndpointBuilderFactory.java
|
{
"start": 14603,
"end": 16925
}
|
class ____ {
/**
* The internal instance of the builder used to access to all the
* methods representing the name of headers.
*/
private static final ArangoDbHeaderNameBuilder INSTANCE = new ArangoDbHeaderNameBuilder();
/**
* Indicates if there are multiple documents to update. If set to true,
* the body of the message must be a Collection of documents to update.
*
* The option is a: {@code java.lang.Boolean} type.
*
* Default: false
* Group: producer
*
* @return the name of the header {@code ArangoDbMultiUpdate}.
*/
public String arangoDbMultiUpdate() {
return "CamelArangoDbMultiUpdate";
}
/**
* Indicates if there are multiple documents to insert. If set to true,
* the body of the message must be a Collection of documents to insert.
*
* The option is a: {@code java.lang.Boolean} type.
*
* Default: false
* Group: producer
*
* @return the name of the header {@code ArangoDbMultiInsert}.
*/
public String arangoDbMultiInsert() {
return "CamelArangoDbMultiInsert";
}
/**
* Indicates if there are multiple documents to delete. If set to true,
* the body of the message must be a Collection of key of documents to
* delete.
*
* The option is a: {@code java.lang.Boolean} type.
*
* Default: false
* Group: producer
*
* @return the name of the header {@code ArangoDbMultiDelete}.
*/
public String arangoDbMultiDelete() {
return "CamelArangoDbMultiDelete";
}
/**
* The Arango key to use for the operation.
*
* The option is a: {@code java.lang.String} type.
*
* Group: producer
*
* @return the name of the header {@code key}.
*/
public String key() {
return "key";
}
/**
* The type of the result of the operation.
*
* The option is a: {@code java.lang.Class} type.
*
* Default: BaseDocument.
|
ArangoDbHeaderNameBuilder
|
java
|
google__guava
|
android/guava-testlib/src/com/google/common/collect/testing/features/SetFeature.java
|
{
"start": 1546,
"end": 1648
}
|
interface ____ {
SetFeature[] value() default {};
SetFeature[] absent() default {};
}
}
|
Require
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessorTests.java
|
{
"start": 1312,
"end": 5375
}
|
class ____ extends ESTestCase {
public void testProcessResults() {
// only the scores of the MASK index array
// are used the rest is filler
double[][][] scores = {
{
{ 0, 0, 0, 0, 0, 0, 0 }, // The
{ 0, 0, 0, 0, 0, 0, 0 }, // capital
{ 0, 0, 0, 0, 0, 0, 0 }, // of
{ 0.01, 0.01, 0.3, 0.01, 0.2, 1.2, 0.1 }, // MASK
{ 0, 0, 0, 0, 0, 0, 0 }, // is
{ 0, 0, 0, 0, 0, 0, 0 } // paris
} };
String input = "The capital of " + BertTokenizer.MASK_TOKEN + " is Paris";
List<String> vocab = Arrays.asList("The", "capital", "of", "is", "Paris", "France", BertTokenizer.MASK_TOKEN);
List<WordPieceTokenFilter.WordPieceToken> tokens = List.of();
int[] tokenMap = new int[] { 0, 1, 2, 3, 4, 5 };
int[] tokenIds = new int[] { 0, 1, 2, 6, 4, 5 };
TokenizationResult tokenization = new BertTokenizationResult(
vocab,
List.of(new TokenizationResult.Tokens(List.of(input), List.of(tokens), false, tokenIds, tokenMap, -1, 0, 0)),
0
);
BertTokenizer tokenizer = mock(BertTokenizer.class);
when(tokenizer.getMaskToken()).thenReturn(BertTokenizer.MASK_TOKEN);
when(tokenizer.getMaskTokenId()).thenReturn(OptionalInt.of(6));
String resultsField = randomAlphaOfLength(10);
FillMaskResults result = (FillMaskResults) FillMaskProcessor.processResult(
tokenization,
new PyTorchInferenceResult(scores),
tokenizer,
4,
resultsField,
false
);
assertThat(result.asMap().get(resultsField), equalTo("France"));
assertThat(result.getTopClasses(), hasSize(4));
assertEquals("France", result.getClassificationLabel());
assertEquals("The capital of France is Paris", result.getPredictedSequence());
TopClassEntry prediction = result.getTopClasses().get(1);
assertEquals("of", prediction.getClassification());
prediction = result.getTopClasses().get(2);
assertEquals("Paris", prediction.getClassification());
}
public void testProcessResults_GivenMissingTokens() {
BertTokenizer tokenizer = mock(BertTokenizer.class);
when(tokenizer.getMaskToken()).thenReturn("[MASK]");
TokenizationResult tokenization = new BertTokenizationResult(
List.of(),
List.of(new TokenizationResult.Tokens(List.of(""), List.of(), false, new int[0], new int[0], -1, 0, 0)),
0
);
PyTorchInferenceResult pyTorchResult = new PyTorchInferenceResult(new double[][][] { { {} } });
expectThrows(
ElasticsearchStatusException.class,
() -> FillMaskProcessor.processResult(tokenization, pyTorchResult, tokenizer, 5, randomAlphaOfLength(10), false)
);
}
public void testValidate_GivenMissingMaskToken() {
List<String> input = List.of("The capital of France is Paris");
BertTokenizer tokenizer = mock(BertTokenizer.class);
when(tokenizer.getMaskToken()).thenReturn("[MASK]");
FillMaskProcessor processor = new FillMaskProcessor(tokenizer);
ValidationException e = expectThrows(ValidationException.class, () -> processor.validateInputs(input));
assertThat(e.getMessage(), containsString("no [MASK] token could be found"));
}
public void testProcessResults_GivenMultipleMaskTokens() {
List<String> input = List.of("The capital of [MASK] is [MASK]");
BertTokenizer tokenizer = mock(BertTokenizer.class);
when(tokenizer.getMaskToken()).thenReturn("[MASK]");
FillMaskProcessor processor = new FillMaskProcessor(tokenizer);
ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> processor.validateInputs(input));
assertThat(e.getMessage(), containsString("only one [MASK] token should exist in the input"));
}
}
|
FillMaskProcessorTests
|
java
|
apache__dubbo
|
dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/reference/ReferenceAttributes.java
|
{
"start": 1003,
"end": 3107
}
|
interface ____ {
String ID = "id";
String INTERFACE = "interface";
String INTERFACE_NAME = "interfaceName";
String INTERFACE_CLASS = "interfaceClass";
String ACTUAL_INTERFACE = "actualInterface";
String GENERIC = "generic";
String REGISTRY = "registry";
String REGISTRIES = "registries";
String REGISTRY_IDS = "registryIds";
String GROUP = "group";
String VERSION = "version";
String ARGUMENTS = "arguments";
String METHODS = "methods";
String PARAMETERS = "parameters";
String PROVIDED_BY = "providedBy";
String PROVIDER_PORT = "providerPort";
String URL = "url";
String CLIENT = "client";
// /**
// * When enable, prefer to call local service in the same JVM if it's present, default value is true
// * @deprecated using scope="local" or scope="remote" instead
// */
// @Deprecated
String INJVM = "injvm";
String CHECK = "check";
String INIT = "init";
String LAZY = "lazy";
String STUBEVENT = "stubevent";
String RECONNECT = "reconnect";
String STICKY = "sticky";
String PROXY = "proxy";
String STUB = "stub";
String CLUSTER = "cluster";
String CONNECTIONS = "connections";
String CALLBACKS = "callbacks";
String ONCONNECT = "onconnect";
String ONDISCONNECT = "ondisconnect";
String OWNER = "owner";
String LAYER = "layer";
String RETRIES = "retries";
String LOAD_BALANCE = "loadbalance";
String ASYNC = "async";
String ACTIVES = "actives";
String SENT = "sent";
String MOCK = "mock";
String VALIDATION = "validation";
String TIMEOUT = "timeout";
String CACHE = "cache";
String FILTER = "filter";
String LISTENER = "listener";
String APPLICATION = "application";
String MODULE = "module";
String CONSUMER = "consumer";
String MONITOR = "monitor";
String PROTOCOL = "protocol";
String TAG = "tag";
String MERGER = "merger";
String SERVICES = "services";
String SCOPE = "scope";
}
|
ReferenceAttributes
|
java
|
spring-projects__spring-framework
|
spring-messaging/src/test/java/org/springframework/messaging/support/ExecutorSubscribableChannelTests.java
|
{
"start": 7244,
"end": 8042
}
|
class ____ extends AbstractTestInterceptor {
private Message<?> messageToReturn;
private RuntimeException exceptionToRaise;
public void setMessageToReturn(Message<?> messageToReturn) {
this.messageToReturn = messageToReturn;
}
// TODO Determine why setExceptionToRaise() is unused.
@SuppressWarnings("unused")
public void setExceptionToRaise(RuntimeException exception) {
this.exceptionToRaise = exception;
}
@Override
public Message<?> beforeHandle(Message<?> message, MessageChannel channel, MessageHandler handler) {
super.beforeHandle(message, channel, handler);
if (this.exceptionToRaise != null) {
throw this.exceptionToRaise;
}
return (this.messageToReturn != null ? this.messageToReturn : message);
}
}
private static
|
BeforeHandleInterceptor
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/configuration/ImportedConfigurationClassEnhancementTests.java
|
{
"start": 3623,
"end": 3695
}
|
class ____ extends Config {
}
@Configuration
static
|
ConfigThatDoesImport
|
java
|
FasterXML__jackson-databind
|
src/main/java/tools/jackson/databind/ser/jdk/JDKArraySerializers.java
|
{
"start": 2900,
"end": 3038
}
|
class ____ for cases where we may add
* type information (excludes boolean/int/double arrays).
*/
protected abstract static
|
used
|
java
|
spring-projects__spring-framework
|
spring-websocket/src/main/java/org/springframework/web/socket/server/support/DefaultHandshakeHandler.java
|
{
"start": 1518,
"end": 2356
}
|
class ____ extends AbstractHandshakeHandler implements ServletContextAware {
private static final boolean JETTY_WS_PRESENT = ClassUtils.isPresent(
"org.eclipse.jetty.ee11.websocket.server.JettyWebSocketServerContainer",
DefaultHandshakeHandler.class.getClassLoader());
public DefaultHandshakeHandler() {
super(JETTY_WS_PRESENT ? new JettyRequestUpgradeStrategy() : new StandardWebSocketUpgradeStrategy());
}
public DefaultHandshakeHandler(RequestUpgradeStrategy requestUpgradeStrategy) {
super(requestUpgradeStrategy);
}
@Override
public void setServletContext(ServletContext servletContext) {
RequestUpgradeStrategy strategy = getRequestUpgradeStrategy();
if (strategy instanceof ServletContextAware servletContextAware) {
servletContextAware.setServletContext(servletContext);
}
}
}
|
DefaultHandshakeHandler
|
java
|
apache__flink
|
flink-streaming-java/src/test/java/org/apache/flink/streaming/runtime/operators/GenericWriteAheadSinkTest.java
|
{
"start": 6890,
"end": 7774
}
|
class ____ extends CheckpointCommitter {
private static final long serialVersionUID = 1L;
private List<Tuple2<Long, Integer>> checkpoints;
@Override
public void open() throws Exception {}
@Override
public void close() throws Exception {}
@Override
public void createResource() throws Exception {
checkpoints = new ArrayList<>();
}
@Override
public void commitCheckpoint(int subtaskIdx, long checkpointID) {
checkpoints.add(new Tuple2<>(checkpointID, subtaskIdx));
}
@Override
public boolean isCheckpointCommitted(int subtaskIdx, long checkpointID) {
return checkpoints.contains(new Tuple2<>(checkpointID, subtaskIdx));
}
}
/** Simple sink that stores all records in a public list. */
public static
|
SimpleCommitter
|
java
|
quarkusio__quarkus
|
extensions/grpc/runtime/src/test/java/io/quarkus/grpc/runtime/ClientAndServerCallsTest.java
|
{
"start": 6083,
"end": 6633
}
|
class ____ {
Uni<String> oneToOne(String s) {
return Uni.createFrom().item(s).map(String::toUpperCase);
}
Uni<List<String>> manyToOne(Multi<String> multi) {
return multi.map(String::toUpperCase).collect().asList();
}
Multi<String> oneToMany(String s) {
return Multi.createFrom().items(s, s).map(String::toUpperCase);
}
Multi<String> manyToMany(Multi<String> multi) {
return multi.map(String::toUpperCase);
}
}
static
|
FakeService
|
java
|
apache__camel
|
components/camel-ibm/camel-ibm-watson-speech-to-text/src/generated/java/org/apache/camel/component/ibm/watson/stt/WatsonSpeechToTextEndpointUriFactory.java
|
{
"start": 524,
"end": 2518
}
|
class ____ extends org.apache.camel.support.component.EndpointUriFactorySupport implements EndpointUriFactory {
private static final String BASE = ":label";
private static final Set<String> PROPERTY_NAMES;
private static final Set<String> SECRET_PROPERTY_NAMES;
private static final Map<String, String> MULTI_VALUE_PREFIXES;
static {
Set<String> props = new HashSet<>(10);
props.add("apiKey");
props.add("contentType");
props.add("label");
props.add("lazyStartProducer");
props.add("model");
props.add("operation");
props.add("serviceUrl");
props.add("speakerLabels");
props.add("timestamps");
props.add("wordConfidence");
PROPERTY_NAMES = Collections.unmodifiableSet(props);
Set<String> secretProps = new HashSet<>(1);
secretProps.add("apiKey");
SECRET_PROPERTY_NAMES = Collections.unmodifiableSet(secretProps);
MULTI_VALUE_PREFIXES = Collections.emptyMap();
}
@Override
public boolean isEnabled(String scheme) {
return "ibm-watson-speech-to-text".equals(scheme);
}
@Override
public String buildUri(String scheme, Map<String, Object> properties, boolean encode) throws URISyntaxException {
String syntax = scheme + BASE;
String uri = syntax;
Map<String, Object> copy = new HashMap<>(properties);
uri = buildPathParameter(syntax, uri, "label", null, true, copy);
uri = buildQueryParameters(uri, copy, encode);
return uri;
}
@Override
public Set<String> propertyNames() {
return PROPERTY_NAMES;
}
@Override
public Set<String> secretPropertyNames() {
return SECRET_PROPERTY_NAMES;
}
@Override
public Map<String, String> multiValuePrefixes() {
return MULTI_VALUE_PREFIXES;
}
@Override
public boolean isLenientProperties() {
return false;
}
}
|
WatsonSpeechToTextEndpointUriFactory
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/engine/transaction/jta/platform/spi/JtaPlatformProvider.java
|
{
"start": 469,
"end": 652
}
|
interface ____ {
/**
* Retrieve the JtaPlatform provided by this environment.
*
* @return The provided JtaPlatform
*/
JtaPlatform getProvidedJtaPlatform();
}
|
JtaPlatformProvider
|
java
|
apache__camel
|
components/camel-pg-replication-slot/src/test/java/org/apache/camel/component/pg/replication/slot/integration/PgReplicationSlotCamelIT.java
|
{
"start": 1241,
"end": 3552
}
|
class ____ extends PgReplicationITSupport {
@EndpointInject("mock:result")
private MockEndpoint mockEndpoint;
private Connection connection;
@Override
@BeforeEach
public void doPostSetup() throws Exception {
String url = String.format("jdbc:postgresql://%s/camel", service.getServiceAddress());
Properties props = new Properties();
props.setProperty("user", service.userName());
props.setProperty("password", service.password());
this.connection = DriverManager.getConnection(url, props);
try (Statement statement = this.connection.createStatement()) {
statement.execute("CREATE TABLE IF NOT EXISTS camel_test_table(id int);");
}
}
@Override
public void doPostTearDown() throws SQLException {
this.connection.close();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
String uriFormat
= "pg-replication-slot://{{postgres.service.address}}/camel/camel_test_slot:test_decoding?"
+ "user={{postgres.user.name}}&password={{postgres.user.password}}"
+ "&slotOptions.skip-empty-xacts=true&slotOptions.include-xids=false";
from(uriFormat).to(mockEndpoint);
}
};
}
@Test
public void canReceiveFromSlot() throws InterruptedException, SQLException {
mockEndpoint.expectedMessageCount(1);
// test_decoding plugin writes each change in a separate message. Some other plugins can have different behaviour,
// wal2json default behaviour is to write the whole transaction in one message.
mockEndpoint.expectedBodiesReceived("BEGIN", "table public.camel_test_table: INSERT: id[integer]:1984", "COMMIT",
"BEGIN", "table public.camel_test_table: INSERT: id[integer]:1998", "COMMIT");
try (Statement statement = this.connection.createStatement()) {
statement.execute("INSERT INTO camel_test_table(id) VALUES(1984);");
statement.execute("INSERT INTO camel_test_table(id) VALUES(1998);");
}
mockEndpoint.assertIsSatisfied(5000);
}
}
|
PgReplicationSlotCamelIT
|
java
|
spring-projects__spring-boot
|
documentation/spring-boot-docs/src/main/java/org/springframework/boot/docs/data/sql/r2dbc/MyPostgresR2dbcConfiguration.java
|
{
"start": 1052,
"end": 1398
}
|
class ____ {
@Bean
public ConnectionFactoryOptionsBuilderCustomizer postgresCustomizer() {
Map<String, String> options = new HashMap<>();
options.put("lock_timeout", "30s");
options.put("statement_timeout", "60s");
return (builder) -> builder.option(PostgresqlConnectionFactoryProvider.OPTIONS, options);
}
}
|
MyPostgresR2dbcConfiguration
|
java
|
apache__logging-log4j2
|
log4j-api-test/src/test/java/org/apache/logging/log4j/status/StatusLoggerFailingListenerTest.java
|
{
"start": 1528,
"end": 2675
}
|
class ____ {
public static final StatusLogger STATUS_LOGGER = StatusLogger.getLogger();
private StatusListener listener;
@BeforeEach
void createAndRegisterListener() {
listener = mock(StatusListener.class);
STATUS_LOGGER.registerListener(listener);
}
@AfterEach
void unregisterListener() {
STATUS_LOGGER.removeListener(listener);
}
@Test
void logging_with_failing_listener_should_not_cause_stack_overflow() throws Exception {
// Set up a failing listener on `log(StatusData)`
when(listener.getStatusLevel()).thenReturn(Level.ALL);
final Exception listenerFailure = new RuntimeException("test failure " + Math.random());
doThrow(listenerFailure).when(listener).log(any());
// Log something and verify exception dump
final String stderr = SystemStubs.tapSystemErr(() -> STATUS_LOGGER.error("foo"));
final String listenerFailureClassName = listenerFailure.getClass().getCanonicalName();
assertThat(stderr).contains(listenerFailureClassName + ": " + listenerFailure.getMessage());
}
}
|
StatusLoggerFailingListenerTest
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/PropertySourceAnnotationTests.java
|
{
"start": 18677,
"end": 18816
}
|
class ____ {
}
@PropertySource("classpath*:org/springframework/context/annotation/p?.properties")
static
|
MultipleComposedAnnotationsConfig
|
java
|
FasterXML__jackson-databind
|
src/main/java/tools/jackson/databind/deser/bean/BeanDeserializer.java
|
{
"start": 1054,
"end": 59545
}
|
class ____
extends BeanDeserializerBase
{
// @since 3.0
protected PropertyNameMatcher _propNameMatcher;
// @since 3.0
protected SettableBeanProperty[] _propsByIndex;
/**
* State marker we need in order to avoid infinite recursion for some cases
* (not very clean, alas, but has to do for now)
*/
protected volatile transient NameTransformer _currentlyTransforming;
/*
/**********************************************************************
/* Life-cycle, constructors
/**********************************************************************
*/
/**
* Constructor used by {@link BeanDeserializerBuilder}.
*/
public BeanDeserializer(BeanDeserializerBuilder builder, BeanDescription.Supplier beanDescRef,
BeanPropertyMap properties, Map<String, SettableBeanProperty> backRefs,
HashSet<String> ignorableProps, boolean ignoreAllUnknown, Set<String> includableProps,
boolean hasViews)
{
super(builder, beanDescRef, properties, backRefs,
ignorableProps, ignoreAllUnknown, includableProps, hasViews);
}
/**
* Copy-constructor that can be used by sub-classes to allow
* copy-on-write style copying of settings of an existing instance.
*/
protected BeanDeserializer(BeanDeserializer src) {
super(src, src._ignoreAllUnknown);
_propNameMatcher = src._propNameMatcher;
_propsByIndex = src._propsByIndex;
}
protected BeanDeserializer(BeanDeserializer src, boolean ignoreAllUnknown) {
super(src, ignoreAllUnknown);
_propNameMatcher = src._propNameMatcher;
_propsByIndex = src._propsByIndex;
}
protected BeanDeserializer(BeanDeserializer src,
UnwrappedPropertyHandler unwrapHandler, PropertyBasedCreator propertyBasedCreator,
BeanPropertyMap renamedProperties, boolean ignoreAllUnknown) {
super(src, unwrapHandler, propertyBasedCreator, renamedProperties, ignoreAllUnknown);
_propNameMatcher = _beanProperties.getNameMatcher();
_propsByIndex = _beanProperties.getNameMatcherProperties();
}
protected BeanDeserializer(BeanDeserializer src, ObjectIdReader oir) {
super(src, oir);
_propNameMatcher = src._propNameMatcher;
_propsByIndex = src._propsByIndex;
}
protected BeanDeserializer(BeanDeserializer src,
Set<String> ignorableProps, Set<String> includableProps) {
super(src, ignorableProps, includableProps);
_propNameMatcher = src._propNameMatcher;
_propsByIndex = src._propsByIndex;
}
protected BeanDeserializer(BeanDeserializer src, BeanPropertyMap props) {
super(src, props);
_propNameMatcher = _beanProperties.getNameMatcher();
_propsByIndex = _beanProperties.getNameMatcherProperties();
}
/*
/**********************************************************************
/* Life-cycle, mutant factories
/**********************************************************************
*/
@Override
public ValueDeserializer<Object> unwrappingDeserializer(DeserializationContext ctxt,
NameTransformer transformer)
{
// bit kludgy but we don't want to accidentally change type; sub-classes
// MUST override this method to support unwrapped properties...
if (getClass() != BeanDeserializer.class) {
return this;
}
// 25-Mar-2017, tatu: Not clean at all, but for [databind#383] we do need
// to keep track of accidental recursion...
if (_currentlyTransforming == transformer) {
return this;
}
_currentlyTransforming = transformer;
try {
UnwrappedPropertyHandler uwHandler = _unwrappedPropertyHandler;
if (uwHandler != null) { // delegate further unwraps, if any
uwHandler = uwHandler.renameAll(ctxt, transformer);
}
PropertyBasedCreator pbCreator = _propertyBasedCreator;
if (pbCreator != null) {
pbCreator = pbCreator.renameAll(ctxt, transformer);
}
// and handle direct unwrapping as well:
return new BeanDeserializer(this, uwHandler, pbCreator,
_beanProperties.renameAll(ctxt, transformer), true);
} finally { _currentlyTransforming = null; }
}
@Override
public BeanDeserializer withObjectIdReader(ObjectIdReader oir) {
return new BeanDeserializer(this, oir);
}
@Override
public BeanDeserializer withByNameInclusion(Set<String> ignorableProps,
Set<String> includableProps) {
return new BeanDeserializer(this, ignorableProps, includableProps);
}
@Override
public BeanDeserializerBase withIgnoreAllUnknown(boolean ignoreUnknown) {
return new BeanDeserializer(this, ignoreUnknown);
}
@Override
public BeanDeserializerBase withBeanProperties(BeanPropertyMap props) {
return new BeanDeserializer(this, props);
}
@Override
protected BeanDeserializerBase asArrayDeserializer() {
return new BeanAsArrayDeserializer(this, _beanProperties.getPrimaryProperties());
}
/*
/**********************************************************************
/* Life-cycle, initialization
/**********************************************************************
*/
@Override
protected void initNameMatcher(DeserializationContext ctxt) {
_beanProperties.initMatcher(ctxt.tokenStreamFactory());
_propNameMatcher = _beanProperties.getNameMatcher();
_propsByIndex = _beanProperties.getNameMatcherProperties();
}
/*
/**********************************************************************
/* ValueDeserializer implementation
/**********************************************************************
*/
/**
* Main deserialization method for bean-based objects (POJOs).
*/
@Override
public Object deserialize(JsonParser p, DeserializationContext ctxt) throws JacksonException
{
// common case first
if (p.isExpectedStartObjectToken()) {
if (_vanillaProcessing) {
return _vanillaDeserialize(p, ctxt);
}
// 23-Sep-2015, tatu: This is wrong at some many levels, but for now... it is
// what it is, including "expected behavior".
p.nextToken();
if (_objectIdReader != null) {
return deserializeWithObjectId(p, ctxt);
}
return deserializeFromObject(p, ctxt);
}
return _deserializeOther(p, ctxt, p.currentToken());
}
protected final Object _deserializeOther(JsonParser p, DeserializationContext ctxt,
JsonToken t) throws JacksonException
{
// and then others, generally requiring use of @JsonCreator
if (t != null) {
switch (t) {
case VALUE_STRING:
return deserializeFromString(p, ctxt);
case VALUE_NUMBER_INT:
return deserializeFromNumber(p, ctxt);
case VALUE_NUMBER_FLOAT:
return deserializeFromDouble(p, ctxt);
case VALUE_EMBEDDED_OBJECT:
return deserializeFromEmbedded(p, ctxt);
case VALUE_TRUE:
case VALUE_FALSE:
return deserializeFromBoolean(p, ctxt);
case VALUE_NULL:
return deserializeFromNull(p, ctxt);
case START_ARRAY:
// these only work if there's a (delegating) creator, or UNWRAP_SINGLE_ARRAY
return _deserializeFromArray(p, ctxt);
case PROPERTY_NAME:
case END_OBJECT: // added to resolve [JACKSON-319], possible related issues
if (_vanillaProcessing) {
return _vanillaDeserialize(p, ctxt, t);
}
if (_objectIdReader != null) {
return deserializeWithObjectId(p, ctxt);
}
return deserializeFromObject(p, ctxt);
default:
}
}
return ctxt.handleUnexpectedToken(getValueType(ctxt), p);
}
/**
* Secondary deserialization method, called in cases where POJO
* instance is created as part of deserialization, potentially
* after collecting some or all of the properties to set.
*/
@Override
public Object deserialize(JsonParser p, DeserializationContext ctxt, Object bean) throws JacksonException
{
// [databind#631]: Assign current value, to be accessible by custom serializers
p.assignCurrentValue(bean);
if (_injectables != null) {
injectValues(ctxt, bean);
}
if (_unwrappedPropertyHandler != null) {
return deserializeWithUnwrapped(p, ctxt, bean);
}
if (_externalTypeIdHandler != null) {
return deserializeWithExternalTypeId(p, ctxt, bean);
}
String propName;
// 23-Mar-2010, tatu: In some cases, we start with full JSON object too...
if (p.isExpectedStartObjectToken()) {
propName = p.nextName();
if (propName == null) {
return bean;
}
} else if (p.hasTokenId(JsonTokenId.ID_PROPERTY_NAME)) {
propName = p.currentName();
} else {
return bean;
}
if (_needViewProcesing) {
Class<?> view = ctxt.getActiveView();
if (view != null) {
return deserializeWithView(p, ctxt, bean, view);
}
}
// May or may not be interned...
int ix = _propNameMatcher.matchName(propName);
while (ix >= 0) {
p.nextToken();
SettableBeanProperty prop = _propsByIndex[ix];
try {
prop.deserializeAndSet(p, ctxt, bean);
} catch (Exception e) {
throw wrapAndThrow(e, bean, prop.getName(), ctxt);
}
ix = p.nextNameMatch(_propNameMatcher);
}
if (ix != PropertyNameMatcher.MATCH_END_OBJECT) {
if (ix == PropertyNameMatcher.MATCH_UNKNOWN_NAME) {
return _vanillaDeserializeWithUnknown(p, ctxt, bean,
p.currentName());
}
return _handleUnexpectedWithin(p, ctxt, bean);
}
return bean;
}
/*
/**********************************************************************
/* Concrete deserialization methods
/**********************************************************************
*/
/**
* Streamlined version that is only used when no "special"
* features are enabled, and when current logical token
* is {@link JsonToken#START_OBJECT} (or equivalent).
*/
private final Object _vanillaDeserialize(JsonParser p, DeserializationContext ctxt)
throws JacksonException
{
final Object bean = _valueInstantiator.createUsingDefault(ctxt);
// [databind#631]: Assign current value, to be accessible by custom serializers
p.assignCurrentValue(bean);
int ix = p.nextNameMatch(_propNameMatcher);
while (ix >= 0) {
p.nextToken();
SettableBeanProperty prop = _propsByIndex[ix];
try {
prop.deserializeAndSet(p, ctxt, bean);
} catch (Exception e) {
wrapAndThrow(e, bean, prop.getName(), ctxt);
}
// Elem #2
ix = p.nextNameMatch(_propNameMatcher);
if (ix < 0) {
break;
}
p.nextToken();
prop = _propsByIndex[ix];
try {
prop.deserializeAndSet(p, ctxt, bean);
} catch (Exception e) {
wrapAndThrow(e, bean, prop.getName(), ctxt);
}
// Elem #3
ix = p.nextNameMatch(_propNameMatcher);
if (ix < 0) {
break;
}
p.nextToken();
prop = _propsByIndex[ix];
try {
prop.deserializeAndSet(p, ctxt, bean);
} catch (Exception e) {
wrapAndThrow(e, bean, prop.getName(), ctxt);
}
// Elem #4
ix = p.nextNameMatch(_propNameMatcher);
if (ix < 0) {
break;
}
p.nextToken();
prop = _propsByIndex[ix];
try {
prop.deserializeAndSet(p, ctxt, bean);
} catch (Exception e) {
wrapAndThrow(e, bean, prop.getName(), ctxt);
}
ix = p.nextNameMatch(_propNameMatcher);
}
if (ix != PropertyNameMatcher.MATCH_END_OBJECT) {
if (ix == PropertyNameMatcher.MATCH_UNKNOWN_NAME) {
return _vanillaDeserializeWithUnknown(p, ctxt, bean,
p.currentName());
}
return _handleUnexpectedWithin(p, ctxt, bean);
}
return bean;
}
/**
* Streamlined version that is only used when no "special"
* features are enabled.
*/
private final Object _vanillaDeserialize(JsonParser p,
DeserializationContext ctxt, JsonToken t)
throws JacksonException
{
final Object bean = _valueInstantiator.createUsingDefault(ctxt);
if (t != JsonToken.PROPERTY_NAME) {
return bean;
}
// [databind#631]: Assign current value, to be accessible by custom serializers
// [databind#4184]: but only if we have at least one property
p.assignCurrentValue(bean);
int ix = p.currentNameMatch(_propNameMatcher);
while (ix >= 0) { // minor unrolling here (by-2), less likely on critical path
SettableBeanProperty prop = _propsByIndex[ix];
p.nextToken();
try {
prop.deserializeAndSet(p, ctxt, bean);
} catch (Exception e) {
throw wrapAndThrow(e, bean, prop.getName(), ctxt);
}
// Elem #2
ix = p.nextNameMatch(_propNameMatcher);
if (ix < 0) {
break;
}
prop = _propsByIndex[ix];
p.nextToken();
try {
prop.deserializeAndSet(p, ctxt, bean);
} catch (Exception e) {
throw wrapAndThrow(e, bean, prop.getName(), ctxt);
}
ix = p.nextNameMatch(_propNameMatcher);
}
if (ix != PropertyNameMatcher.MATCH_END_OBJECT) {
if (ix == PropertyNameMatcher.MATCH_UNKNOWN_NAME) {
return _vanillaDeserializeWithUnknown(p, ctxt, bean,
p.currentName());
}
return _handleUnexpectedWithin(p, ctxt, bean);
}
return bean;
}
private final Object _vanillaDeserializeWithUnknown(JsonParser p,
DeserializationContext ctxt, Object bean, String propName) throws JacksonException
{
p.nextToken();
handleUnknownVanilla(p, ctxt, bean, propName);
while (true) {
int ix = p.nextNameMatch(_propNameMatcher);
if (ix >= 0) { // normal case
p.nextToken();
try {
_propsByIndex[ix].deserializeAndSet(p, ctxt, bean);
} catch (Exception e) {
wrapAndThrow(e, bean, _propsByIndex[ix].getName(), ctxt);
}
continue;
}
if (ix == PropertyNameMatcher.MATCH_END_OBJECT) {
return bean;
}
if (ix != PropertyNameMatcher.MATCH_UNKNOWN_NAME) {
return bean;
}
p.nextToken();
handleUnknownVanilla(p, ctxt, bean, p.currentName());
}
}
/**
* General version used when handling needs more advanced features.
*/
@Override
public Object deserializeFromObject(JsonParser p, DeserializationContext ctxt) throws JacksonException
{
/* 09-Dec-2014, tatu: As per [databind#622], we need to allow Object Id references
* to come in as JSON Objects as well; but for now assume they will
* be simple, single-property references, which means that we can
* recognize them without having to buffer anything.
* Once again, if we must, we can do more complex handling with buffering,
* but let's only do that if and when that becomes necessary.
*/
if ((_objectIdReader != null) && _objectIdReader.maySerializeAsObject()) {
if (p.hasTokenId(JsonTokenId.ID_PROPERTY_NAME)
&& _objectIdReader.isValidReferencePropertyName(p.currentName(), p)) {
return deserializeFromObjectId(p, ctxt);
}
}
if (_nonStandardCreation) {
if (_unwrappedPropertyHandler != null) {
return deserializeWithUnwrapped(p, ctxt);
}
if (_externalTypeIdHandler != null) {
return deserializeWithExternalTypeId(p, ctxt);
}
Object bean = deserializeFromObjectUsingNonDefault(p, ctxt);
// 27-May-2014, tatu: I don't think view processing would work
// at this point, so commenting it out; but leaving in place
// just in case I forgot something fundamental...
/*
if (_needViewProcesing) {
Class<?> view = ctxt.getActiveView();
if (view != null) {
return deserializeWithView(p, ctxt, bean, view);
}
}
*/
return bean;
}
final Object bean = _valueInstantiator.createUsingDefault(ctxt);
// First: do we have native Object Ids (like YAML)?
if (p.canReadObjectId()) {
Object id = p.getObjectId();
if (id != null) {
_handleTypedObjectId(p, ctxt, bean, id);
}
}
// [databind#3838]: since 2.16 Uniform handling of missing objectId
// only for the specific "empty JSON Object" case (and only for non-Native
// Object Ids, see [databind#4607]
else if (_objectIdReader != null && p.hasTokenId(JsonTokenId.ID_END_OBJECT)) {
// [databind#4610]: check if we are to skip failure
if (ctxt.isEnabled(DeserializationFeature.FAIL_ON_UNRESOLVED_OBJECT_IDS)) {
ctxt.reportUnresolvedObjectId(_objectIdReader, bean);
}
}
if (_injectables != null) {
injectValues(ctxt, bean);
}
if (!p.hasTokenId(JsonTokenId.ID_PROPERTY_NAME)) {
// should we check what exactly it is... ?
return bean;
}
// [databind#631]: Assign current value, to be accessible by custom serializers
// [databind#4184]: but only if we have at least one property
p.assignCurrentValue(bean);
if (_needViewProcesing) {
Class<?> view = ctxt.getActiveView();
if (view != null) {
return deserializeWithView(p, ctxt, bean, view);
}
}
for (int ix = p.currentNameMatch(_propNameMatcher); ; ix = p.nextNameMatch(_propNameMatcher)) {
if (ix >= 0) { // normal case
p.nextToken();
try {
_propsByIndex[ix].deserializeAndSet(p, ctxt, bean);
} catch (Exception e) {
throw wrapAndThrow(e, bean, _propsByIndex[ix].getName(), ctxt);
}
continue;
}
if (ix == PropertyNameMatcher.MATCH_END_OBJECT) {
return bean;
}
if (ix != PropertyNameMatcher.MATCH_UNKNOWN_NAME) {
return _handleUnexpectedWithin(p, ctxt, bean);
}
p.nextToken();
handleUnknownVanilla(p, ctxt, bean, p.currentName());
}
}
/**
* Method called to deserialize bean using "property-based creator":
* this means that a non-default constructor or factory method is
* called, and then possibly other setters. The trick is that
* values for creator method need to be buffered, first; and
* due to non-guaranteed ordering possibly some other properties
* as well.
*/
@Override
protected Object _deserializeUsingPropertyBased(final JsonParser p, final DeserializationContext ctxt)
throws JacksonException
{
final PropertyBasedCreator creator = _propertyBasedCreator;
PropertyValueBuffer buffer = (_anySetter != null)
? creator.startBuildingWithAnySetter(p, ctxt, _objectIdReader, _anySetter)
: creator.startBuilding(p, ctxt, _objectIdReader);
TokenBuffer unknown = null;
final Class<?> activeView = _needViewProcesing ? ctxt.getActiveView() : null;
JsonToken t = p.currentToken();
List<BeanReferring> referrings = null;
for (; t == JsonToken.PROPERTY_NAME; t = p.nextToken()) {
String propName = p.currentName();
p.nextToken(); // to point to value
final SettableBeanProperty creatorProp = creator.findCreatorProperty(propName);
// Object Id property?
if (buffer.readIdProperty(propName) && creatorProp == null) {
continue;
}
// [databind#4629] Need to check for ignored properties BEFORE checking for Creator properties.
// Records (and other creator-based types) will have a valid 'creatorProp', so if we don't
// check for ignore first, the ignore configuration will be bypassed.
if (IgnorePropertiesUtil.shouldIgnore(propName, _ignorableProps, _includableProps)) {
handleIgnoredProperty(p, ctxt, handledType(), propName);
continue;
}
// Creator property?
if (creatorProp != null) {
Object value;
if ((activeView != null) && !creatorProp.visibleInView(activeView)) {
p.skipChildren();
continue;
}
// [databind#1381]: if useInput=FALSE, skip deserialization from input
if (creatorProp.isInjectionOnly()) {
// Skip the input value, will be injected later in PropertyValueBuffer
p.skipChildren();
continue;
}
value = _deserializeWithErrorWrapping(p, ctxt, creatorProp);
// Last creator property to set?
if (buffer.assignParameter(creatorProp, value)) {
p.nextToken(); // to move to following PROPERTY_NAME/END_OBJECT
Object bean;
try {
bean = creator.build(ctxt, buffer);
} catch (Exception e) {
bean = wrapInstantiationProblem(ctxt, e);
}
// [databind#631]: Assign current value, to be accessible by custom serializers
p.assignCurrentValue(bean);
// [databind#4938] Since 2.19, allow returning `null` from creator,
// but if so, need to skip all possibly relevant content
if (bean == null) {
_handleNullFromPropsBasedCreator(p, ctxt, unknown, referrings);
return null;
}
if (bean.getClass() != _beanType.getRawClass()) {
return handlePolymorphic(p, ctxt, bean, unknown);
}
if (unknown != null) { // nope, just extra unknown stuff...
bean = handleUnknownProperties(ctxt, bean, unknown);
}
// or just clean?
return deserialize(p, ctxt, bean);
}
continue;
}
// regular property? needs buffering
int ix = _propNameMatcher.matchName(propName);
if (ix >= 0) {
SettableBeanProperty prop = _propsByIndex[ix];
// [databind#3724]: Special handling because Records' ignored creator props
// weren't removed (to help in creating constructor-backed PropertyCreator)
// so they ended up in _beanProperties, unlike POJO (whose ignored
// props are removed)
// [databind#3938]: except if it's MethodProperty
if (!_beanType.isRecordType() || (prop instanceof MethodProperty)) {
// 12-Aug-2025, tatu: [databind#5237] Mergeable properties need
// special handling: must defer deserialization until POJO
// is constructed.
if (prop.isMerging()) {
TokenBuffer tb = ctxt.bufferForInputBuffering(p);
tb.copyCurrentStructure(p);
buffer.bufferMergingProperty(prop, tb);
continue;
}
try {
buffer.bufferProperty(prop, _deserializeWithErrorWrapping(p, ctxt, prop));
} catch (UnresolvedForwardReference reference) {
// 14-Jun-2016, tatu: As per [databind#1261], looks like we need additional
// handling of forward references here. Not exactly sure why existing
// facilities did not cover, but this does appear to solve the problem
BeanReferring referring = handleUnresolvedReference(ctxt,
prop, buffer, reference);
if (referrings == null) {
referrings = new ArrayList<>();
}
referrings.add(referring);
}
continue;
}
}
// "any property"?
if (_anySetter != null) {
try {
// [databind#4639] Since 2.18.1 AnySetter might not part of the creator, but just some field.
if (_anySetter.isFieldType() ||
// [databind#4639] 2.18.2: Also should account for setter type :-/
_anySetter.isSetterType()) {
buffer.bufferAnyProperty(_anySetter, propName, _anySetter.deserialize(p, ctxt));
} else {
buffer.bufferAnyParameterProperty(_anySetter, propName, _anySetter.deserialize(p, ctxt));
}
} catch (Exception e) {
throw wrapAndThrow(e, _beanType.getRawClass(), propName, ctxt);
}
continue;
}
// 29-Mar-2021, tatu: [databind#3082] May skip collection if we know
// they'd just get ignored (note: any-setter handled above; unwrapped
// properties also separately handled)
if (_ignoreAllUnknown) {
// 22-Aug-2021, tatu: [databind#3252] must ensure we do skip the whole value
p.skipChildren();
continue;
}
// Ok then, let's collect the whole field; name and value
if (unknown == null) {
unknown = ctxt.bufferForInputBuffering(p);
}
unknown.writeName(propName);
unknown.copyCurrentStructure(p);
}
// We hit END_OBJECT, so:
Object bean;
try {
bean = creator.build(ctxt, buffer);
} catch (Exception e) {
return wrapInstantiationProblem(ctxt, e);
}
p.assignCurrentValue(bean);
// [databind#4938] Since 2.19, allow returning `null` from creator,
// but if so, need to skip all possibly relevant content
if (bean == null) {
_handleNullFromPropsBasedCreator(null, ctxt, unknown, referrings);
return null;
}
// 13-Apr-2020, tatu: [databind#2678] need to handle injection here
if (_injectables != null) {
injectValues(ctxt, bean);
}
if (referrings != null) {
for (BeanReferring referring : referrings) {
referring.setBean(bean);
}
}
if (unknown != null) {
// polymorphic?
if (bean.getClass() != _beanType.getRawClass()) { // lgtm [java/dereferenced-value-may-be-null]
return handlePolymorphic(null, ctxt, bean, unknown);
}
// no, just some extra unknown properties
return handleUnknownProperties(ctxt, bean, unknown);
}
return bean;
}
private BeanReferring handleUnresolvedReference(DeserializationContext ctxt,
SettableBeanProperty prop, PropertyValueBuffer buffer,
UnresolvedForwardReference reference)
throws DatabindException
{
BeanReferring referring = new BeanReferring(ctxt, reference,
prop.getType(), buffer, prop);
reference.getRoid().appendReferring(referring);
return referring;
}
    /**
     * Deserializes the value of given property, wrapping any thrown exception
     * so the resulting error carries the bean type and property name as context.
     */
    protected final Object _deserializeWithErrorWrapping(JsonParser p,
            DeserializationContext ctxt, SettableBeanProperty prop)
        throws DatabindException
    {
        try {
            return prop.deserialize(p, ctxt);
        } catch (Exception e) {
            // Rethrown with bean class + property name for a more useful message
            throw wrapAndThrow(e, _beanType.getRawClass(), prop.getName(), ctxt);
        }
    }
    /**
     * Helper method called for rare case of pointing to {@link JsonToken#VALUE_NULL}
     * token. While this is most often an erroneous condition, there is one specific
     * case with XML handling where polymorphic type with no properties is exposed
     * as such, and should be handled same as empty Object.
     */
    protected Object deserializeFromNull(JsonParser p, DeserializationContext ctxt)
        throws JacksonException
    {
        // 17-Dec-2015, tatu: Highly specialized case, mainly to support polymorphic
        // "empty" POJOs deserialized from XML, where empty XML tag synthesizes a
        // `VALUE_NULL` tokens
        /*
        if (p.canSynthesizeNulls()) {
            TokenBuffer tb = ctxt.bufferForInputBuffering(p);
            tb.writeEndObject();
            JsonParser p2 = tb.asParser(ctxt, p);
            p2.nextToken(); // to point to END_OBJECT
            // note: don't have ObjectId to consider at this point, so:
            Object ob = _vanillaProcessing ? _vanillaDeserialize(p2, ctxt, JsonToken.END_OBJECT)
                : deserializeFromObject(p2, ctxt);
            p2.close();
            tb.close();
            return ob;
        }
        */
        // NOTE: synthesized-null path above is currently disabled, so any VALUE_NULL
        // reaching this method is reported as an unexpected token via the context
        return ctxt.handleUnexpectedToken(getValueType(ctxt), p);
    }
    /**
     * Handles deserialization of a POJO from JSON Array input: either via an
     * array-delegating (or plain delegating) creator, by coercing an empty Array
     * (per configured {@link CoercionAction}), or by unwrapping a single-element
     * Array when {@code UNWRAP_SINGLE_VALUE_ARRAYS} is enabled.
     */
    @Override
    protected Object _deserializeFromArray(JsonParser p, DeserializationContext ctxt) throws JacksonException
    {
        // note: cannot call `_delegateDeserializer()` since order reversed here:
        ValueDeserializer<Object> delegateDeser = _arrayDelegateDeserializer;
        // fallback to non-array delegate
        if ((delegateDeser != null) || ((delegateDeser = _delegateDeserializer) != null)) {
            Object bean = _valueInstantiator.createUsingArrayDelegate(ctxt,
                    delegateDeser.deserialize(p, ctxt));
            if (_injectables != null) {
                injectValues(ctxt, bean);
            }
            return bean;
        }
        final CoercionAction act = _findCoercionFromEmptyArray(ctxt);
        final boolean unwrap = ctxt.isEnabled(DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS);
        if (unwrap || (act != CoercionAction.Fail)) {
            JsonToken unwrappedToken = p.nextToken();
            if (unwrappedToken == JsonToken.END_ARRAY) {
                // Empty Array: apply configured coercion (empty value, null, or fail)
                switch (act) {
                case AsEmpty:
                    return getEmptyValue(ctxt);
                case AsNull:
                case TryConvert:
                    return getNullValue(ctxt);
                default:
                }
                return ctxt.handleUnexpectedToken(getValueType(ctxt), JsonToken.START_ARRAY, p, null);
            }
            if (unwrap) {
                // 23-Aug-2022, tatu: To prevent unbounded nested arrays, we better
                // check there is NOT another START_ARRAY lurking there..
                if (unwrappedToken == JsonToken.START_ARRAY) {
                    JavaType targetType = getValueType(ctxt);
                    return ctxt.handleUnexpectedToken(targetType, JsonToken.START_ARRAY, p,
                            "Cannot deserialize value of type %s from deeply-nested Array: only single wrapper allowed with `%s`",
                            ClassUtil.getTypeDescription(targetType),
                            "DeserializationFeature.UNWRAP_SINGLE_VALUE_ARRAYS");
                }
                final Object value = deserialize(p, ctxt);
                // Single-element Array must be closed right after the value
                if (p.nextToken() != JsonToken.END_ARRAY) {
                    handleMissingEndArrayForSingle(p, ctxt);
                }
                return value;
            }
            // 15-Nov-2022, tatu: ... we probably should pass original `JsonToken.START_ARRAY`
            // as unexpected token, since `p` now points to `unwrappedToken` instead...
        }
        return ctxt.handleUnexpectedToken(getValueType(ctxt), p);
    }
/*
/**********************************************************************
/* Deserializing when we have to consider an active View
/**********************************************************************
*/
    /**
     * Deserialization loop used when a JSON View is active: properties not visible
     * in the active view are skipped (or reported as input mismatch when
     * {@code FAIL_ON_UNEXPECTED_VIEW_PROPERTIES} is enabled) instead of being set.
     */
    protected final Object deserializeWithView(JsonParser p, DeserializationContext ctxt,
            Object bean, Class<?> activeView)
        throws JacksonException
    {
        // Matcher protocol: non-negative index == known property; otherwise sentinel
        for (int ix = p.currentNameMatch(_propNameMatcher); ; ix = p.nextNameMatch(_propNameMatcher)) {
            if (ix >= 0) {
                p.nextToken();
                SettableBeanProperty prop = _propsByIndex[ix];
                if (!prop.visibleInView(activeView)) {
                    // [databind#437]: fields in other views to be considered as unknown properties
                    if (ctxt.isEnabled(DeserializationFeature.FAIL_ON_UNEXPECTED_VIEW_PROPERTIES)){
                        ctxt.reportInputMismatch(handledType(),
                                String.format("Input mismatch while deserializing %s. Property '%s' is not part of current active view '%s'" +
                                " (disable 'DeserializationFeature.FAIL_ON_UNEXPECTED_VIEW_PROPERTIES' to allow)",
                                ClassUtil.nameOf(handledType()), prop.getName(), activeView.getName()));
                    }
                    p.skipChildren();
                    continue;
                }
                try {
                    prop.deserializeAndSet(p, ctxt, bean);
                } catch (Exception e) {
                    wrapAndThrow(e, bean, prop.getName(), ctxt);
                }
                continue;
            }
            if (ix != PropertyNameMatcher.MATCH_END_OBJECT) {
                if (ix != PropertyNameMatcher.MATCH_UNKNOWN_NAME) {
                    // Unexpected (non-name, non-end) token within Object
                    return _handleUnexpectedWithin(p, ctxt, bean);
                }
                p.nextToken();
                handleUnknownVanilla(p, ctxt, bean, p.currentName());
                continue;
            }
            // END_OBJECT reached: bean fully populated
            return bean;
        }
    }
/*
/**********************************************************************
/* Handling for cases where we have "unwrapped" values
/**********************************************************************
*/
    /**
     * Method called when there are declared "unwrapped" properties which need
     * special handling: known properties are set directly, while all other
     * content is buffered into a {@link TokenBuffer} handed to the unwrapped
     * property handler after the main loop completes.
     */
    @SuppressWarnings("resource")
    protected Object deserializeWithUnwrapped(JsonParser p, DeserializationContext ctxt)
        throws JacksonException
    {
        // Delegate- and property-based creators take precedence over default path
        if (_delegateDeserializer != null) {
            return _valueInstantiator.createUsingDelegate(ctxt, _delegateDeserializer.deserialize(p, ctxt));
        }
        if (_propertyBasedCreator != null) {
            return deserializeUsingPropertyBasedWithUnwrapped(p, ctxt);
        }
        TokenBuffer tokens = ctxt.bufferForInputBuffering(p);
        tokens.writeStartObject();
        final Object bean = _valueInstantiator.createUsingDefault(ctxt);
        // [databind#631]: Assign current value, to be accessible by custom serializers
        p.assignCurrentValue(bean);
        if (_injectables != null) {
            injectValues(ctxt, bean);
        }
        final Class<?> activeView = _needViewProcesing ? ctxt.getActiveView() : null;
        for (int ix = p.currentNameMatch(_propNameMatcher); ; ix = p.nextNameMatch(_propNameMatcher)) {
            if (ix >= 0) { // common case
                p.nextToken();
                SettableBeanProperty prop = _propsByIndex[ix];
                if ((activeView != null) && !prop.visibleInView(activeView)) {
                    p.skipChildren();
                    continue;
                }
                try {
                    prop.deserializeAndSet(p, ctxt, bean);
                } catch (Exception e) {
                    throw wrapAndThrow(e, bean, prop.getName(), ctxt);
                }
                continue;
            }
            if (ix == PropertyNameMatcher.MATCH_END_OBJECT) {
                break;
            }
            if (ix == PropertyNameMatcher.MATCH_ODD_TOKEN) {
                return _handleUnexpectedWithin(p, ctxt, bean);
            }
            final String propName = p.currentName();
            p.nextToken();
            // Things marked as ignorable should not be passed to any setter
            if (IgnorePropertiesUtil.shouldIgnore(propName, _ignorableProps, _includableProps)) {
                handleIgnoredProperty(p, ctxt, bean, propName);
                continue;
            }
            // 29-Nov-2016, tatu: probably should try to avoid sending content
            // both to any setter AND buffer... but, for now, the only thing
            // we can do.
            // how about any setter? We'll get copies but...
            if (_anySetter == null) {
                // but... others should be passed to unwrapped property deserializers
                tokens.writeName(propName);
                tokens.copyCurrentStructure(p);
                continue;
            }
            // Need to copy to a separate buffer first
            TokenBuffer b2 = ctxt.bufferAsCopyOfValue(p);
            tokens.writeName(propName);
            tokens.append(b2);
            try {
                _anySetter.deserializeAndSet(b2.asParserOnFirstToken(ctxt), ctxt, bean, propName);
            } catch (Exception e) {
                throw wrapAndThrow(e, bean, propName, ctxt);
            }
        }
        tokens.writeEndObject();
        // Hand accumulated "leftover" tokens to unwrapped-property handling
        _unwrappedPropertyHandler.processUnwrapped(p, ctxt, bean, tokens);
        return bean;
    }
    /**
     * Variant of unwrapped-property handling for an already-instantiated bean:
     * known properties are set directly on {@code bean}, everything else is
     * buffered into a {@link TokenBuffer} that the unwrapped property handler
     * processes at the end.
     */
    @SuppressWarnings("resource")
    protected Object deserializeWithUnwrapped(JsonParser p, DeserializationContext ctxt,
            Object bean)
        throws JacksonException
    {
        JsonToken t = p.currentToken();
        if (t == JsonToken.START_OBJECT) {
            t = p.nextToken();
        }
        TokenBuffer tokens = ctxt.bufferForInputBuffering(p);
        tokens.writeStartObject();
        final Class<?> activeView = _needViewProcesing ? ctxt.getActiveView() : null;
        for (int ix = p.currentNameMatch(_propNameMatcher); ; ix = p.nextNameMatch(_propNameMatcher)) {
            if (ix >= 0) { // common case
                p.nextToken();
                SettableBeanProperty prop = _propsByIndex[ix];
                if ((activeView != null) && !prop.visibleInView(activeView)) {
                    p.skipChildren();
                    continue;
                }
                try {
                    prop.deserializeAndSet(p, ctxt, bean);
                } catch (Exception e) {
                    throw wrapAndThrow(e, bean, prop.getName(), ctxt);
                }
                continue;
            }
            if (ix == PropertyNameMatcher.MATCH_END_OBJECT) {
                break;
            }
            if (ix == PropertyNameMatcher.MATCH_ODD_TOKEN) {
                return _handleUnexpectedWithin(p, ctxt, bean);
            }
            final String propName = p.currentName();
            p.nextToken();
            if (IgnorePropertiesUtil.shouldIgnore(propName, _ignorableProps, _includableProps)) {
                handleIgnoredProperty(p, ctxt, bean, propName);
                continue;
            }
            // 29-Nov-2016, tatu: probably should try to avoid sending content
            // both to any setter AND buffer... but, for now, the only thing
            // we can do.
            // how about any setter? We'll get copies but...
            if (_anySetter == null) {
                // but... others should be passed to unwrapped property deserializers
                tokens.writeName(propName);
                tokens.copyCurrentStructure(p);
            } else {
                // Need to copy to a separate buffer first
                TokenBuffer b2 = ctxt.bufferAsCopyOfValue(p);
                tokens.writeName(propName);
                tokens.append(b2);
                try {
                    _anySetter.deserializeAndSet(b2.asParserOnFirstToken(ctxt), ctxt, bean, propName);
                } catch (Exception e) {
                    throw wrapAndThrow(e, bean, propName, ctxt);
                }
            }
        }
        tokens.writeEndObject();
        _unwrappedPropertyHandler.processUnwrapped(p, ctxt, bean, tokens);
        return bean;
    }
    /**
     * Handles the combination of property-based Creator and unwrapped properties:
     * creator arguments are collected into a {@link PropertyValueBuffer} while all
     * remaining content is buffered into a {@link TokenBuffer} for the unwrapped
     * property handler to process once the bean has been constructed.
     */
    @SuppressWarnings("resource")
    protected Object deserializeUsingPropertyBasedWithUnwrapped(JsonParser p, DeserializationContext ctxt)
        throws JacksonException
    {
        // 01-Dec-2016, tatu: Note: This IS legal to call, but only when unwrapped
        // value itself is NOT passed via `CreatorProperty` (which isn't supported).
        // Ok however to pass via setter or field.
        final PropertyBasedCreator creator = _propertyBasedCreator;
        PropertyValueBuffer buffer = creator.startBuilding(p, ctxt, _objectIdReader);
        TokenBuffer tokens = ctxt.bufferForInputBuffering(p);
        tokens.writeStartObject();
        JsonToken t = p.currentToken();
        for (; t == JsonToken.PROPERTY_NAME; t = p.nextToken()) {
            String propName = p.currentName();
            p.nextToken(); // to point to value
            // creator property?
            final SettableBeanProperty creatorProp = creator.findCreatorProperty(propName);
            // Object Id property?
            if (buffer.readIdProperty(propName) && creatorProp == null) {
                continue;
            }
            if (creatorProp != null) {
                // [databind#1381]: if useInput=FALSE, skip deserialization from input
                if (creatorProp.isInjectionOnly()) {
                    // Skip the input value, will be injected later in PropertyValueBuffer
                    p.skipChildren();
                    continue;
                }
                // Last creator property to set?
                if (buffer.assignParameter(creatorProp,
                        _deserializeWithErrorWrapping(p, ctxt, creatorProp))) {
                    t = p.nextToken(); // to move to following PROPERTY_NAME/END_OBJECT
                    Object bean;
                    try {
                        bean = creator.build(ctxt, buffer);
                    } catch (Exception e) {
                        bean = wrapInstantiationProblem(ctxt, e);
                    }
                    // [databind#631]: Assign current value, to be accessible by custom serializers
                    p.assignCurrentValue(bean);
                    // [databind#4938] Since 2.19, allow returning `null` from creator,
                    // but if so, need to skip all possibly relevant content
                    if (bean == null) {
                        // 13-Mar-2025, tatu: We don't have "referrings" here for some reason...
                        //    Nor "unknown" since unwrapping makes it impossible to tell unwrapped
                        //    and unknown apart
                        _handleNullFromPropsBasedCreator(p, ctxt, null, null);
                        return null;
                    }
                    // if so, need to copy all remaining tokens into buffer
                    while (t == JsonToken.PROPERTY_NAME) {
                        // NOTE: do NOT skip name as it needs to be copied; `copyCurrentStructure` does that
                        tokens.copyCurrentStructure(p);
                        t = p.nextToken();
                    }
                    // 28-Aug-2018, tatu: Let's add sanity check here, easier to catch off-by-some
                    //    problems if we maintain invariants
                    if (t != JsonToken.END_OBJECT) {
                        ctxt.reportWrongTokenException(this, JsonToken.END_OBJECT,
                                "Attempted to unwrap '%s' value",
                                handledType().getName());
                    }
                    tokens.writeEndObject();
                    if (bean.getClass() != _beanType.getRawClass()) {
                        // !!! 08-Jul-2011, tatu: Could probably support; but for now
                        // it's too complicated, so bail out
                        return ctxt.reportInputMismatch(creatorProp,
                                "Cannot create polymorphic instances with unwrapped values");
                    }
                    return _unwrappedPropertyHandler.processUnwrapped(p, ctxt, bean, tokens);
                }
                continue;
            }
            // regular property? needs buffering
            int ix = _propNameMatcher.matchName(propName);
            if (ix >= 0) {
                SettableBeanProperty prop = _propsByIndex[ix];
                buffer.bufferProperty(prop, _deserializeWithErrorWrapping(p, ctxt, prop));
                continue;
            }
            // Things marked as ignorable should not be passed to any setter
            if (IgnorePropertiesUtil.shouldIgnore(propName, _ignorableProps, _includableProps)) {
                handleIgnoredProperty(p, ctxt, handledType(), propName);
                continue;
            }
            // 29-Nov-2016, tatu: probably should try to avoid sending content
            // both to any setter AND buffer... but, for now, the only thing
            // we can do.
            // how about any setter? We'll get copies but...
            if (_anySetter == null) {
                // but... others should be passed to unwrapped property deserializers
                tokens.writeName(propName);
                tokens.copyCurrentStructure(p);
            } else {
                // Need to copy to a separate buffer first
                TokenBuffer b2 = ctxt.bufferAsCopyOfValue(p);
                tokens.writeName(propName);
                tokens.append(b2);
                try {
                    buffer.bufferAnyProperty(_anySetter, propName,
                            _anySetter.deserialize(b2.asParserOnFirstToken(ctxt), ctxt));
                } catch (Exception e) {
                    throw wrapAndThrow(e, _beanType.getRawClass(), propName, ctxt);
                }
            }
        }
        // We could still have some not-yet-set creator properties that are unwrapped.
        // These have to be processed last, because 'tokens' contains all properties
        // that remain after regular deserialization.
        buffer = _unwrappedPropertyHandler.processUnwrappedCreatorProperties(p, ctxt, buffer, tokens);
        // We hit END_OBJECT, so:
        Object bean;
        try {
            bean = creator.build(ctxt, buffer);
        } catch (Exception e) {
            return wrapInstantiationProblem(ctxt, e);
        }
        // [databind#4938] Since 2.19, allow returning `null` from creator,
        // but if so, need to skip all possibly relevant content
        if (bean == null) {
            // no "referrings" here either:
            _handleNullFromPropsBasedCreator(null, ctxt, null, null);
            return null;
        }
        return _unwrappedPropertyHandler.processUnwrapped(p, ctxt, bean, tokens);
    }
/*
/**********************************************************************
/* Handling for cases where we have property/-ies with external type id
/**********************************************************************
*/
    /**
     * Entry point for deserializing a bean with one or more properties carrying
     * external type ids. Property-based creator handling takes precedence; then
     * delegating creator (which bypasses external-type-id handling entirely);
     * otherwise the bean is instantiated via default creator and bound via
     * {@code deserializeWithExternalTypeId(p, ctxt, bean)}.
     */
    protected Object deserializeWithExternalTypeId(JsonParser p, DeserializationContext ctxt)
        throws JacksonException
    {
        if (_propertyBasedCreator != null) {
            return deserializeUsingPropertyBasedWithExternalTypeId(p, ctxt);
        }
        if (_delegateDeserializer != null) {
            /* 24-Nov-2015, tatu: Use of delegating creator needs to have precedence, and basically
             *    external type id handling just has to be ignored, as they would relate to target
             *    type and not delegate type. Whether this works as expected is another story, but
             *    there's no other way to really mix these conflicting features.
             */
            return _valueInstantiator.createUsingDelegate(ctxt,
                    _delegateDeserializer.deserialize(p, ctxt));
        }
        return deserializeWithExternalTypeId(p, ctxt, _valueInstantiator.createUsingDefault(ctxt));
    }
protected Object deserializeWithExternalTypeId(JsonParser p, DeserializationContext ctxt,
Object bean)
throws JacksonException
{
return _deserializeWithExternalTypeId(p, ctxt, bean, _externalTypeIdHandler.start());
}
    /**
     * Main binding loop for a bean with external-type-id properties: regular
     * properties are set directly, while external type ids and the values that
     * depend on them are collected by given {@link ExternalTypeHandler} and
     * resolved via {@code ext.complete()} once END_OBJECT is reached.
     */
    protected Object _deserializeWithExternalTypeId(JsonParser p, DeserializationContext ctxt,
            Object bean, ExternalTypeHandler ext)
        throws JacksonException
    {
        final Class<?> activeView = _needViewProcesing ? ctxt.getActiveView() : null;
        for (int ix = p.currentNameMatch(_propNameMatcher); ; ix = p.nextNameMatch(_propNameMatcher)) {
            if (ix >= 0) { // normal case
                SettableBeanProperty prop = _propsByIndex[ix];
                JsonToken t = p.nextToken();
                // [JACKSON-831]: may have property AND be used as external type id:
                if (t.isScalarValue()) {
                    ext.handleTypePropertyValue(p, ctxt, p.currentName(), bean);
                }
                if (activeView != null && !prop.visibleInView(activeView)) {
                    p.skipChildren();
                    continue;
                }
                try {
                    prop.deserializeAndSet(p, ctxt, bean);
                } catch (Exception e) {
                    throw wrapAndThrow(e, bean, prop.getName(), ctxt);
                }
                continue;
            }
            if (ix == PropertyNameMatcher.MATCH_END_OBJECT) {
                break;
            }
            if (ix != PropertyNameMatcher.MATCH_UNKNOWN_NAME) {
                return _handleUnexpectedWithin(p, ctxt, bean);
            }
            // ignorable things should be ignored
            final String propName = p.currentName();
            p.nextToken();
            if (IgnorePropertiesUtil.shouldIgnore(propName, _ignorableProps, _includableProps)) {
                handleIgnoredProperty(p, ctxt, bean, propName);
                continue;
            }
            // but others are likely to be part of external type id thingy...
            if (ext.handlePropertyValue(p, ctxt, propName, bean)) {
                continue;
            }
            // if not, the usual fallback handling:
            if (_anySetter != null) {
                try {
                    _anySetter.deserializeAndSet(p, ctxt, bean, propName);
                } catch (Exception e) {
                    throw wrapAndThrow(e, bean, propName, ctxt);
                }
                continue;
            }
            // Unknown: let's call handler method
            handleUnknownProperty(p, ctxt, bean, p.currentName());
        }
        // and when we get this far, let's try finalizing the deal:
        return ext.complete(p, ctxt, bean);
    }
    /**
     * Handles the combination of property-based Creator and external type ids:
     * creator arguments are buffered in a {@link PropertyValueBuffer} until the
     * creator can be invoked, while the {@link ExternalTypeHandler} collects
     * type-id/value pairs for properties whose type information is carried by a
     * separate (external) property; {@code ext.complete()} resolves the rest.
     */
    @SuppressWarnings("resource")
    protected Object deserializeUsingPropertyBasedWithExternalTypeId(JsonParser p, DeserializationContext ctxt)
        throws JacksonException
    {
        final ExternalTypeHandler ext = _externalTypeIdHandler.start();
        final PropertyBasedCreator creator = _propertyBasedCreator;
        PropertyValueBuffer buffer = creator.startBuilding(p, ctxt, _objectIdReader);
        for (JsonToken t = p.currentToken(); t == JsonToken.PROPERTY_NAME; t = p.nextToken()) {
            String propName = p.currentName();
            t = p.nextToken(); // to point to value
            // creator property?
            final SettableBeanProperty creatorProp = creator.findCreatorProperty(propName);
            // Object Id property?
            if (buffer.readIdProperty(propName) && creatorProp == null) {
                continue;
            }
            if (creatorProp != null) {
                // [databind#1381]: if useInput=FALSE, skip deserialization from input
                if (creatorProp.isInjectionOnly()) {
                    // Skip the input value, will be injected later in PropertyValueBuffer
                    p.skipChildren();
                    continue;
                }
                // first: let's check to see if this might be part of value with external type id:
                // 11-Sep-2015, tatu: Important; do NOT pass buffer as last arg, but null,
                // since it is not the bean
                if (!ext.handlePropertyValue(p, ctxt, propName, null)) {
                    // Last creator property to set?
                    if (buffer.assignParameter(creatorProp, _deserializeWithErrorWrapping(p, ctxt, creatorProp))) {
                        t = p.nextToken(); // to move to following PROPERTY_NAME/END_OBJECT
                        Object bean;
                        try {
                            bean = creator.build(ctxt, buffer);
                        } catch (Exception e) {
                            throw wrapAndThrow(e, _beanType.getRawClass(), propName, ctxt);
                        }
                        if (bean.getClass() != _beanType.getRawClass()) {
                            // !!! 08-Jul-2011, tatu: Could theoretically support; but for now
                            // it's too complicated, so bail out
                            return ctxt.reportBadDefinition(_beanType, String.format(
                                    "Cannot create polymorphic instances with external type ids (%s -> %s)",
                                    _beanType, bean.getClass()));
                        }
                        // 19-Feb-2021, tatu: [databind#3045] Better delegate
                        return _deserializeWithExternalTypeId(p, ctxt, bean, ext);
                    }
                }
                continue;
            }
            // regular property? needs buffering
            int ix = _propNameMatcher.matchName(propName);
            if (ix >= 0) {
                SettableBeanProperty prop = _propsByIndex[ix];
                // [databind#3045]: may have property AND be used as external type id:
                if (t.isScalarValue()) {
                    ext.handleTypePropertyValue(p, ctxt, propName, null);
                }
                buffer.bufferProperty(prop, prop.deserialize(p, ctxt));
                continue;
            }
            // external type id (or property that depends on it)?
            if (ext.handlePropertyValue(p, ctxt, propName, null)) {
                continue;
            }
            // Things marked as ignorable should not be passed to any setter
            if (IgnorePropertiesUtil.shouldIgnore(propName, _ignorableProps, _includableProps)) {
                handleIgnoredProperty(p, ctxt, handledType(), propName);
                continue;
            }
            // "any property"?
            if (_anySetter != null) {
                buffer.bufferAnyProperty(_anySetter, propName,
                        _anySetter.deserialize(p, ctxt));
                continue;
            }
            // Unknown: let's call handler method
            handleUnknownProperty(p, ctxt, _valueClass, propName);
        }
        // We hit END_OBJECT; resolve the pieces:
        try {
            return ext.complete(p, ctxt, buffer, creator);
        } catch (Exception e) {
            return wrapInstantiationProblem(ctxt, e);
        }
    }
protected void _handleNullFromPropsBasedCreator(JsonParser p, DeserializationContext ctxt,
TokenBuffer unknown, List<BeanReferring> referrings)
{
if (p != null) {
JsonToken t = p.currentToken();
while (t == JsonToken.PROPERTY_NAME) {
p.nextToken();
p.skipChildren();
t = p.nextToken();
}
}
if (unknown != null) { // nope, just extra unknown stuff...
handleUnknownProperties(ctxt, null, unknown);
}
if (referrings != null) {
for (BeanReferring referring : referrings) {
referring.setBean(null);
}
}
}
    /**
     * Method called if an unexpected token (other then {@code JsonToken.PROPERTY_NAME})
     * is found after POJO has been instantiated and partially bound: reports the
     * current token as unexpected via the context (which may throw, or return a
     * recovery value supplied by a problem handler).
     *
     * @since 3.0
     */
    protected Object _handleUnexpectedWithin(JsonParser p,
            DeserializationContext ctxt, Object bean) throws JacksonException
    {
        return ctxt.handleUnexpectedToken(getValueType(ctxt), p);
    }
static
|
BeanDeserializer
|
java
|
apache__flink
|
flink-core/src/test/java/org/apache/flink/api/common/state/AggregatingStateDeclarationTest.java
|
{
"start": 1167,
"end": 3073
}
|
class ____ {
private AggregatingStateDeclaration<Integer, Integer, Integer> aggregatingStateDeclaration;
private AggregateFunction<Integer, Integer, Integer> aggregateFunction;
@BeforeEach
void setUp() {
aggregateFunction =
new AggregateFunction<Integer, Integer, Integer>() {
@Override
public Integer createAccumulator() {
return 0;
}
@Override
public Integer add(Integer value, Integer accumulator) {
return 0;
}
@Override
public Integer getResult(Integer accumulator) {
return 0;
}
@Override
public Integer merge(Integer a, Integer b) {
return 0;
}
};
aggregatingStateDeclaration =
StateDeclarations.aggregatingStateBuilder(
"aggregatingState", TypeDescriptors.INT, aggregateFunction)
.build();
}
@Test
void testAggregatingStateDeclarationName() {
assertThat(aggregatingStateDeclaration.getName()).isEqualTo("aggregatingState");
}
@Test
void testAggregatingStateDeclarationFunc() {
assertThat(aggregatingStateDeclaration.getAggregateFunction()).isEqualTo(aggregateFunction);
}
@Test
void testAggregatingStateDeclarationType() {
assertThat(aggregatingStateDeclaration.getTypeDescriptor()).isEqualTo(TypeDescriptors.INT);
}
@Test
void testAggregatingStateDeclarationDist() {
assertThat(aggregatingStateDeclaration.getRedistributionMode())
.isEqualTo(StateDeclaration.RedistributionMode.NONE);
}
}
|
AggregatingStateDeclarationTest
|
java
|
resilience4j__resilience4j
|
resilience4j-metrics/src/test/java/io/github/resilience4j/metrics/assertion/CounterAssert.java
|
{
"start": 180,
"end": 625
}
|
class ____ extends AbstractAssert<CounterAssert, Counter> {
public CounterAssert(Counter actual) {
super(actual, CounterAssert.class);
}
public static CounterAssert assertThat(Counter actual) {
return new CounterAssert(actual);
}
public <T> CounterAssert hasValue(T expected) {
isNotNull();
Assertions.assertThat(actual.getCount()).isEqualTo(expected);
return this;
}
}
|
CounterAssert
|
java
|
apache__dubbo
|
dubbo-rpc/dubbo-rpc-triple/src/main/java/org/apache/dubbo/rpc/TriRpcStatus.java
|
{
"start": 1935,
"end": 10548
}
|
class ____ implements Serializable {
private static final long serialVersionUID = 1L;
public static final TriRpcStatus OK = fromCode(Code.OK);
public static final TriRpcStatus UNKNOWN = fromCode(Code.UNKNOWN);
public static final TriRpcStatus INTERNAL = fromCode(Code.INTERNAL);
public static final TriRpcStatus NOT_FOUND = fromCode(Code.NOT_FOUND);
public static final TriRpcStatus CANCELLED = fromCode(Code.CANCELLED);
public static final TriRpcStatus UNAVAILABLE = fromCode(Code.UNAVAILABLE);
public static final TriRpcStatus UNIMPLEMENTED = fromCode(Code.UNIMPLEMENTED);
public static final TriRpcStatus DEADLINE_EXCEEDED = fromCode(Code.DEADLINE_EXCEEDED);
public final Code code;
public final Throwable cause;
public final String description;
public TriRpcStatus(Code code, Throwable cause, String description) {
this.code = code;
this.cause = cause;
this.description = description;
}
public static TriRpcStatus fromCode(int code) {
return fromCode(Code.fromCode(code));
}
public static TriRpcStatus fromCode(Code code) {
return new TriRpcStatus(code, null, null);
}
/**
* todo The remaining exceptions are converted to status
*/
public static TriRpcStatus getStatus(Throwable throwable) {
return getStatus(throwable, null);
}
public static TriRpcStatus getStatus(Throwable throwable, String description) {
if (throwable instanceof HttpStatusException) {
int statusCode = ((HttpStatusException) throwable).getStatusCode();
return new TriRpcStatus(httpStatusToGrpcCode(statusCode), throwable, description);
}
if (throwable instanceof StatusRpcException) {
return ((StatusRpcException) throwable).getStatus();
}
if (throwable instanceof RpcException) {
RpcException rpcException = (RpcException) throwable;
Code code = dubboCodeToTriCode(rpcException.getCode());
return new TriRpcStatus(code, throwable, description);
}
if (throwable instanceof TimeoutException) {
return new TriRpcStatus(Code.DEADLINE_EXCEEDED, throwable, description);
}
return new TriRpcStatus(Code.UNKNOWN, throwable, description);
}
public static int triCodeToDubboCode(Code triCode) {
int code;
switch (triCode) {
case DEADLINE_EXCEEDED:
code = TIMEOUT_EXCEPTION;
break;
case PERMISSION_DENIED:
code = FORBIDDEN_EXCEPTION;
break;
case UNAVAILABLE:
code = NETWORK_EXCEPTION;
break;
case UNIMPLEMENTED:
code = METHOD_NOT_FOUND;
break;
default:
code = UNKNOWN_EXCEPTION;
}
return code;
}
public static Code dubboCodeToTriCode(int rpcExceptionCode) {
Code code;
switch (rpcExceptionCode) {
case TIMEOUT_EXCEPTION:
case TIMEOUT_TERMINATE:
code = Code.DEADLINE_EXCEEDED;
break;
case FORBIDDEN_EXCEPTION:
code = Code.PERMISSION_DENIED;
break;
case AUTHORIZATION_EXCEPTION:
code = Code.UNAUTHENTICATED;
break;
case LIMIT_EXCEEDED_EXCEPTION:
case NETWORK_EXCEPTION:
code = Code.UNAVAILABLE;
break;
case METHOD_NOT_FOUND:
code = Code.UNIMPLEMENTED;
break;
case SERIALIZATION_EXCEPTION:
code = Code.INTERNAL;
break;
default:
code = Code.UNKNOWN;
break;
}
return code;
}
public static String limitSizeTo1KB(String desc) {
if (desc.length() < 1024) {
return desc;
} else {
return desc.substring(0, 1024);
}
}
public static String decodeMessage(String raw) {
if (StringUtils.isEmpty(raw)) {
return "";
}
return QueryStringDecoder.decodeComponent(raw);
}
public static String encodeMessage(String raw) {
if (StringUtils.isEmpty(raw)) {
return "";
}
return encodeComponent(raw);
}
private static String encodeComponent(String raw) {
QueryStringEncoder encoder = new QueryStringEncoder("");
encoder.addParam("", raw);
// ?=
return encoder.toString().substring(2);
}
public static Code httpStatusToGrpcCode(int httpStatusCode) {
if (httpStatusCode >= 100 && httpStatusCode < 200) {
return Code.INTERNAL;
}
if (httpStatusCode == HttpResponseStatus.BAD_REQUEST.code()
|| httpStatusCode == HttpResponseStatus.REQUEST_HEADER_FIELDS_TOO_LARGE.code()) {
return Code.INTERNAL;
} else if (httpStatusCode == HttpResponseStatus.UNAUTHORIZED.code()) {
return Code.UNAUTHENTICATED;
} else if (httpStatusCode == HttpResponseStatus.FORBIDDEN.code()) {
return Code.PERMISSION_DENIED;
} else if (httpStatusCode == HttpResponseStatus.NOT_FOUND.code()) {
return Code.UNIMPLEMENTED;
} else if (httpStatusCode == HttpResponseStatus.BAD_GATEWAY.code()
|| httpStatusCode == HttpResponseStatus.TOO_MANY_REQUESTS.code()
|| httpStatusCode == HttpResponseStatus.SERVICE_UNAVAILABLE.code()
|| httpStatusCode == HttpResponseStatus.GATEWAY_TIMEOUT.code()) {
return Code.UNAVAILABLE;
} else {
return Code.UNKNOWN;
}
}
public static int grpcCodeToHttpStatus(Code code) {
switch (code) {
case OK:
return HttpResponseStatus.OK.code();
case CANCELLED:
return 499;
case UNKNOWN:
case DATA_LOSS:
case INTERNAL:
return HttpResponseStatus.INTERNAL_SERVER_ERROR.code();
case INVALID_ARGUMENT:
case FAILED_PRECONDITION:
case OUT_OF_RANGE:
return HttpResponseStatus.BAD_REQUEST.code();
case DEADLINE_EXCEEDED:
return HttpResponseStatus.GATEWAY_TIMEOUT.code();
case NOT_FOUND:
return HttpResponseStatus.NOT_FOUND.code();
case ALREADY_EXISTS:
case ABORTED:
return HttpResponseStatus.CONFLICT.code();
case PERMISSION_DENIED:
return HttpResponseStatus.FORBIDDEN.code();
case RESOURCE_EXHAUSTED:
return HttpResponseStatus.TOO_MANY_REQUESTS.code();
case UNIMPLEMENTED:
return HttpResponseStatus.NOT_IMPLEMENTED.code();
case UNAVAILABLE:
return HttpResponseStatus.SERVICE_UNAVAILABLE.code();
case UNAUTHENTICATED:
return HttpResponseStatus.UNAUTHORIZED.code();
default:
return -1;
}
}
public boolean isOk() {
return Code.isOk(code.code);
}
public TriRpcStatus withCause(Throwable cause) {
return new TriRpcStatus(this.code, cause, this.description);
}
public TriRpcStatus withDescription(String description) {
return new TriRpcStatus(code, cause, description);
}
public TriRpcStatus appendDescription(String description) {
if (this.description == null) {
return withDescription(description);
} else {
String newDescription = this.description + "\n" + description;
return withDescription(newDescription);
}
}
public StatusRpcException asException() {
return new StatusRpcException(this);
}
public String toEncodedMessage() {
String output = limitSizeTo1KB(toMessage());
return encodeComponent(output);
}
public String toMessageWithoutCause() {
if (description != null) {
return String.format("%s : %s", code, description);
} else {
return code.toString();
}
}
public String toMessage() {
String msg = "";
if (cause == null) {
msg += description;
} else {
String placeHolder = description == null ? "" : description;
msg += StringUtils.toString(placeHolder, cause);
}
return msg;
}
public
|
TriRpcStatus
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatus.java
|
{
"start": 5813,
"end": 7766
}
|
class ____ {
static final String STAGE = "stage";
static final String REASON = "reason";
static final String NODE = "node";
static final String DESCRIPTION = "description";
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(Integer.toString(getShardId().getId()));
builder.field(Fields.STAGE, getStage());
builder.field(SnapshotStats.Fields.STATS, stats, params);
if (getNodeId() != null) {
builder.field(Fields.NODE, getNodeId());
}
if (getFailure() != null) {
builder.field(Fields.REASON, getFailure());
}
if (getDescription() != null) {
builder.field(Fields.DESCRIPTION, getDescription());
}
builder.endObject();
return builder;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
SnapshotIndexShardStatus that = (SnapshotIndexShardStatus) o;
return stage == that.stage
&& Objects.equals(stats, that.stats)
&& Objects.equals(nodeId, that.nodeId)
&& Objects.equals(failure, that.failure)
&& Objects.equals(description, that.description);
}
@Override
public int hashCode() {
int result = stage != null ? stage.hashCode() : 0;
result = 31 * result + (stats != null ? stats.hashCode() : 0);
result = 31 * result + (nodeId != null ? nodeId.hashCode() : 0);
result = 31 * result + (failure != null ? failure.hashCode() : 0);
result = 31 * result + (description != null ? description.hashCode() : 0);
return result;
}
@Override
public String toString() {
return Strings.toString(this, true, true);
}
}
|
Fields
|
java
|
junit-team__junit5
|
junit-jupiter-migrationsupport/src/main/java/org/junit/jupiter/migrationsupport/rules/member/TestRuleAnnotatedField.java
|
{
"start": 740,
"end": 1200
}
|
class ____ extends AbstractTestRuleAnnotatedMember {
public TestRuleAnnotatedField(Object testInstance, Field field) {
super(retrieveTestRule(testInstance, field));
}
private static TestRule retrieveTestRule(Object testInstance, Field field) {
try {
return (TestRule) makeAccessible(field).get(testInstance);
}
catch (IllegalAccessException exception) {
throw ExceptionUtils.throwAsUncheckedException(exception);
}
}
}
|
TestRuleAnnotatedField
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java
|
{
"start": 2346,
"end": 3907
}
|
interface ____ {
/**
* Actions added by this plugin.
*/
default Collection<ActionHandler> getActions() {
return Collections.emptyList();
}
/**
* ActionType filters added by this plugin.
*/
default Collection<ActionFilter> getActionFilters() {
return Collections.emptyList();
}
/**
* Action filters applying to a single action added by this plugin.
*/
default Collection<MappedActionFilter> getMappedActionFilters() {
return Collections.emptyList();
}
/**
* Rest handlers added by this plugin.
*/
default Collection<RestHandler> getRestHandlers(
Settings settings,
NamedWriteableRegistry namedWriteableRegistry,
RestController restController,
ClusterSettings clusterSettings,
IndexScopedSettings indexScopedSettings,
SettingsFilter settingsFilter,
IndexNameExpressionResolver indexNameExpressionResolver,
Supplier<DiscoveryNodes> nodesInCluster,
Predicate<NodeFeature> clusterSupportsFeature
) {
return Collections.emptyList();
}
/**
* Returns headers which should be copied through rest requests on to internal requests.
*/
default Collection<RestHeaderDefinition> getRestHeaders() {
return Collections.emptyList();
}
/**
* Returns headers which should be copied from internal requests into tasks.
*/
default Collection<String> getTaskHeaders() {
return Collections.emptyList();
}
final
|
ActionPlugin
|
java
|
quarkusio__quarkus
|
integration-tests/kafka-devservices/src/test/java/io/quarkus/it/kafka/continuoustesting/DevServicesDevModeTest.java
|
{
"start": 1019,
"end": 9171
}
|
class ____ extends BaseDevServiceTest {
@RegisterExtension
public static QuarkusDevModeTest test = new QuarkusDevModeTest()
.setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class)
.deleteClass(KafkaEndpoint.class)
.addClass(BundledEndpoint.class)
.addClass(KafkaAdminManager.class)
.addAsResource(new StringAsset(
"quarkus.test.continuous-testing=disabled\n" +
"quarkus.kafka.devservices.provider=kafka-native\n" +
"quarkus.kafka.devservices.topic-partitions.test=2\n"),
"application.properties"))
.setTestArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class).addClass(KafkaAdminTest.class));
@Test
public void testDevModeServiceUpdatesContainersOnConfigChange() {
// Interacting with the app will force a refresh
// Note that driving continuous testing concurrently can sometimes cause 500s caused by containers not yet being available on slow machines
ping();
List<Container> started = getKafkaContainers(DEVELOPMENT);
assertFalse(started.isEmpty());
Container container = started.get(0);
assertSharedContainer(container);
assertTrue(Arrays.stream(container.getPorts()).noneMatch(p -> p.getPublicPort() == 6377),
"Expected random port, but got: " + Arrays.toString(container.getPorts()));
int newPort = 6388;
test.modifyResourceFile("application.properties", s -> s + "quarkus.kafka.devservices.port=" + newPort);
// Force another refresh
ping();
List<Container> newContainers = getKafkaContainersExcludingExisting(DEVELOPMENT, started);
// We expect 1 new containers, since test was not refreshed.
// On some VMs that's what we get, but on others, a test-mode augmentation happens, and then we get two containers
assertEquals(1, newContainers.size(),
"There were " + newContainers.size() + " new containers, and should have been 1 or 2. New containers: "
+ prettyPrintContainerList(newContainers)
+ "\n Old containers: " + prettyPrintContainerList(started) + "\n All containers: "
+ prettyPrintContainerList(getKafkaContainers(DEVELOPMENT))); // this can be wrong
// We need to inspect the dev-mode container; we don't have a non-brittle way of distinguishing them, so just look in them all
boolean hasRightPort = newContainers.stream()
.anyMatch(newContainer -> hasPublicPort(newContainer, newPort));
assertTrue(hasRightPort,
"Expected port " + newPort + ", but got: "
+ newContainers.stream().map(c -> Arrays.toString(c.getPorts())).collect(Collectors.joining(", ")));
}
private void assertSharedContainer(Container container) {
assertEquals(DEVELOPMENT.name(), container.getLabels().get(Labels.QUARKUS_LAUNCH_MODE));
assertEquals("kafka", container.getLabels().get(Labels.QUARKUS_DEV_SERVICE));
assertEquals("kafka", container.getLabels().get("quarkus-dev-service-kafka"));
}
@Test
public void testDevModeServiceDoesNotRestartContainersOnCodeChange() {
ping();
List<Container> started = getKafkaContainers(DEVELOPMENT);
assertFalse(started.isEmpty());
Container container = started.get(0);
assertSharedContainer(container);
assertTrue(Arrays.stream(container.getPorts()).noneMatch(p -> p.getPublicPort() == 6377),
"Expected random port 6377, but got: " + Arrays.toString(container.getPorts()));
// Make a change that shouldn't affect dev services
test.modifySourceFile(BundledEndpoint.class, s -> s.replaceAll("topic", "tropic"));
ping();
List<Container> newContainers = getKafkaContainersExcludingExisting(DEVELOPMENT, started);
// No new containers should have spawned
assertEquals(0, newContainers.size(),
"New containers: " + newContainers + "\n Old containers: " + started + "\n All containers: "
+ getKafkaContainers(DEVELOPMENT)); // this can be wrong
}
@Test
public void testDevModeKeepsSameInstanceWhenRefreshedOnSecondChange() {
// Step 1: Ensure we have a dev service running
System.out.println("Step 1: Ensure we have a dev service running");
ping();
List<Container> step1Containers = getKafkaContainers(DEVELOPMENT);
assertFalse(step1Containers.isEmpty());
Container container = step1Containers.get(0);
assertSharedContainer(container);
assertFalse(hasPublicPort(container, 6377));
// Step 2: Make a change that should affect dev services
System.out.println("Step 2: Make a change that should affect dev services");
int someFixedPort = 36377;
// Make a change that SHOULD affect dev services
test.modifyResourceFile("application.properties",
s -> s
+ "quarkus.kafka.devservices.port=" + someFixedPort + "\n");
ping();
List<Container> step2Containers = getKafkaContainersExcludingExisting(DEVELOPMENT, step1Containers);
// New containers should have spawned
assertEquals(1, step2Containers.size(),
"New containers: " + step2Containers + "\n Old containers: " + step1Containers + "\n All containers: "
+ getKafkaContainers(DEVELOPMENT));
assertTrue(hasPublicPort(step2Containers.get(0), someFixedPort));
// Step 3: Now change back to a random port, which should cause a new container to spawn
System.out.println("Step 3: Now change back to a random port, which should cause a new container to spawn");
test.modifyResourceFile("application.properties",
s -> s.replaceAll("quarkus.kafka.devservices.port=" + someFixedPort, ""));
ping();
List<Container> step3Containers = getKafkaContainersExcludingExisting(DEVELOPMENT, step2Containers);
// New containers should have spawned
assertEquals(1, step3Containers.size(),
"New containers: " + step3Containers + "\n Old containers: " + step2Containers + "\n All containers: "
+ getKafkaContainers(DEVELOPMENT));
// Step 4: Now make a change that should not affect dev services
System.out.println("Step 4: Now make a change that should not affect dev services");
test.modifySourceFile(BundledEndpoint.class, s -> s.replaceAll("topic", "tropic"));
ping();
List<Container> step4Containers = getKafkaContainersExcludingExisting(DEVELOPMENT, step3Containers);
// No new containers should have spawned
assertEquals(0, step4Containers.size(),
"New containers: " + step4Containers + "\n Old containers: " + step3Containers + "\n All containers: "
+ getKafkaContainers(DEVELOPMENT)); // this can be wrong
// Step 5: Now make a change that should not affect dev services, but is not the same as the previous change
System.out.println(
"Step 5: Now make a change that should not affect dev services, but is not the same as the previous change");
test.modifySourceFile(BundledEndpoint.class, s -> s.replaceAll("tropic", "topic"));
ping();
List<Container> step5Containers = getKafkaContainersExcludingExisting(DEVELOPMENT, step3Containers);
// No new containers should have spawned
assertEquals(0, step5Containers.size(),
"New containers: " + step5Containers + "\n Old containers: " + step5Containers + "\n All containers: "
+ getKafkaContainers(DEVELOPMENT)); // this can be wrong
}
void ping() {
when().get("/kafka/partitions/test").then()
.statusCode(200)
.body(is("2"));
}
}
|
DevServicesDevModeTest
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/introspect/TestAutoDetect.java
|
{
"start": 645,
"end": 843
}
|
class ____ {
String _a;
protected ProtectedBean(String a) { this._a = a; }
}
// Private scalar constructor ok, but only if annotated (or level changed)
static
|
ProtectedBean
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/policies/manager/FederationPolicyManager.java
|
{
"start": 2015,
"end": 2279
}
|
interface ____ design binds together {@link FederationAMRMProxyPolicy} and
* {@link FederationRouterPolicy} and provide lifecycle support for
* serialization and deserialization, to reduce configuration mistakes
* (combining incompatible policies).
*
*/
public
|
by
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/RedisJsonCommandBuilder.java
|
{
"start": 993,
"end": 14132
}
|
class ____<K, V> extends BaseRedisCommandBuilder<K, V> {
private final Supplier<JsonParser> parser;
RedisJsonCommandBuilder(RedisCodec<K, V> codec, Supplier<JsonParser> theParser) {
super(codec);
parser = theParser;
}
Command<K, V, List<Long>> jsonArrappend(K key, JsonPath jsonPath, JsonValue... jsonValues) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
if (jsonPath != null && !jsonPath.isRootPath()) {
// OPTIONAL as per API
args.add(jsonPath.toString());
}
for (JsonValue value : jsonValues) {
args.add(value.asByteBuffer().array());
}
return createCommand(JSON_ARRAPPEND, (CommandOutput) new ArrayOutput<>(codec), args);
}
Command<K, V, List<Long>> jsonArrappend(K key, JsonPath jsonPath, String... jsonValues) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
if (jsonPath != null && !jsonPath.isRootPath()) {
// OPTIONAL as per API
args.add(jsonPath.toString());
}
for (String value : jsonValues) {
args.add(value);
}
return createCommand(JSON_ARRAPPEND, (CommandOutput) new ArrayOutput<>(codec), args);
}
Command<K, V, List<Long>> jsonArrindex(K key, JsonPath jsonPath, JsonValue value, JsonRangeArgs range) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
args.add(jsonPath.toString());
args.add(value.asByteBuffer().array());
if (range != null) {
// OPTIONAL as per API
range.build(args);
}
return createCommand(JSON_ARRINDEX, (CommandOutput) new ArrayOutput<>(codec), args);
}
Command<K, V, List<Long>> jsonArrindex(K key, JsonPath jsonPath, String value, JsonRangeArgs range) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
args.add(jsonPath.toString());
args.add(value);
if (range != null) {
// OPTIONAL as per API
range.build(args);
}
return createCommand(JSON_ARRINDEX, (CommandOutput) new ArrayOutput<>(codec), args);
}
Command<K, V, List<Long>> jsonArrinsert(K key, JsonPath jsonPath, int index, JsonValue... values) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
args.add(jsonPath.toString());
args.add(index);
for (JsonValue value : values) {
args.add(value.asByteBuffer().array());
}
return createCommand(JSON_ARRINSERT, (CommandOutput) new ArrayOutput<>(codec), args);
}
Command<K, V, List<Long>> jsonArrinsert(K key, JsonPath jsonPath, int index, String... values) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
args.add(jsonPath.toString());
args.add(index);
for (String value : values) {
args.add(value);
}
return createCommand(JSON_ARRINSERT, (CommandOutput) new ArrayOutput<>(codec), args);
}
Command<K, V, List<Long>> jsonArrlen(K key, JsonPath jsonPath) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
if (jsonPath != null && !jsonPath.isRootPath()) {
// OPTIONAL as per API
args.add(jsonPath.toString());
}
return createCommand(JSON_ARRLEN, (CommandOutput) new ArrayOutput<>(codec), args);
}
Command<K, V, List<JsonValue>> jsonArrpop(K key, JsonPath jsonPath, int index) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
if (jsonPath != null) {
if (index != -1) {
// OPTIONAL as per API
args.add(jsonPath.toString());
args.add(index);
} else if (!jsonPath.isRootPath()) {
// OPTIONAL as per API
args.add(jsonPath.toString());
}
}
return createCommand(JSON_ARRPOP, new JsonValueListOutput<>(codec, parser.get()), args);
}
Command<K, V, List<String>> jsonArrpopRaw(K key, JsonPath jsonPath, int index) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
if (jsonPath != null) {
if (index != -1) {
// OPTIONAL as per API
args.add(jsonPath.toString());
args.add(index);
} else if (!jsonPath.isRootPath()) {
// OPTIONAL as per API
args.add(jsonPath.toString());
}
}
return createCommand(JSON_ARRPOP, new StringListOutput<>(codec), args);
}
Command<K, V, List<Long>> jsonArrtrim(K key, JsonPath jsonPath, JsonRangeArgs range) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
args.add(jsonPath.toString());
if (range != null) {
range.build(args);
}
return createCommand(JSON_ARRTRIM, (CommandOutput) new ArrayOutput<>(codec), args);
}
Command<K, V, Long> jsonClear(K key, JsonPath jsonPath) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
if (jsonPath != null && !jsonPath.isRootPath()) {
// OPTIONAL as per API
args.add(jsonPath.toString());
}
return createCommand(JSON_CLEAR, new IntegerOutput<>(codec), args);
}
Command<K, V, List<JsonValue>> jsonGet(K key, JsonGetArgs options, JsonPath... jsonPaths) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
if (options != null) {
// OPTIONAL as per API
options.build(args);
}
if (jsonPaths != null) {
// OPTIONAL as per API
for (JsonPath jsonPath : jsonPaths) {
if (jsonPath != null) {
args.add(jsonPath.toString());
}
}
}
return createCommand(JSON_GET, new JsonValueListOutput<>(codec, parser.get()), args);
}
Command<K, V, List<String>> jsonGetRaw(K key, JsonGetArgs options, JsonPath... jsonPaths) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
if (options != null) {
// OPTIONAL as per API
options.build(args);
}
if (jsonPaths != null) {
// OPTIONAL as per API
for (JsonPath jsonPath : jsonPaths) {
if (jsonPath != null) {
args.add(jsonPath.toString());
}
}
}
return createCommand(JSON_GET, new StringListOutput<>(codec), args);
}
Command<K, V, String> jsonMerge(K key, JsonPath jsonPath, JsonValue value) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
args.add(jsonPath.toString());
args.add(value.asByteBuffer().array());
return createCommand(JSON_MERGE, new StatusOutput<>(codec), args);
}
Command<K, V, String> jsonMerge(K key, JsonPath jsonPath, String value) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
args.add(jsonPath.toString());
args.add(value);
return createCommand(JSON_MERGE, new StatusOutput<>(codec), args);
}
Command<K, V, List<JsonValue>> jsonMGet(JsonPath jsonPath, K... keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys);
args.add(jsonPath.toString());
return createCommand(JSON_MGET, new JsonValueListOutput<>(codec, parser.get()), args);
}
Command<K, V, List<String>> jsonMGetRaw(JsonPath jsonPath, K... keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys);
args.add(jsonPath.toString());
return createCommand(JSON_MGET, new StringListOutput<>(codec), args);
}
Command<K, V, String> jsonMSet(List<JsonMsetArgs<K, V>> arguments) {
notEmpty(arguments.toArray());
CommandArgs<K, V> args = new CommandArgs<>(codec);
for (JsonMsetArgs<K, V> argument : arguments) {
argument.build(args);
}
return createCommand(JSON_MSET, new StatusOutput<>(codec), args);
}
Command<K, V, List<Number>> jsonNumincrby(K key, JsonPath jsonPath, Number number) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
args.add(jsonPath.toString());
args.add(number.toString());
return createCommand(JSON_NUMINCRBY, new NumberListOutput<>(codec), args);
}
Command<K, V, List<V>> jsonObjkeys(K key, JsonPath jsonPath) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
if (jsonPath != null && !jsonPath.isRootPath()) {
// OPTIONAL as per API
args.add(jsonPath.toString());
}
return createCommand(JSON_OBJKEYS, new ValueListOutput<>(codec), args);
}
Command<K, V, List<Long>> jsonObjlen(K key, JsonPath jsonPath) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
if (jsonPath != null && !jsonPath.isRootPath()) {
// OPTIONAL as per API
args.add(jsonPath.toString());
}
return createCommand(JSON_OBJLEN, (CommandOutput) new ArrayOutput<>(codec), args);
}
Command<K, V, String> jsonSet(K key, JsonPath jsonPath, JsonValue value, JsonSetArgs options) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
args.add(jsonPath.toString());
args.add(value.asByteBuffer().array());
if (options != null) {
// OPTIONAL as per API
options.build(args);
}
return createCommand(JSON_SET, new StatusOutput<>(codec), args);
}
Command<K, V, String> jsonSet(K key, JsonPath jsonPath, String value, JsonSetArgs options) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
args.add(jsonPath.toString());
args.add(value);
if (options != null) {
// OPTIONAL as per API
options.build(args);
}
return createCommand(JSON_SET, new StatusOutput<>(codec), args);
}
Command<K, V, List<Long>> jsonStrappend(K key, JsonPath jsonPath, JsonValue value) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
if (jsonPath != null && !jsonPath.isRootPath()) {
// OPTIONAL as per API
args.add(jsonPath.toString());
}
args.add(value.asByteBuffer().array());
return createCommand(JSON_STRAPPEND, (CommandOutput) new ArrayOutput<>(codec), args);
}
Command<K, V, List<Long>> jsonStrappend(K key, JsonPath jsonPath, String jsonString) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
if (jsonPath != null && !jsonPath.isRootPath()) {
args.add(jsonPath.toString());
}
args.add(jsonString.getBytes());
return createCommand(JSON_STRAPPEND, (CommandOutput) new ArrayOutput<>(codec), args);
}
Command<K, V, List<Long>> jsonStrlen(K key, JsonPath jsonPath) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
if (jsonPath != null && !jsonPath.isRootPath()) {
// OPTIONAL as per API
args.add(jsonPath.toString());
}
return createCommand(JSON_STRLEN, (CommandOutput) new ArrayOutput<>(codec), args);
}
Command<K, V, List<Long>> jsonToggle(K key, JsonPath jsonPath) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
args.add(jsonPath.toString());
return createCommand(JSON_TOGGLE, (CommandOutput) new ArrayOutput<>(codec), args);
}
Command<K, V, Long> jsonDel(K key, JsonPath jsonPath) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
if (jsonPath != null && !jsonPath.isRootPath()) {
// OPTIONAL as per API
args.add(jsonPath.toString());
}
return createCommand(JSON_DEL, new IntegerOutput<>(codec), args);
}
Command<K, V, List<JsonType>> jsonType(K key, JsonPath jsonPath) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
if (jsonPath != null && !jsonPath.isRootPath()) {
// OPTIONAL as per API
args.add(jsonPath.toString());
}
return createCommand(JSON_TYPE, new JsonTypeListOutput<>(codec), args);
}
}
|
RedisJsonCommandBuilder
|
java
|
quarkusio__quarkus
|
extensions/resteasy-classic/resteasy/deployment/src/test/java/io/quarkus/resteasy/test/PathInterfaceImplementorTest.java
|
{
"start": 1275,
"end": 1382
}
|
interface ____ {
@GET
String get();
}
@RequestScoped
public static
|
BravoResource
|
java
|
reactor__reactor-core
|
reactor-core/src/withMicrometerTest/java/io/micrometer/scopedvalue/ScopeHolder.java
|
{
"start": 961,
"end": 1427
}
|
class ____ {
private static final ThreadLocal<Scope> SCOPE = new ThreadLocal<>();
public static ScopedValue currentValue() {
Scope scope = SCOPE.get();
return scope == null ? null : scope.scopedValue;
}
public static Scope get() {
return SCOPE.get();
}
static void set(Scope scope) {
SCOPE.set(scope);
}
@VisibleForTesting
public static void remove() {
SCOPE.remove();
}
}
|
ScopeHolder
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/streaming/api/operators/sorted/state/BatchExecutionInternalPriorityQueueSetTest.java
|
{
"start": 1067,
"end": 1505
}
|
class ____ extends InternalPriorityQueueTestBase {
@Override
protected InternalPriorityQueue<TestElement> newPriorityQueue(int initialCapacity) {
return new BatchExecutionInternalPriorityQueueSet<>(
TEST_ELEMENT_PRIORITY_COMPARATOR, initialCapacity);
}
@Override
protected boolean testSetSemanticsAgainstDuplicateElements() {
return true;
}
}
|
BatchExecutionInternalPriorityQueueSetTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/time/DurationGetTemporalUnitTest.java
|
{
"start": 2080,
"end": 2864
}
|
class ____ {
private static final long seconds = Duration.ZERO.get(SECONDS);
private static final long nanos = Duration.ZERO.get(NANOS);
// BUG: Diagnostic contains: Duration.ZERO.toMillis();
private static final long days = Duration.ZERO.get(MILLIS);
}
""")
.doTest();
}
@Test
public void durationGetWithRandomTemporalUnit() {
helper
.addSourceLines(
"TestClass.java",
"import static java.time.temporal.ChronoUnit.DAYS;",
"import static java.time.temporal.ChronoUnit.SECONDS;",
"import java.time.Duration;",
"import java.time.temporal.TemporalUnit;",
"import java.util.Random;",
"public
|
TestClass
|
java
|
apache__flink
|
flink-kubernetes/src/main/java/org/apache/flink/kubernetes/kubeclient/decorators/InitTaskManagerDecorator.java
|
{
"start": 2520,
"end": 10837
}
|
class ____ extends AbstractKubernetesStepDecorator {
private final KubernetesTaskManagerParameters kubernetesTaskManagerParameters;
private final Configuration flinkConfig;
public InitTaskManagerDecorator(
KubernetesTaskManagerParameters kubernetesTaskManagerParameters) {
this.kubernetesTaskManagerParameters = checkNotNull(kubernetesTaskManagerParameters);
this.flinkConfig = checkNotNull(kubernetesTaskManagerParameters.getFlinkConfiguration());
}
@Override
public FlinkPod decorateFlinkPod(FlinkPod flinkPod) {
final PodBuilder basicPodBuilder = new PodBuilder(flinkPod.getPodWithoutMainContainer());
// Overwrite fields
final String serviceAccountName =
KubernetesUtils.resolveUserDefinedValue(
flinkConfig,
KubernetesConfigOptions.TASK_MANAGER_SERVICE_ACCOUNT,
kubernetesTaskManagerParameters.getServiceAccount(),
KubernetesUtils.getServiceAccount(flinkPod),
"service account");
final String dnsPolicy =
KubernetesUtils.resolveDNSPolicy(
flinkPod.getPodWithoutMainContainer().getSpec().getDnsPolicy(),
kubernetesTaskManagerParameters.isHostNetworkEnabled());
if (flinkPod.getPodWithoutMainContainer().getSpec().getRestartPolicy() != null) {
logger.info(
"The restart policy of TaskManager pod will be overwritten to 'never' "
+ "since it should not be restarted.");
}
basicPodBuilder
.withApiVersion(Constants.API_VERSION)
.editOrNewMetadata()
.withName(kubernetesTaskManagerParameters.getPodName())
.endMetadata()
.editOrNewSpec()
.withServiceAccount(serviceAccountName)
.withServiceAccountName(serviceAccountName)
.withRestartPolicy(Constants.RESTART_POLICY_OF_NEVER)
.withHostNetwork(kubernetesTaskManagerParameters.isHostNetworkEnabled())
.withDnsPolicy(dnsPolicy)
.endSpec();
// Merge fields
basicPodBuilder
.editOrNewMetadata()
.addToLabels(kubernetesTaskManagerParameters.getLabels())
.addToAnnotations(kubernetesTaskManagerParameters.getAnnotations())
.endMetadata()
.editOrNewSpec()
.addToImagePullSecrets(kubernetesTaskManagerParameters.getImagePullSecrets())
.addToNodeSelector(kubernetesTaskManagerParameters.getNodeSelector())
.addAllToTolerations(
kubernetesTaskManagerParameters.getTolerations().stream()
.map(e -> KubernetesToleration.fromMap(e).getInternalResource())
.collect(Collectors.toList()))
.endSpec();
// Add node affinity.
// https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#node-affinity
Set<String> blockedNodes = kubernetesTaskManagerParameters.getBlockedNodes();
if (!blockedNodes.isEmpty()) {
basicPodBuilder
.editOrNewSpec()
.editOrNewAffinity()
.withNodeAffinity(
generateNodeAffinity(
kubernetesTaskManagerParameters.getNodeNameLabel(),
blockedNodes))
.endAffinity()
.endSpec();
}
final Container basicMainContainer = decorateMainContainer(flinkPod.getMainContainer());
return new FlinkPod.Builder(flinkPod)
.withPod(basicPodBuilder.build())
.withMainContainer(basicMainContainer)
.build();
}
private NodeAffinity generateNodeAffinity(String labelKey, Set<String> blockedNodes) {
NodeSelectorRequirement nodeSelectorRequirement =
new NodeSelectorRequirement(labelKey, "NotIn", new ArrayList<>(blockedNodes));
NodeAffinityBuilder nodeAffinityBuilder = new NodeAffinityBuilder();
return nodeAffinityBuilder
.withNewRequiredDuringSchedulingIgnoredDuringExecution()
.addNewNodeSelectorTerm()
.addToMatchExpressions(nodeSelectorRequirement)
.endNodeSelectorTerm()
.endRequiredDuringSchedulingIgnoredDuringExecution()
.build();
}
private Container decorateMainContainer(Container container) {
final ContainerBuilder mainContainerBuilder = new ContainerBuilder(container);
// Overwrite fields
final ResourceRequirements requirementsInPodTemplate =
container.getResources() == null
? new ResourceRequirements()
: container.getResources();
final ResourceRequirements resourceRequirements =
KubernetesUtils.getResourceRequirements(
requirementsInPodTemplate,
kubernetesTaskManagerParameters.getTaskManagerMemoryMB(),
kubernetesTaskManagerParameters.getTaskManagerMemoryLimitFactor(),
kubernetesTaskManagerParameters.getTaskManagerCPU(),
kubernetesTaskManagerParameters.getTaskManagerCPULimitFactor(),
kubernetesTaskManagerParameters.getTaskManagerExternalResources(),
kubernetesTaskManagerParameters.getTaskManagerExternalResourceConfigKeys());
final String image =
KubernetesUtils.resolveUserDefinedValue(
flinkConfig,
KubernetesConfigOptions.CONTAINER_IMAGE,
kubernetesTaskManagerParameters.getImage(),
container.getImage(),
"main container image");
final String imagePullPolicy =
KubernetesUtils.resolveUserDefinedValue(
flinkConfig,
KubernetesConfigOptions.CONTAINER_IMAGE_PULL_POLICY,
kubernetesTaskManagerParameters.getImagePullPolicy().name(),
container.getImagePullPolicy(),
"main container image pull policy");
mainContainerBuilder
.withName(Constants.MAIN_CONTAINER_NAME)
.withImage(image)
.withImagePullPolicy(imagePullPolicy)
.withResources(resourceRequirements);
// Merge fields
mainContainerBuilder
.addAllToPorts(getContainerPorts())
.addAllToEnv(getCustomizedEnvs())
.addNewEnv()
.withName(ENV_FLINK_POD_NODE_ID)
.withValueFrom(
new EnvVarSourceBuilder()
.withNewFieldRef(API_VERSION, POD_NODE_ID_FIELD_PATH)
.build())
.endEnv();
getFlinkLogDirEnv().ifPresent(mainContainerBuilder::addToEnv);
return mainContainerBuilder.build();
}
private List<ContainerPort> getContainerPorts() {
if (kubernetesTaskManagerParameters.isHostNetworkEnabled()) {
return Collections.emptyList();
}
return Collections.singletonList(
new ContainerPortBuilder()
.withName(Constants.TASK_MANAGER_RPC_PORT_NAME)
.withContainerPort(kubernetesTaskManagerParameters.getRPCPort())
.build());
}
private List<EnvVar> getCustomizedEnvs() {
return kubernetesTaskManagerParameters.getEnvironments().entrySet().stream()
.map(kv -> new EnvVar(kv.getKey(), kv.getValue(), null))
.collect(Collectors.toList());
}
private Optional<EnvVar> getFlinkLogDirEnv() {
return kubernetesTaskManagerParameters
.getFlinkLogDirInPod()
.map(logDir -> new EnvVar(Constants.ENV_FLINK_LOG_DIR, logDir, null));
}
}
|
InitTaskManagerDecorator
|
java
|
spring-projects__spring-boot
|
buildSrc/src/test/java/org/springframework/boot/build/architecture/objects/requireNonNullWithString/RequireNonNullWithString.java
|
{
"start": 740,
"end": 869
}
|
class ____ {
void exampleMethod() {
Objects.requireNonNull(new Object(), "Object cannot be null");
}
}
|
RequireNonNullWithString
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ShrinkSetAliasStep.java
|
{
"start": 1079,
"end": 2066
}
|
class ____ extends AsyncRetryDuringSnapshotActionStep {
public static final String NAME = "aliases";
public ShrinkSetAliasStep(StepKey key, StepKey nextStepKey, Client client) {
super(key, nextStepKey, client);
}
@Override
public boolean isRetryable() {
return true;
}
@Override
public void performDuringNoSnapshot(IndexMetadata indexMetadata, ProjectMetadata currentProject, ActionListener<Void> listener) {
// get source index
String indexName = indexMetadata.getIndex().getName();
// get target shrink index
LifecycleExecutionState lifecycleState = indexMetadata.getLifecycleExecutionState();
String targetIndexName = getShrinkIndexName(indexName, lifecycleState);
deleteSourceIndexAndTransferAliases(getClient(currentProject.id()), indexMetadata, targetIndexName, listener, true);
}
@Override
public boolean indexSurvives() {
return false;
}
}
|
ShrinkSetAliasStep
|
java
|
google__dagger
|
dagger-compiler/main/java/dagger/internal/codegen/validation/BindingMethodValidator.java
|
{
"start": 8885,
"end": 8997
}
|
class ____ all {@code throws}-declared throwables must extend, other than {@link
* Error}.
*/
protected
|
that
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/server/dns/RecordCreatorFactory.java
|
{
"start": 5693,
"end": 6509
}
|
class ____ {
private Name host;
private int port;
/**
* Creates an object with a host and port pair.
*
* @param host the hostname/ip
* @param port the port value
*/
public HostPortInfo(Name host, int port) {
this.setHost(host);
this.setPort(port);
}
/**
* Return the host name.
* @return the host name.
*/
Name getHost() {
return host;
}
/**
* Set the host name.
* @param host the host name.
*/
void setHost(Name host) {
this.host = host;
}
/**
* Get the port.
* @return the port.
*/
int getPort() {
return port;
}
/**
* Set the port.
* @param port the port.
*/
void setPort(int port) {
this.port = port;
}
}
}
|
HostPortInfo
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableFromIterable.java
|
{
"start": 7796,
"end": 11536
}
|
class ____<T> extends BaseRangeSubscription<T> {
private static final long serialVersionUID = -6022804456014692607L;
final ConditionalSubscriber<? super T> downstream;
IteratorConditionalSubscription(ConditionalSubscriber<? super T> actual, Iterator<? extends T> it) {
super(it);
this.downstream = actual;
}
@Override
void fastPath() {
Iterator<? extends T> it = this.iterator;
ConditionalSubscriber<? super T> a = downstream;
for (;;) {
if (cancelled) {
return;
}
T t;
try {
t = it.next();
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
a.onError(ex);
return;
}
if (cancelled) {
return;
}
if (t == null) {
a.onError(new NullPointerException("Iterator.next() returned a null value"));
return;
} else {
a.tryOnNext(t);
}
if (cancelled) {
return;
}
boolean b;
try {
b = it.hasNext();
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
a.onError(ex);
return;
}
if (!b) {
if (!cancelled) {
a.onComplete();
}
return;
}
}
}
@Override
void slowPath(long r) {
long e = 0L;
Iterator<? extends T> it = this.iterator;
ConditionalSubscriber<? super T> a = downstream;
for (;;) {
while (e != r) {
if (cancelled) {
return;
}
T t;
try {
t = it.next();
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
a.onError(ex);
return;
}
if (cancelled) {
return;
}
boolean b;
if (t == null) {
a.onError(new NullPointerException("Iterator.next() returned a null value"));
return;
} else {
b = a.tryOnNext(t);
}
if (cancelled) {
return;
}
boolean hasNext;
try {
hasNext = it.hasNext();
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
a.onError(ex);
return;
}
if (!hasNext) {
if (!cancelled) {
a.onComplete();
}
return;
}
if (b) {
e++;
}
}
r = get();
if (e == r) {
r = addAndGet(-e);
if (r == 0L) {
return;
}
e = 0L;
}
}
}
}
}
|
IteratorConditionalSubscription
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/slotpool/PhysicalSlotRequestBulkChecker.java
|
{
"start": 1565,
"end": 2233
}
|
interface ____ {
/**
* Starts the bulk checker by initializing the main thread executor.
*
* @param mainThreadExecutor the main thread executor of the job master
*/
void start(ComponentMainThreadExecutor mainThreadExecutor);
/**
* Starts tracking the fulfillability of a {@link PhysicalSlotRequestBulk} with timeout.
*
* @param bulk {@link PhysicalSlotRequestBulk} to track
* @param timeout timeout after which the bulk should be canceled if it is still not
* fulfillable.
*/
void schedulePendingRequestBulkTimeoutCheck(PhysicalSlotRequestBulk bulk, Duration timeout);
}
|
PhysicalSlotRequestBulkChecker
|
java
|
spring-projects__spring-boot
|
module/spring-boot-data-mongodb/src/test/java/org/springframework/boot/data/mongodb/autoconfigure/DataMongoAutoConfigurationTests.java
|
{
"start": 16321,
"end": 16376
}
|
class ____ {
@Nullable LocalDateTime date;
}
}
|
Sample
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/mapreduce/TestStreamXmlRecordReader.java
|
{
"start": 1883,
"end": 5127
}
|
class ____ {
private File INPUT_FILE;
private String input;
private String outputExpect;
Path OUTPUT_DIR;
FileSystem fs;
public TestStreamXmlRecordReader() throws IOException {
INPUT_FILE = new File("target/input.xml");
input = "<xmltag>\t\nroses.are.red\t\nviolets.are.blue\t\n"
+ "bunnies.are.pink\t\n</xmltag>\t\n";
outputExpect = input;
}
protected void assertOutput(String expectedOutput, String output)
throws IOException {
String[] words = expectedOutput.split("\t\n");
Set<String> expectedWords = new HashSet<String>(Arrays.asList(words));
words = output.split("\t\n");
Set<String> returnedWords = new HashSet<String>(Arrays.asList(words));
assertTrue(returnedWords.containsAll(expectedWords));
}
protected void checkOutput() throws IOException {
File outFile = new File(OUTPUT_DIR.toString());
Path outPath = new Path(outFile.getAbsolutePath(), "part-r-00000");
String output = slurpHadoop(outPath, fs);
fs.delete(outPath, true);
outputExpect = "<PATTERN>\n" + outputExpect + "</PATTERN>";
System.err.println("outEx1=" + outputExpect);
System.err.println(" out1=" + output);
assertOutput(outputExpect, output);
}
private String slurpHadoop(Path p, FileSystem fs) throws IOException {
int len = (int) fs.getFileStatus(p).getLen();
byte[] buf = new byte[len];
FSDataInputStream in = fs.open(p);
String contents = null;
try {
in.readFully(in.getPos(), buf);
contents = new String(buf, StandardCharsets.UTF_8);
} finally {
in.close();
}
return contents;
}
@BeforeEach
public void createInput() throws IOException {
FileOutputStream out = new FileOutputStream(INPUT_FILE.getAbsoluteFile());
String dummyXmlStartTag = "<PATTERN>\n";
String dummyXmlEndTag = "</PATTERN>\n";
out.write(dummyXmlStartTag.getBytes(StandardCharsets.UTF_8));
out.write(input.getBytes(StandardCharsets.UTF_8));
out.write(dummyXmlEndTag.getBytes(StandardCharsets.UTF_8));
out.close();
}
@Test
public void testStreamXmlRecordReader() throws Exception {
Job job = Job.getInstance();
Configuration conf = job.getConfiguration();
job.setJarByClass(TestStreamXmlRecordReader.class);
job.setMapperClass(Mapper.class);
conf.set("stream.recordreader.class",
"org.apache.hadoop.streaming.mapreduce.StreamXmlRecordReader");
conf.set("stream.recordreader.begin", "<PATTERN>");
conf.set("stream.recordreader.end", "</PATTERN>");
job.setInputFormatClass(StreamInputFormat.class);
job.setMapOutputKeyClass(Text.class);
job.setMapOutputValueClass(Text.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(Text.class);
FileInputFormat.addInputPath(job, new Path("target/input.xml"));
OUTPUT_DIR = new Path("target/output");
fs = FileSystem.get(conf);
if (fs.exists(OUTPUT_DIR)) {
fs.delete(OUTPUT_DIR, true);
}
FileOutputFormat.setOutputPath(job, OUTPUT_DIR);
boolean ret = job.waitForCompletion(true);
assertEquals(true, ret);
checkOutput();
}
@AfterEach
public void tearDown() throws IOException {
fs.delete(OUTPUT_DIR, true);
}
}
|
TestStreamXmlRecordReader
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhance/internal/bytebuddy/DirtyCheckingWithEmbeddableAndNonVisibleGenericMappedSuperclassTest.java
|
{
"start": 6840,
"end": 7192
}
|
class ____ extends MyNonVisibleGenericMappedSuperclass<MyEmbeddable> {
@Id
private Integer id;
public MyEntity() {
}
private MyEntity(Integer id, String text) {
this.id = id;
setEmbedded( new MyEmbeddable( text ) );
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
}
}
|
MyEntity
|
java
|
apache__camel
|
components/camel-vertx/camel-vertx-http/src/main/java/org/apache/camel/component/vertx/http/VertxHttpComponent.java
|
{
"start": 1984,
"end": 14918
}
|
class ____ extends HeaderFilterStrategyComponent
implements RestProducerFactory, SSLContextParametersAware {
private volatile boolean managedVertx;
@Metadata(label = "security")
private String basicAuthUsername;
@Metadata(label = "security")
private String basicAuthPassword;
@Metadata(label = "security")
private String bearerToken;
@Metadata(label = "security")
private SSLContextParameters sslContextParameters;
@Metadata(label = "proxy")
private String proxyHost;
@Metadata(label = "proxy")
private Integer proxyPort;
@Metadata(label = "proxy", enums = "HTTP,SOCKS4,SOCKS5")
private ProxyType proxyType;
@Metadata(label = "proxy")
private String proxyUsername;
@Metadata(label = "proxy")
private String proxyPassword;
@Metadata(label = "advanced")
private Vertx vertx;
@Metadata(label = "advanced")
private VertxOptions vertxOptions;
@Metadata(label = "advanced")
private VertxHttpBinding vertxHttpBinding;
@Metadata(label = "security", defaultValue = "false")
private boolean useGlobalSslContextParameters;
@Metadata(label = "advanced")
private boolean allowJavaSerializedObject;
@Metadata(label = "producer", defaultValue = "true")
private boolean responsePayloadAsByteArray = true;
@Metadata(label = "advanced")
private WebClientOptions webClientOptions;
@Override
protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
VertxHttpConfiguration configuration = new VertxHttpConfiguration();
configuration.setResponsePayloadAsByteArray(responsePayloadAsByteArray);
URI uriHttpUriAddress = new URI(UnsafeUriCharactersEncoder.encodeHttpURI(remaining));
VertxHttpEndpoint endpoint = new VertxHttpEndpoint(uri, this, configuration);
setProperties(endpoint, parameters);
if (configuration.getBasicAuthUsername() == null) {
configuration.setBasicAuthUsername(getBasicAuthUsername());
}
if (configuration.getBasicAuthPassword() == null) {
configuration.setBasicAuthPassword(getBasicAuthPassword());
}
if (configuration.getBearerToken() == null) {
configuration.setBearerToken(getBearerToken());
}
if (configuration.getSslContextParameters() == null) {
configuration.setSslContextParameters(getSslContextParameters());
}
if (configuration.getProxyType() == null) {
configuration.setProxyType(getProxyType());
}
if (configuration.getProxyHost() == null) {
configuration.setProxyHost(getProxyHost());
}
if (configuration.getProxyPort() == null) {
configuration.setProxyPort(getProxyPort());
}
if (configuration.getProxyUsername() == null) {
configuration.setProxyUsername(getProxyUsername());
}
if (configuration.getProxyPassword() == null) {
configuration.setProxyPassword(getProxyPassword());
}
if (configuration.getSslContextParameters() == null) {
configuration.setSslContextParameters(retrieveGlobalSslContextParameters());
}
if (configuration.getVertxHttpBinding() == null) {
configuration.setVertxHttpBinding(getVertxHttpBinding());
}
if (configuration.getHeaderFilterStrategy() == null) {
configuration.setHeaderFilterStrategy(getHeaderFilterStrategy());
}
if (configuration.getWebClientOptions() == null) {
configuration.setWebClientOptions(getWebClientOptions());
}
// Recreate the http uri with the remaining parameters which the endpoint did not use
URI httpUri = URISupport.createRemainingURI(
new URI(
uriHttpUriAddress.getScheme(),
uriHttpUriAddress.getUserInfo(),
uriHttpUriAddress.getHost(),
uriHttpUriAddress.getPort(),
uriHttpUriAddress.getPath(),
uriHttpUriAddress.getQuery(),
uriHttpUriAddress.getFragment()),
parameters);
configuration.setHttpUri(httpUri);
return endpoint;
}
@Override
public Producer createProducer(
CamelContext camelContext, String host,
String verb, String basePath, String uriTemplate, String queryParameters, String consumes,
String produces, RestConfiguration configuration, Map<String, Object> parameters)
throws Exception {
// avoid leading slash
basePath = FileUtil.stripLeadingSeparator(basePath);
uriTemplate = FileUtil.stripLeadingSeparator(uriTemplate);
// get the endpoint
String scheme = "vertx-http:";
String url = scheme + host;
if (!ObjectHelper.isEmpty(basePath)) {
url += "/" + basePath;
}
if (!ObjectHelper.isEmpty(uriTemplate)) {
url += "/" + uriTemplate;
}
RestConfiguration config = configuration;
if (config == null) {
config = CamelContextHelper.getRestConfiguration(getCamelContext(), null, scheme);
}
Map<String, Object> map = new HashMap<>();
// build query string, and append any endpoint configuration properties
if (config.getProducerComponent() == null || config.getProducerComponent().equals(scheme)) {
// setup endpoint options
if (config.getEndpointProperties() != null && !config.getEndpointProperties().isEmpty()) {
map.putAll(config.getEndpointProperties());
}
}
// get the endpoint
String query = URISupport.createQueryString(map);
if (!query.isEmpty()) {
url = url + "?" + query;
}
parameters = parameters != null ? new HashMap<>(parameters) : new HashMap<>();
// there are cases where we might end up here without component being created beforehand
// we need to abide by the component properties specified in the parameters when creating
// the component, one such case is when we switch from "http" to "https" component name
RestProducerFactoryHelper.setupComponentFor(url, camelContext, (Map<String, Object>) parameters.remove("component"));
VertxHttpEndpoint endpoint = (VertxHttpEndpoint) camelContext.getEndpoint(url, parameters);
String path = uriTemplate != null ? uriTemplate : basePath;
HeaderFilterStrategy headerFilterStrategy
= resolveAndRemoveReferenceParameter(parameters, "headerFilterStrategy", HeaderFilterStrategy.class);
if (headerFilterStrategy != null) {
endpoint.getConfiguration().setHeaderFilterStrategy(headerFilterStrategy);
} else {
endpoint.getConfiguration().setHeaderFilterStrategy(new VertxHttpRestHeaderFilterStrategy(path, queryParameters));
}
// the endpoint must be started before creating the producer
ServiceHelper.startService(endpoint);
return endpoint.createProducer();
}
@Override
protected void doInit() throws Exception {
if (vertx == null) {
Set<Vertx> vertxes = getCamelContext().getRegistry().findByType(Vertx.class);
if (vertxes.size() == 1) {
vertx = vertxes.iterator().next();
}
}
}
@Override
protected void doStart() throws Exception {
super.doStart();
if (vertx == null) {
if (vertxOptions != null) {
vertx = Vertx.vertx(vertxOptions);
} else {
vertx = Vertx.vertx();
}
managedVertx = true;
}
}
@Override
protected void doStop() throws Exception {
super.doStop();
if (managedVertx && vertx != null) {
vertx.close();
}
vertx = null;
}
public Vertx getVertx() {
return vertx;
}
/**
* To use an existing vertx instead of creating a new instance
*/
public void setVertx(Vertx vertx) {
this.vertx = vertx;
}
public VertxOptions getVertxOptions() {
return vertxOptions;
}
/**
* To provide a custom set of vertx options for configuring vertx
*/
public void setVertxOptions(VertxOptions vertxOptions) {
this.vertxOptions = vertxOptions;
}
public VertxHttpBinding getVertxHttpBinding() {
if (vertxHttpBinding == null) {
vertxHttpBinding = new DefaultVertxHttpBinding();
}
return vertxHttpBinding;
}
/**
* A custom VertxHttpBinding which can control how to bind between Vert.x and Camel
*/
public void setVertxHttpBinding(VertxHttpBinding vertxHttpBinding) {
this.vertxHttpBinding = vertxHttpBinding;
}
@Override
public boolean isUseGlobalSslContextParameters() {
return this.useGlobalSslContextParameters;
}
/**
* Enable usage of global SSL context parameters
*/
@Override
public void setUseGlobalSslContextParameters(boolean useGlobalSslContextParameters) {
this.useGlobalSslContextParameters = useGlobalSslContextParameters;
}
public boolean isAllowJavaSerializedObject() {
return allowJavaSerializedObject;
}
/**
* Whether to allow java serialization when a request has the Content-Type application/x-java-serialized-object
* <p/>
* This is disabled by default. If you enable this, be aware that Java will deserialize the incoming data from the
* request. This can be a potential security risk.
*/
public void setAllowJavaSerializedObject(boolean allowJavaSerializedObject) {
this.allowJavaSerializedObject = allowJavaSerializedObject;
}
public boolean isResponsePayloadAsByteArray() {
return responsePayloadAsByteArray;
}
/**
* Whether the response body should be byte[] or as io.vertx.core.buffer.Buffer
*/
public void setResponsePayloadAsByteArray(boolean responsePayloadAsByteArray) {
this.responsePayloadAsByteArray = responsePayloadAsByteArray;
}
/**
* The proxy server host address
*/
public void setProxyHost(String proxyHost) {
this.proxyHost = proxyHost;
}
public String getProxyHost() {
return proxyHost;
}
/**
* The proxy server port
*/
public void setProxyPort(Integer proxyPort) {
this.proxyPort = proxyPort;
}
public Integer getProxyPort() {
return proxyPort;
}
/**
* The proxy server username if authentication is required
*/
public void setProxyUsername(String proxyUsername) {
this.proxyUsername = proxyUsername;
}
public String getProxyUsername() {
return proxyUsername;
}
/**
* The proxy server password if authentication is required
*/
public void setProxyPassword(String proxyPassword) {
this.proxyPassword = proxyPassword;
}
public String getProxyPassword() {
return proxyPassword;
}
/**
* The proxy server type
*/
public void setProxyType(ProxyType proxyType) {
this.proxyType = proxyType;
}
public ProxyType getProxyType() {
return proxyType;
}
/**
* The user name to use for basic authentication
*/
public void setBasicAuthUsername(String basicAuthUsername) {
this.basicAuthUsername = basicAuthUsername;
}
public String getBasicAuthUsername() {
return basicAuthUsername;
}
/**
* The password to use for basic authentication
*/
public void setBasicAuthPassword(String basicAuthPassword) {
this.basicAuthPassword = basicAuthPassword;
}
public String getBasicAuthPassword() {
return basicAuthPassword;
}
/**
* The bearer token to use for bearer token authentication
*/
public void setBearerToken(String bearerToken) {
this.bearerToken = bearerToken;
}
public String getBearerToken() {
return bearerToken;
}
/**
* To configure security using SSLContextParameters
*/
public SSLContextParameters getSslContextParameters() {
return sslContextParameters;
}
public void setSslContextParameters(SSLContextParameters sslContextParameters) {
this.sslContextParameters = sslContextParameters;
}
public WebClientOptions getWebClientOptions() {
return webClientOptions;
}
/**
* To provide a custom set of options for configuring vertx web client
*/
public void setWebClientOptions(WebClientOptions webClientOptions) {
this.webClientOptions = webClientOptions;
}
}
|
VertxHttpComponent
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/util/ReflectionTestUtils.java
|
{
"start": 2867,
"end": 5105
}
|
class ____ {
private static final String SETTER_PREFIX = "set";
private static final String GETTER_PREFIX = "get";
private static final Log logger = LogFactory.getLog(ReflectionTestUtils.class);
private static final boolean SPRING_AOP_PRESENT = ClassUtils.isPresent(
"org.springframework.aop.framework.Advised", ReflectionTestUtils.class.getClassLoader());
/**
* Set the {@linkplain Field field} with the given {@code name} on the
* provided {@code targetObject} to the supplied {@code value}.
* <p>This method delegates to {@link #setField(Object, String, Object, Class)},
* supplying {@code null} for the {@code type} argument.
* @param targetObject the target object on which to set the field; never {@code null}
* @param name the name of the field to set; never {@code null}
* @param value the value to set
*/
public static void setField(Object targetObject, String name, @Nullable Object value) {
setField(targetObject, name, value, null);
}
/**
* Set the {@linkplain Field field} with the given {@code name}/{@code type}
* on the provided {@code targetObject} to the supplied {@code value}.
* <p>This method delegates to {@link #setField(Object, Class, String, Object, Class)},
* supplying {@code null} for the {@code targetClass} argument.
* @param targetObject the target object on which to set the field; never {@code null}
* @param name the name of the field to set; may be {@code null} if
* {@code type} is specified
* @param value the value to set
* @param type the type of the field to set; may be {@code null} if
* {@code name} is specified
*/
public static void setField(Object targetObject, @Nullable String name, @Nullable Object value, @Nullable Class<?> type) {
setField(targetObject, null, name, value, type);
}
/**
* Set the static {@linkplain Field field} with the given {@code name} on
* the provided {@code targetClass} to the supplied {@code value}.
* <p>This method delegates to {@link #setField(Object, Class, String, Object, Class)},
* supplying {@code null} for the {@code targetObject} and {@code type} arguments.
* <p>This method does not support setting {@code static final} fields.
* @param targetClass the target
|
ReflectionTestUtils
|
java
|
lettuce-io__lettuce-core
|
src/test/java/io/lettuce/core/search/RediSearchClusterCursorIntegrationTests.java
|
{
"start": 1710,
"end": 13117
}
|
class ____ extends TestSupport {
private static final String INDEX = "books-cursor-cluster-idx";
private static final String PREFIX = "book:cursor:cluster:";
private final RedisClusterClient clusterClient;
private StatefulRedisClusterConnection<String, String> connection;
private RedisAdvancedClusterCommands<String, String> sync;
private RedisAdvancedClusterAsyncCommands<String, String> async;
private RedisAdvancedClusterReactiveCommands<String, String> reactive;
@Inject
RediSearchClusterCursorIntegrationTests(RedisClusterClient clusterClient) {
this.clusterClient = clusterClient;
}
@BeforeEach
void open() {
connection = clusterClient.connect();
sync = connection.sync();
async = connection.async();
reactive = connection.reactive();
}
@AfterEach
void close() {
if (connection != null)
connection.close();
}
@BeforeEach
void setUp() {
// Require Redis 8+ to match CI expectations for RediSearch behavior
assumeTrue(RedisConditions.of(sync).hasVersionGreaterOrEqualsTo("8.0"));
sync.flushall();
// Create schema
FieldArgs<String> title = TextFieldArgs.<String> builder().name("title").build();
FieldArgs<String> author = TagFieldArgs.<String> builder().name("author").build();
FieldArgs<String> year = NumericFieldArgs.<String> builder().name("year").sortable().build();
FieldArgs<String> rating = NumericFieldArgs.<String> builder().name("rating").sortable().build();
CreateArgs<String, String> createArgs = CreateArgs.<String, String> builder().withPrefix(PREFIX)
.on(CreateArgs.TargetType.HASH).build();
assertThat(sync.ftCreate(INDEX, createArgs, Arrays.asList(title, author, year, rating))).isEqualTo("OK");
// Insert data across slots
String[][] books = { { "Dune", "frank_herbert", "1965", "4.2" }, { "Lord of the Rings", "tolkien", "1954", "4.5" },
{ "Sherlock Holmes", "doyle", "1887", "4.1" }, { "Pride and Prejudice", "austen", "1813", "4.0" },
{ "Gone Girl", "flynn", "2012", "3.9" }, { "Steve Jobs", "isaacson", "2011", "4.3" },
{ "Sapiens", "harari", "2011", "4.4" }, { "Cosmos", "sagan", "1980", "4.6" } };
for (int i = 0; i < books.length; i++) {
Map<String, String> doc = new HashMap<>();
doc.put("title", books[i][0]);
doc.put("author", books[i][1]);
doc.put("year", books[i][2]);
doc.put("rating", books[i][3]);
sync.hmset(PREFIX + i, doc);
}
}
@AfterEach
void tearDown() {
try {
sync.ftDropindex(INDEX);
} catch (Exception ignore) {
}
sync.flushall();
}
@Test
void sync_cursorLifecycle_and_stickiness() {
AggregateArgs<String, String> args = AggregateArgs.<String, String> builder()
.groupBy(AggregateArgs.GroupBy.<String, String> of("author")
.reduce(AggregateArgs.Reducer.<String, String> avg("@rating").as("avg_rating")))
.withCursor(AggregateArgs.WithCursor.of(2L)).build();
AggregationReply<String, String> first = sync.ftAggregate(INDEX, "*", args);
assertThat(first.getCursor().get().getCursorId()).isGreaterThan(0);
assertThat(first.getCursor().get().getNodeId()).isPresent();
assertThat(first.getReplies()).isNotEmpty();
String nodeId = first.getCursor().get().getNodeId().get();
// Stickiness: reads route to the same node and pages advance
AggregationReply<String, String> page2 = sync.ftCursorread(INDEX, first.getCursor().get());
assertThat(page2).isNotNull();
assertThat(page2.getCursor().get().getNodeId()).isPresent();
assertThat(page2.getCursor().get().getNodeId().get()).isEqualTo(nodeId);
assertThat(page2.getReplies()).isNotEmpty();
assertThat(page2.getReplies()).isNotEqualTo(first.getReplies());
AggregationReply<String, String> page3 = sync.ftCursorread(INDEX, page2.getCursor().get());
assertThat(page3.getCursor().get().getNodeId()).isPresent();
assertThat(page3.getCursor().get().getNodeId().get()).isEqualTo(nodeId);
assertThat(page3.getReplies()).isNotEmpty();
assertThat(page3.getReplies()).isNotEqualTo(page2.getReplies());
// Delete cursor
String del = sync.ftCursordel(INDEX, page3.getCursor().get());
assertThat(del).isEqualTo("OK");
}
@Test
void async_cursorLifecycle_and_stickiness() {
AggregateArgs<String, String> args = AggregateArgs.<String, String> builder()
.groupBy(AggregateArgs.GroupBy.<String, String> of("author")
.reduce(AggregateArgs.Reducer.<String, String> avg("@rating").as("avg_rating")))
.withCursor(AggregateArgs.WithCursor.of(2L)).build();
AggregationReply<String, String> first = async.ftAggregate(INDEX, "*", args).toCompletableFuture().join();
assertThat(first.getCursor().get().getCursorId()).isGreaterThan(0);
assertThat(first.getCursor().get().getNodeId()).isPresent();
assertThat(first.getReplies()).isNotEmpty();
String nodeId = first.getCursor().get().getNodeId().get();
AggregationReply<String, String> page2 = async.ftCursorread(INDEX, first.getCursor().get()).toCompletableFuture()
.join();
assertThat(page2.getCursor().get().getNodeId()).isPresent();
assertThat(page2.getCursor().get().getNodeId().get()).isEqualTo(nodeId);
assertThat(page2.getReplies()).isNotEmpty();
assertThat(page2.getReplies()).isNotEqualTo(first.getReplies());
AggregationReply<String, String> page3 = async.ftCursorread(INDEX, page2.getCursor().get()).toCompletableFuture()
.join();
assertThat(page3.getCursor().get().getNodeId()).isPresent();
assertThat(page3.getCursor().get().getNodeId().get()).isEqualTo(nodeId);
assertThat(page3.getReplies()).isNotEmpty();
assertThat(page3.getReplies()).isNotEqualTo(page2.getReplies());
String del = async.ftCursordel(INDEX, page3.getCursor().get()).toCompletableFuture().join();
assertThat(del).isEqualTo("OK");
}
@Test
void reactive_cursorLifecycle_and_stickiness() {
AggregateArgs<String, String> args = AggregateArgs.<String, String> builder()
.groupBy(AggregateArgs.GroupBy.<String, String> of("author")
.reduce(AggregateArgs.Reducer.<String, String> avg("@rating").as("avg_rating")))
.withCursor(AggregateArgs.WithCursor.of(2L)).build();
AggregationReply<String, String> first = reactive.ftAggregate(INDEX, "*", args).block();
assertThat(first).isNotNull();
assertThat(first.getCursor().get().getCursorId()).isGreaterThan(0);
assertThat(first.getCursor().get().getNodeId()).isPresent();
assertThat(first.getReplies()).isNotEmpty();
String nodeId = first.getCursor().get().getNodeId().get();
AggregationReply<String, String> page2 = reactive.ftCursorread(INDEX, first.getCursor().get()).block();
assertThat(page2).isNotNull();
assertThat(page2.getCursor().get().getNodeId()).isPresent();
assertThat(page2.getCursor().get().getNodeId().get()).isEqualTo(nodeId);
assertThat(page2.getReplies()).isNotEmpty();
assertThat(page2.getReplies()).isNotEqualTo(first.getReplies());
AggregationReply<String, String> page3 = reactive.ftCursorread(INDEX, page2.getCursor().get()).block();
assertThat(page3.getCursor().get().getNodeId()).isPresent();
assertThat(page3.getCursor().get().getNodeId().get()).isEqualTo(nodeId);
assertThat(page3.getReplies()).isNotEmpty();
assertThat(page3.getReplies()).isNotEqualTo(page2.getReplies());
String del = reactive.ftCursordel(INDEX, page3.getCursor().get()).block();
assertThat(del).isEqualTo("OK");
}
@Test
void sync_errorHandling_missingNodeId_throws() {
AggregationReply.Cursor c = AggregationReply.Cursor.of(5L, null);
assertThatThrownBy(() -> sync.ftCursorread(INDEX, c)).isInstanceOf(IllegalArgumentException.class)
.hasMessageContaining("missing nodeId");
assertThatThrownBy(() -> sync.ftCursordel(INDEX, c)).isInstanceOf(IllegalArgumentException.class)
.hasMessageContaining("missing nodeId");
}
@Test
void async_errorHandling_missingNodeId_throws() {
AggregationReply.Cursor c2 = AggregationReply.Cursor.of(5L, null);
assertThatThrownBy(() -> async.ftCursorread(INDEX, c2).toCompletableFuture().join())
.hasCauseInstanceOf(IllegalArgumentException.class).hasMessageContaining("missing nodeId");
assertThatThrownBy(() -> async.ftCursordel(INDEX, c2).toCompletableFuture().join())
.hasCauseInstanceOf(IllegalArgumentException.class).hasMessageContaining("missing nodeId");
}
@Test
void reactive_errorHandling_missingNodeId_emitsError() {
AggregationReply.Cursor c3 = AggregationReply.Cursor.of(5L, null);
StepVerifier.create(reactive.ftCursorread(INDEX, c3))
.expectErrorSatisfies(
t -> assertThat(t).isInstanceOf(IllegalArgumentException.class).hasMessageContaining("missing nodeId"))
.verify();
StepVerifier.create(reactive.ftCursordel(INDEX, c3))
.expectErrorSatisfies(
t -> assertThat(t).isInstanceOf(IllegalArgumentException.class).hasMessageContaining("missing nodeId"))
.verify();
}
@Test
void async_firstIteration_rotatesAcrossUpstreamNodes() {
// Ensure we have at least two upstream nodes in the cluster; otherwise skip to avoid flakiness
long upstreams = connection.getPartitions().stream().filter(n -> n.is(RedisClusterNode.NodeFlag.UPSTREAM)).count();
assumeTrue(upstreams >= 2, "requires >= 2 upstream nodes");
AggregateArgs<String, String> args = AggregateArgs.<String, String> builder()
.groupBy(AggregateArgs.GroupBy.<String, String> of("author")
.reduce(AggregateArgs.Reducer.<String, String> avg("@rating").as("avg_rating")))
.withCursor(AggregateArgs.WithCursor.of(1L)).build();
Set<String> nodeIds = new HashSet<>();
int observedCursors = 0;
for (int i = 0; i < 30 && nodeIds.size() < upstreams; i++) {
AggregationReply<String, String> first = async.ftAggregate(INDEX, "*", args).toCompletableFuture().join();
assertThat(first).isNotNull();
if (first.getCursor().isPresent() && first.getCursor().get().getCursorId() > 0) {
observedCursors++;
first.getCursor().get().getNodeId().ifPresent(nodeId -> {
nodeIds.add(nodeId);
async.ftCursordel(INDEX, first.getCursor().get()).toCompletableFuture().join();
});
}
}
assumeTrue(observedCursors > 0, "no cursors were created; cannot validate rotation");
assertThat(nodeIds.size()).isEqualTo(upstreams);
}
}
|
RediSearchClusterCursorIntegrationTests
|
java
|
spring-projects__spring-security
|
test/src/test/java/org/springframework/security/test/context/support/WithSecurityContextTestExcecutionListenerTests.java
|
{
"start": 7883,
"end": 7919
}
|
class ____ {
}
@WithMockUser
|
Config
|
java
|
apache__camel
|
components/camel-mock/src/main/java/org/apache/camel/component/mock/TimeClause.java
|
{
"start": 1242,
"end": 1785
}
|
class ____ implements BinaryPredicate {
private static final Logger LOG = LoggerFactory.getLogger(TimeClause.class);
private final Expression left;
private final Expression right;
private Time timeFrom;
private Time timeTo;
private boolean beforeNext;
private String was;
public TimeClause(Expression left, Expression right) {
this.left = left;
this.right = right;
}
// TimeUnit DSL
// -------------------------------------------------------------------------
public
|
TimeClause
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/method/sec2499/Sec2499Tests.java
|
{
"start": 876,
"end": 1528
}
|
class ____ {
private GenericXmlApplicationContext parent;
private GenericXmlApplicationContext child;
@AfterEach
public void cleanup() {
if (this.parent != null) {
this.parent.close();
}
if (this.child != null) {
this.child.close();
}
}
@Test
public void methodExpressionHandlerInParentContextLoads() {
this.parent = new GenericXmlApplicationContext("org/springframework/security/config/method/sec2499/parent.xml");
this.child = new GenericXmlApplicationContext();
this.child.load("org/springframework/security/config/method/sec2499/child.xml");
this.child.setParent(this.parent);
this.child.refresh();
}
}
|
Sec2499Tests
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/TimeoutExtensionTests.java
|
{
"start": 15275,
"end": 16747
}
|
class ____ if method is not in the test class")
void includesClassNameIfMethodIsNotInTestClass() {
EngineExecutionResults results = executeTestsForClass(NestedClassWithOuterSetupMethodTestCase.class);
Execution execution = findExecution(results.testEvents(), "testMethod()");
assertThat(execution.getDuration()) //
.isGreaterThanOrEqualTo(Duration.ofMillis(10)) //
.isLessThan(Duration.ofSeconds(1));
assertThat(execution.getTerminationInfo().getExecutionResult().getThrowable().orElseThrow()) //
.isInstanceOf(TimeoutException.class) //
.hasMessageEndingWith(
"$NestedClassWithOuterSetupMethodTestCase#setUp() timed out after 10 milliseconds");
}
@Test
@DisplayName("reports illegal timeout durations")
void reportsIllegalTimeoutDurations() {
EngineExecutionResults results = executeTestsForClass(IllegalTimeoutDurationTestCase.class);
Execution execution = findExecution(results.testEvents(), "testMethod()");
assertThat(execution.getTerminationInfo().getExecutionResult().getThrowable().orElseThrow()) //
.isInstanceOf(PreconditionViolationException.class) //
.hasMessage("timeout duration must be a positive number: 0");
}
private static Execution findExecution(Events events, String displayName) {
return events.executions()//
.filter(execution -> execution.getTestDescriptor().getDisplayName().contains(displayName))//
.findFirst() //
.orElseThrow();
}
@Nested
@DisplayName("separate thread")
|
name
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/annotations/AnyDiscriminatorValue.java
|
{
"start": 1132,
"end": 1291
}
|
interface ____ {
/**
* The discriminator value
*/
String discriminator();
/**
* The corresponding entity
*/
Class<?> entity();
}
|
AnyDiscriminatorValue
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.