language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/config/FieldRetrievingFactoryBean.java | {
"start": 5994,
"end": 6227
} | class ____ field.
int lastDotIndex = this.staticField.lastIndexOf('.');
if (lastDotIndex == -1 || lastDotIndex == this.staticField.length()) {
throw new IllegalArgumentException(
"staticField must be a fully qualified | and |
java | apache__maven | its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng8181CentralRepoTest.java | {
"start": 1070,
"end": 2593
} | class ____ extends AbstractMavenIntegrationTestCase {
/**
* Verify that the central url can be overridden by a user property.
*
* @throws Exception in case of failure
*/
@Test
public void testitModel() throws Exception {
File testDir = extractResources("/mng-8181-central-repo");
Verifier verifier = newVerifier(testDir.getAbsolutePath(), null);
verifier.setAutoclean(false);
verifier.addCliArgument("--install-settings=install-settings.xml");
verifier.addCliArgument("--settings=settings.xml");
verifier.addCliArgument("-Dmaven.repo.local=" + testDir.toPath().resolve("target/local-repo"));
verifier.addCliArgument("-Dmaven.repo.local.tail=target/null");
// note: intentionally bad URL, we just want tu ensure that this bad URL is used
verifier.addCliArgument("-Dmaven.repo.central=https://repo1.maven.org");
verifier.addCliArgument("validate");
verifier.setHandleLocalRepoTail(false); // we want isolation to have Maven fail due bad URL
assertThrows(VerificationException.class, verifier::execute);
// error is
// PluginResolutionException: Plugin eu.maveniverse.maven.mimir:extension3:XXX or one of its dependencies could
// not be resolved:
// Could not find artifact eu.maveniverse.maven.mimir:extension3:jar:XXX in central (https://repo1.maven.org)
verifier.verifyTextInLog("central (https://repo1.maven.org)");
}
}
| MavenITmng8181CentralRepoTest |
java | quarkusio__quarkus | extensions/panache/hibernate-orm-panache/runtime/src/main/java/io/quarkus/hibernate/orm/panache/PanacheQuery.java | {
"start": 916,
"end": 1077
} | interface ____ be reused to obtain multiple pages of results.
* </p>
*
* @author Stéphane Épardaud
* @param <Entity> The entity type being queried
*/
public | can |
java | junit-team__junit5 | platform-tests/src/test/java/org/junit/platform/engine/support/hierarchical/ParallelExecutionIntegrationTests.java | {
"start": 26097,
"end": 26622
} | class ____ {
@ResourceLock("A")
@Test
void firstTest() {
assertTrue(B.tryLock());
assertTrue(A.tryLock());
}
@Execution(CONCURRENT)
@ResourceLock("A")
@Test
void secondTest() {
assertTrue(B.tryLock());
assertTrue(A.tryLock());
}
@ResourceLock("A")
@Test
void thirdTest() {
assertTrue(B.tryLock());
assertTrue(A.tryLock());
}
@AfterEach
void unlock() {
A.unlock();
B.unlock();
}
}
@ExtendWith(ThreadReporter.class)
@ResourceLock("A")
static | TestCaseWithUnsortedLocks |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/bean/BeanInfoAMoreComplexOverloadedTest.java | {
"start": 3312,
"end": 3389
} | class ____ extends BaseRequest {
public int i;
}
static | RequestA |
java | quarkusio__quarkus | extensions/kafka-streams/runtime/src/test/java/io/quarkus/kafka/streams/runtime/health/TopologyTopicsExtractTest.java | {
"start": 955,
"end": 4347
} | class ____ {
@Test
void testTopologyWithStateStore() {
Set<String> topics = new HashSet<>();
Set<Pattern> patterns = new HashSet<>();
KafkaStreamsTopologyManager.extractSources(topologyWithStateStore(), topics, patterns);
assertThat(topics).containsExactlyInAnyOrder("WEATHER_STATIONS_TOPIC", "TEMPERATURE_VALUES_TOPIC");
}
@Test
void testTopologyWithSelectKey() {
Set<String> topics = new HashSet<>();
Set<Pattern> patterns = new HashSet<>();
KafkaStreamsTopologyManager.extractSources(buildTopology(), topics, patterns);
assertThat(topics).containsExactlyInAnyOrder("streams-test-customers", "streams-test-categories");
}
public Topology topologyWithStateStore() {
StreamsBuilder builder = new StreamsBuilder();
KeyValueBytesStoreSupplier storeSupplier = Stores.persistentKeyValueStore(
"WEATHER_STATIONS_STORE");
GlobalKTable<Integer, String> stations = builder.globalTable( // <1>
"WEATHER_STATIONS_TOPIC",
Consumed.with(Serdes.Integer(), Serdes.String()));
builder.stream( // <2>
"TEMPERATURE_VALUES_TOPIC",
Consumed.with(Serdes.Integer(), Serdes.String()))
.join( // <3>
stations,
(stationId, timestampAndValue) -> stationId,
(timestampAndValue, station) -> {
String[] parts = timestampAndValue.split(";");
return parts[0] + "," + parts[1] + "," + station;
})
.groupByKey() // <4>
.aggregate( // <5>
String::new,
(stationId, value, aggregation) -> aggregation + value,
Materialized.<Integer, String> as(storeSupplier)
.withKeySerde(Serdes.Integer())
.withValueSerde(Serdes.String()))
.toStream()
.to( // <6>
"TEMPERATURES_AGGREGATED_TOPIC",
Produced.with(Serdes.Integer(), Serdes.String()));
return builder.build();
}
@Produces
public Topology buildTopology() {
StreamsBuilder builder = new StreamsBuilder();
KTable<Integer, String> categories = builder.table(
"streams-test-categories",
Consumed.with(Serdes.Integer(), Serdes.String()));
KStream<Integer, String> customers = builder
.stream("streams-test-customers", Consumed.with(Serdes.Integer(), Serdes.String()))
.selectKey((id, customer) -> customer.length())
.join(categories,
(customer, category) -> "" + customer.length() + category.length(),
Joined.with(Serdes.Integer(), Serdes.String(), Serdes.String()));
KeyValueBytesStoreSupplier storeSupplier = Stores.inMemoryKeyValueStore("countstore");
customers.groupByKey()
.count(Materialized.as(storeSupplier));
customers.selectKey((categoryId, customer) -> customer)
.to("streams-test-customers-processed", Produced.with(Serdes.String(), Serdes.String()));
return builder.build();
}
}
| TopologyTopicsExtractTest |
java | reactor__reactor-core | reactor-core/src/jcstress/java/reactor/core/publisher/SinkManyBestEffortStressTest.java | {
"start": 4190,
"end": 4973
} | class ____ {
final SinkManyBestEffort<Integer> sink = SinkManyBestEffort.createBestEffort();
final StressSubscriber<Integer> subscriber = new StressSubscriber<>(0);
final SinkManyBestEffort.DirectInner<Integer> inner;
{
sink.subscribe(subscriber);
inner = sink.subscribers[0];
}
@Actor
public void tryEmitNext(LI_Result r) {
r.r1 = sink.tryEmitNext(1);
}
@Actor
public void cancel() {
inner.set(true);
}
@Actor
public void request() {
inner.request(1);
}
@Arbiter
public void arbiter(LI_Result r) {
r.r2 = subscriber.onNextCalls.get();
}
}
@JCStressTest
@Outcome(id = {"1"}, expect = ACCEPTABLE, desc = "Complete delivered")
@State
public static | InnerTryEmitNextCancelVersusRequestStressTest |
java | spring-projects__spring-framework | spring-webflux/src/main/java/org/springframework/web/reactive/result/method/annotation/RequestBodyMethodArgumentResolver.java | {
"start": 1728,
"end": 2442
} | class ____ extends AbstractMessageReaderArgumentResolver {
public RequestBodyMethodArgumentResolver(List<HttpMessageReader<?>> readers, ReactiveAdapterRegistry registry) {
super(readers, registry);
}
@Override
public boolean supportsParameter(MethodParameter parameter) {
return parameter.hasParameterAnnotation(RequestBody.class);
}
@Override
public Mono<Object> resolveArgument(
MethodParameter param, BindingContext bindingContext, ServerWebExchange exchange) {
RequestBody ann = param.getParameterAnnotation(RequestBody.class);
Assert.state(ann != null, "No RequestBody annotation");
return readBody(param, ann.required(), bindingContext, exchange);
}
}
| RequestBodyMethodArgumentResolver |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/condition/ConditionalOnMissingFilterBeanTests.java | {
"start": 4574,
"end": 4735
} | class ____ {
@Bean
OtherFilter myOtherFilter() {
return new OtherFilter();
}
}
@Configuration(proxyBeanMethods = false)
static | WithoutTestFilterConfig |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/search/ParsedScrollId.java | {
"start": 544,
"end": 1201
} | class ____ {
public static final String QUERY_THEN_FETCH_TYPE = "queryThenFetch";
public static final String QUERY_AND_FETCH_TYPE = "queryAndFetch";
private final String type;
private final SearchContextIdForNode[] context;
ParsedScrollId(String type, SearchContextIdForNode[] context) {
this.type = type;
this.context = context;
}
public String getType() {
return type;
}
public SearchContextIdForNode[] getContext() {
return context;
}
public boolean hasLocalIndices() {
return Arrays.stream(context).anyMatch(c -> c.getClusterAlias() == null);
}
}
| ParsedScrollId |
java | elastic__elasticsearch | modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/RestGeoIpStatsAction.java | {
"start": 936,
"end": 1454
} | class ____ extends BaseRestHandler {
@Override
public String getName() {
return "geoip_downloader_stats";
}
@Override
public List<Route> routes() {
return List.of(new Route(GET, "/_ingest/geoip/stats"));
}
@Override
protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) {
return channel -> client.execute(GeoIpStatsAction.INSTANCE, new GeoIpStatsAction.Request(), new RestToXContentListener<>(channel));
}
}
| RestGeoIpStatsAction |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/main/java/org/springframework/data/jpa/repository/query/ProcedureParameter.java | {
"start": 1028,
"end": 2312
} | class ____ {
@Nullable private final String name;
private final int position;
private final ParameterMode mode;
private final Class<?> type;
ProcedureParameter(@Nullable String name, int position, ParameterMode mode, Class<?> type) {
this.name = name;
this.position = position;
this.mode = mode;
this.type = type;
}
/**
* @return the parameter name. Can be {@literal null}.
*/
@Nullable
String getName() {
return name;
}
/**
* @return the {@code one} based parameter position as listed in
* {@link jakarta.persistence.NamedStoredProcedureQuery#parameters()}
* @since 3.2.6
*/
int getPosition() {
return position;
}
ParameterMode getMode() {
return mode;
}
Class<?> getType() {
return type;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof ProcedureParameter that)) {
return false;
}
return Objects.equals(name, that.name) && mode == that.mode && Objects.equals(type, that.type);
}
@Override
public int hashCode() {
return Objects.hash(name, mode, type);
}
@Override
public String toString() {
return "ProcedureParameter{" + "name='" + name + '\'' + ", position=" + position + ", mode=" + mode + ", type="
+ type + '}';
}
}
| ProcedureParameter |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/CatchingUncheckedTest.java | {
"start": 1155,
"end": 1604
} | class ____ {
void test() {
try {
this.hashCode();
// BUG: Diagnostic contains:
} catch (Exception e) {
}
}
}
""")
.doTest();
}
@Test
public void positiveMultiCatch() {
helper
.addSourceLines(
"Test.java",
"""
import java.io.IOException;
| Test |
java | hibernate__hibernate-orm | hibernate-envers/src/main/java/org/hibernate/envers/configuration/internal/metadata/reader/AuditedPropertiesReader.java | {
"start": 9384,
"end": 9831
} | class ____ not been marked as audited by the subclass.
overriddenNotAuditedClasses.add( overrideClass );
}
}
}
}
}
final ClassDetails superclass = classDetails.getSuperClass();
if ( !classDetails.isInterface() && !OBJECT_CLASS_NAME.equals( superclass.getName() ) ) {
readAuditOverrides( superclass );
}
}
/**
* @param classDetails Source class.
*
* @return List of @AuditOverride annotations applied at | has |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/maybe/MaybeDelayWithCompletable.java | {
"start": 883,
"end": 1334
} | class ____<T> extends Maybe<T> {
final MaybeSource<T> source;
final CompletableSource other;
public MaybeDelayWithCompletable(MaybeSource<T> source, CompletableSource other) {
this.source = source;
this.other = other;
}
@Override
protected void subscribeActual(MaybeObserver<? super T> observer) {
other.subscribe(new OtherObserver<>(observer, source));
}
static final | MaybeDelayWithCompletable |
java | apache__flink | flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/legacy/TwoPhaseCommitSinkFunctionTest.java | {
"start": 12275,
"end": 12746
} | class ____ extends KryoSerializer<ContentTransaction> {
public ContentTransactionSerializer() {
super(ContentTransaction.class, new SerializerConfigImpl());
}
@Override
public KryoSerializer<ContentTransaction> duplicate() {
return this;
}
@Override
public String toString() {
return "ContentTransactionSerializer";
}
}
private static | ContentTransactionSerializer |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/invoker/invalid/PrivateMethodInvokerTest.java | {
"start": 1370,
"end": 1468
} | class ____ {
private String hello() {
return "foobar";
}
}
}
| MyService |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/api/AbstractIntegerAssert.java | {
"start": 1521,
"end": 22487
} | class ____<SELF extends AbstractIntegerAssert<SELF>> extends
AbstractComparableAssert<SELF, Integer> implements NumberAssert<SELF, Integer> {
// TODO reduce the visibility of the fields annotated with @VisibleForTesting
Integers integers = Integers.instance();
protected AbstractIntegerAssert(Integer actual, Class<?> selfType) {
super(actual, selfType);
}
/**
* Verifies that the actual value is equal to the given one.
* <p>
* Example:
* <pre><code class='java'> // assertions will pass:
* assertThat(1).isEqualTo(1);
* assertThat(-1).isEqualTo(-1);
*
* // assertions will fail:
* assertThat(1).isEqualTo(2);
* assertThat(1).isEqualTo(-1);</code></pre>
*
* @param expected the given value to compare the actual value to.
* @return {@code this} assertion object.
* @throws AssertionError if the actual value is {@code null}.
* @throws AssertionError if the actual value is not equal to the given one.
*/
public SELF isEqualTo(int expected) {
integers.assertEqual(info, actual, expected);
return myself;
}
/**
* Verifies that the actual value is equal to the given long.
* If the long value is not in [{@link Integer#MIN_VALUE}, {@link Integer#MAX_VALUE}], the assertion simply fails.
* <p>
* Example:
* <pre><code class='java'> // assertions will pass:
* assertThat(1).isEqualTo(1L);
* assertThat(-1).isEqualTo(-1L);
*
* // assertions will fail:
* assertThat(1).isEqualTo(2L);
* assertThat(1).isEqualTo(-1L);</code></pre>
*
* @param expected the given value to compare the actual value to.
* @return {@code this} assertion object.
* @throws AssertionError if the actual value is {@code null}.
* @throws AssertionError if the actual value is not equal to the given one.
* @since 3.10.0
*/
public SELF isEqualTo(long expected) {
if (canBeCastToInt(expected)) {
integers.assertEqual(info, actual, (int) expected);
} else {
integers.assertEqual(info, actual, expected);
}
return myself;
}
private static boolean canBeCastToInt(long expected) {
return expected <= Integer.MAX_VALUE && expected >= Integer.MIN_VALUE;
}
/**
* Verifies that the actual value is not equal to the given one.
* <p>
* Example:
* <pre><code class='java'> // assertions will pass:
* assertThat(1).isNotEqualTo(2);
* assertThat(1).isNotEqualTo(-1);
*
* // assertion will fail:
* assertThat(1).isNotEqualTo(1);</code></pre>
*
* @param other the given value to compare the actual value to.
* @return {@code this} assertion object.
* @throws AssertionError if the actual value is {@code null}.
* @throws AssertionError if the actual value is equal to the given one.
*/
public SELF isNotEqualTo(int other) {
integers.assertNotEqual(info, actual, other);
return myself;
}
/** {@inheritDoc} */
@Override
public SELF isZero() {
integers.assertIsZero(info, actual);
return myself;
}
/** {@inheritDoc} */
@Override
public SELF isNotZero() {
integers.assertIsNotZero(info, actual);
return myself;
}
/** {@inheritDoc} */
@Override
public SELF isOne() {
integers.assertIsOne(info, actual);
return myself;
}
/** {@inheritDoc} */
@Override
public SELF isPositive() {
integers.assertIsPositive(info, actual);
return myself;
}
/** {@inheritDoc} */
@Override
public SELF isNegative() {
integers.assertIsNegative(info, actual);
return myself;
}
/** {@inheritDoc} */
@Override
public SELF isNotNegative() {
integers.assertIsNotNegative(info, actual);
return myself;
}
/** {@inheritDoc} */
@Override
public SELF isNotPositive() {
integers.assertIsNotPositive(info, actual);
return myself;
}
/**
* Verifies that the actual value is even.
* <p>
* Example:
* <pre><code class='java'> // assertions will pass
* assertThat(12).isEven();
* assertThat(-46).isEven();
*
* // assertions will fail
* assertThat(3).isEven();
* assertThat(15).isEven();</code></pre>
*
* @return this assertion object.
* @throws AssertionError if the actual value is {@code null}.
* @throws AssertionError if the actual value is not even.
* @since 3.17.0
*/
public SELF isEven() {
integers.assertIsEven(info, actual);
return myself;
}
/**
* Verifies that the actual value is odd.
* <p>
* Example:
* <pre><code class='java'> // assertions will pass
* assertThat(3).isOdd();
* assertThat(-17).isOdd();
*
* // assertions will fail
* assertThat(2).isOdd();
* assertThat(-24).isOdd();</code></pre>
*
* @return this assertion object.
* @throws AssertionError if the actual value is {@code null}.
* @throws AssertionError if the actual value is not odd.
* @since 3.17.0
*/
public SELF isOdd() {
integers.assertIsOdd(info, actual);
return myself;
}
/**
* Verifies that the actual value is less than the given one.
* <p>
* Example:
* <pre><code class='java'> // assertions will pass:
* assertThat(1).isLessThan(2);
* assertThat(-2).isLessThan(-1);
*
* // assertions will fail:
* assertThat(1).isLessThan(0);
* assertThat(1).isLessThan(1);</code></pre>
*
* @param other the given value to compare the actual value to.
* @return {@code this} assertion object.
* @throws AssertionError if the actual value is {@code null}.
* @throws AssertionError if the actual value is equal to or greater than the given one.
*/
public SELF isLessThan(int other) {
integers.assertLessThan(info, actual, other);
return myself;
}
/**
* Verifies that the actual value is less than or equal to the given one.
* <p>
* Example:
* <pre><code class='java'> // assertions will pass:
* assertThat(1).isLessThanOrEqualTo(2);
* assertThat(-1).isLessThanOrEqualTo(-2);
* assertThat(1).isLessThanOrEqualTo(1);
*
* // assertions will fail:
* assertThat(1).isLessThanOrEqualTo(2);
* assertThat(-1).isLessThanOrEqualTo(-2);</code></pre>
*
* @param other the given value to compare the actual value to.
* @return {@code this} assertion object.
* @throws AssertionError if the actual value is {@code null}.
* @throws AssertionError if the actual value is greater than the given one.
*/
public SELF isLessThanOrEqualTo(int other) {
integers.assertLessThanOrEqualTo(info, actual, other);
return myself;
}
/**
* Verifies that the actual value is greater than the given one.
* <p>
* Example:
* <pre><code class='java'> // assertions will pass:
* assertThat(1).isGreaterThan(0);
* assertThat(-1).isGreaterThan(-2);
*
* // assertions will fail:
* assertThat(1).isGreaterThan(2);
* assertThat(1).isGreaterThan(1);</code></pre>
*
* @param other the given value to compare the actual value to.
* @return {@code this} assertion object.
* @throws AssertionError if the actual value is {@code null}.
* @throws AssertionError if the actual value is equal to or less than the given one.
*/
public SELF isGreaterThan(int other) {
integers.assertGreaterThan(info, actual, other);
return myself;
}
/**
* Verifies that the actual value is greater than or equal to the given one.
* <p>
* Example:
* <pre><code class='java'> // assertions will pass:
* assertThat(2).isGreaterThanOrEqualTo(1);
* assertThat(1).isGreaterThanOrEqualTo(1);
*
* // assertions will fail:
* assertThat(1).isGreaterThanOrEqualTo(2);
* assertThat(-1).isGreaterThanOrEqualTo(1);</code></pre>
*
* @param other the given value to compare the actual value to.
* @return {@code this} assertion object.
* @throws AssertionError if the actual value is {@code null}.
* @throws AssertionError if the actual value is less than the given one.
*/
public SELF isGreaterThanOrEqualTo(int other) {
integers.assertGreaterThanOrEqualTo(info, actual, other);
return myself;
}
/**
* Verifies that the actual value is in [start, end] range (start included, end included).
*
* <p>
* Example:
* <pre><code class='java'> // assertions will pass
* assertThat(1).isBetween(-1, 2);
* assertThat(1).isBetween(1, 2);
* assertThat(1).isBetween(0, 1);
*
* // assertion will fail
* assertThat(1).isBetween(2, 3);</code></pre>
*/
@Override
public SELF isBetween(Integer start, Integer end) {
integers.assertIsBetween(info, actual, start, end);
return myself;
}
/**
* Verifies that the actual value is in ]start, end[ range (start excluded, end excluded).
*
* <p>
* Example:
* <pre><code class='java'> // assertion will pass
* assertThat(1).isStrictlyBetween(-1, 2);
*
* // assertions will fail
* assertThat(1).isStrictlyBetween(1, 2);
* assertThat(1).isStrictlyBetween(0, 1);
* assertThat(1).isStrictlyBetween(2, 3);</code></pre>
*/
@Override
public SELF isStrictlyBetween(Integer start, Integer end) {
integers.assertIsStrictlyBetween(info, actual, start, end);
return myself;
}
/**
* Verifies that the actual number is close to the given one within the given offset value.
* <p>
* When <i>abs(actual - expected) == offset value</i>, the assertion:
* <ul>
* <li><b>succeeds</b> when using {@link Assertions#within(Integer)}</li>
* <li><b>fails</b> when using {@link Assertions#byLessThan(Integer)} or {@link Offset#strictOffset(Number)}</li>
* </ul>
* <p>
* <b>Breaking change</b> since 2.9.0/3.9.0: using {@link Assertions#byLessThan(Integer)} implies a <b>strict</b> comparison,
* use {@link Assertions#within(Integer)} to get the old behavior.
* <p>
* Examples:
* <pre><code class='java'> // assertions will pass:
* assertThat(5).isCloseTo(7, within(3));
* assertThat(5).isCloseTo(7, byLessThan(3));
*
* // if difference is exactly equals to the offset, it's ok ...
* assertThat(5).isCloseTo(7, within(2));
* // ... but not with byLessThan which implies a strict comparison
* assertThat(5).isCloseTo(7, byLessThan(2)); // FAIL
*
* // assertions will fail
* assertThat(5).isCloseTo(7, within(1));
* assertThat(5).isCloseTo(7, byLessThan(1));
* assertThat(5).isCloseTo(7, byLessThan(2));</code></pre>
*
* @param expected the given int to compare the actual value to.
* @param offset the given positive offset.
* @return {@code this} assertion object.
* @throws NullPointerException if the given offset is {@code null}.
* @throws AssertionError if the actual value is not close enough to the given one.
*/
public SELF isCloseTo(int expected, Offset<Integer> offset) {
integers.assertIsCloseTo(info, actual, expected, offset);
return myself;
}
/**
* Verifies that the actual number is not close to the given one by less than the given offset.<br>
* <p>
* When <i>abs(actual - expected) == offset value</i>, the assertion:
* <ul>
* <li><b>succeeds</b> when using {@link Assertions#byLessThan(Integer)} or {@link Offset#strictOffset(Number)}</li>
* <li><b>fails</b> when using {@link Assertions#within(Integer)}</li>
* </ul>
* <p>
* <b>Breaking change</b> since 2.9.0/3.9.0: using {@link Assertions#byLessThan(Integer)} implies a <b>strict</b> comparison,
* use {@link Assertions#within(Integer)} to get the old behavior.
* <p>
* Examples:
* <pre><code class='java'> // assertions will pass:
* assertThat(5).isNotCloseTo(7, byLessThan(1));
* assertThat(5).isNotCloseTo(7, within(1));
* // diff == offset but isNotCloseTo succeeds as we use byLessThan
* assertThat(5).isNotCloseTo(7, byLessThan(2));
*
* // assertions will fail
* assertThat(5).isNotCloseTo(7, within(2));
* assertThat(5).isNotCloseTo(7, byLessThan(3));</code></pre>
*
* @param expected the given int to compare the actual value to.
* @param offset the given positive offset.
* @return {@code this} assertion object.
* @throws NullPointerException if the given offset is {@code null}.
* @throws AssertionError if the actual value is close to the given one.
* @see Assertions#byLessThan(Integer)
* @since 2.6.0 / 3.6.0
*/
public SELF isNotCloseTo(int expected, Offset<Integer> offset) {
integers.assertIsNotCloseTo(info, actual, expected, offset);
return myself;
}
/**
* Verifies that the actual number is close to the given one within the given offset value.
* <p>
* When <i>abs(actual - expected) == offset value</i>, the assertion:
* <ul>
* <li><b>succeeds</b> when using {@link Assertions#within(Integer)}</li>
* <li><b>fails</b> when using {@link Assertions#byLessThan(Integer)} or {@link Offset#strictOffset(Number)}</li>
* </ul>
* <p>
* <b>Breaking change</b> since 2.9.0/3.9.0: using {@link Assertions#byLessThan(Integer)} implies a <b>strict</b> comparison,
* use {@link Assertions#within(Integer)} to get the old behavior.
* <p>
* Examples:
* <pre><code class='java'> // assertions succeed:
* assertThat(5).isCloseTo(7, within(3));
* assertThat(5).isCloseTo(7, byLessThan(3));
*
* // if difference is exactly equals to the offset, it's ok ...
* assertThat(5).isCloseTo(7, within(2));
* // ... but not with byLessThan which implies a strict comparison
* assertThat(5).isCloseTo(7, byLessThan(2)); // FAIL
*
* // assertions fail
* assertThat(5).isCloseTo(7, within(1));
* assertThat(5).isCloseTo(7, byLessThan(1));
* assertThat(5).isCloseTo(7, byLessThan(2));</code></pre>
*
* @param expected the given int to compare the actual value to.
* @param offset the given positive offset.
* @return {@code this} assertion object.
* @throws NullPointerException if the given offset is {@code null}.
* @throws AssertionError if the actual value is not close enough to the given one.
*/
@Override
public SELF isCloseTo(Integer expected, Offset<Integer> offset) {
integers.assertIsCloseTo(info, actual, expected, offset);
return myself;
}
/**
* Verifies that the actual number is not close to the given one by less than the given offset.<br>
* <p>
* When <i>abs(actual - expected) == offset value</i>, the assertion:
* <ul>
* <li><b>succeeds</b> when using {@link Assertions#byLessThan(Integer)} or {@link Offset#strictOffset(Number)}</li>
* <li><b>fails</b> when using {@link Assertions#within(Integer)}</li>
* </ul>
* <p>
* <b>Breaking change</b> since 2.9.0/3.9.0: using {@link Assertions#byLessThan(Integer)} implies a <b>strict</b> comparison,
* use {@link Assertions#within(Integer)} to get the old behavior.
* <p>
* Examples:
* <pre><code class='java'> // assertions will pass:
* assertThat(5).isNotCloseTo(7, byLessThan(1));
* assertThat(5).isNotCloseTo(7, within(1));
* // diff == offset but isNotCloseTo succeeds as we use byLessThan
* assertThat(5).isNotCloseTo(7, byLessThan(2));
*
* // assertions will fail
* assertThat(5).isNotCloseTo(7, within(2));
* assertThat(5).isNotCloseTo(7, byLessThan(3));</code></pre>
*
* @param expected the given int to compare the actual value to.
* @param offset the given positive offset.
* @return {@code this} assertion object.
* @throws NullPointerException if the given offset is {@code null}.
* @throws AssertionError if the actual value is close to the given one.
* @see Assertions#byLessThan(Integer)
* @since 2.6.0 / 3.6.0
*/
@Override
public SELF isNotCloseTo(Integer expected, Offset<Integer> offset) {
integers.assertIsNotCloseTo(info, actual, expected, offset);
return myself;
}
/**
* Verifies that the actual number is close to the given one within the given percentage.<br>
* If difference is equal to the percentage value, assertion is considered valid.
* <p>
* Example with integer:
* <pre><code class='java'> // assertions will pass:
* assertThat(11).isCloseTo(Integer.valueOf(10), withinPercentage(20));
*
* // if difference is exactly equals to the computed offset (1), it's ok
* assertThat(11).isCloseTo(Integer.valueOf(10), withinPercentage(10));
*
* // assertion will fail
* assertThat(11).isCloseTo(Integer.valueOf(10), withinPercentage(5));</code></pre>
*
* @param expected the given number to compare the actual value to.
* @param percentage the given positive percentage.
* @return {@code this} assertion object.
* @throws NullPointerException if the given offset is {@code null}.
* @throws NullPointerException if the expected number is {@code null}.
* @throws AssertionError if the actual value is not equal to the given one.
*/
@Override
public SELF isCloseTo(Integer expected, Percentage percentage) {
integers.assertIsCloseToPercentage(info, actual, expected, percentage);
return myself;
}
/**
* Verifies that the actual number is not close to the given one by the given percentage.<br>
* If difference is equal to the percentage value, the assertion fails.
* <p>
* Example with integer:
* <pre><code class='java'> // assertion will pass:
* assertThat(11).isNotCloseTo(Integer.valueOf(10), withinPercentage(5));
*
* // assertions will fail
* assertThat(11).isNotCloseTo(Integer.valueOf(10), withinPercentage(10));
* assertThat(11).isNotCloseTo(Integer.valueOf(10), withinPercentage(20));</code></pre>
*
* @param expected the given number to compare the actual value to.
* @param percentage the given positive percentage.
* @return {@code this} assertion object.
* @throws NullPointerException if the given offset is {@code null}.
* @throws NullPointerException if the expected number is {@code null}.
* @throws AssertionError if the actual value is close to the given one.
* @since 2.6.0 / 3.6.0
*/
@Override
public SELF isNotCloseTo(Integer expected, Percentage percentage) {
integers.assertIsNotCloseToPercentage(info, actual, expected, percentage);
return myself;
}
/**
* Verifies that the actual number is close to the given one within the given percentage.<br>
* If difference is equal to the percentage value, assertion is considered valid.
* <p>
* Example with integer:
* <pre><code class='java'> // assertions will pass:
* assertThat(11).isCloseTo(10, withinPercentage(20));
*
* // if difference is exactly equals to the computed offset (1), it's ok
* assertThat(11).isCloseTo(10, withinPercentage(10));
*
* // assertion will fail
* assertThat(11).isCloseTo(10, withinPercentage(5));</code></pre>
*
* @param expected the given number to compare the actual value to.
* @param percentage the given positive percentage.
* @return {@code this} assertion object.
* @throws NullPointerException if the given offset is {@code null}.
* @throws NullPointerException if the expected number is {@code null}.
* @throws AssertionError if the actual value is not close enough to the given one.
*/
public SELF isCloseTo(int expected, Percentage percentage) {
integers.assertIsCloseToPercentage(info, actual, expected, percentage);
return myself;
}
/**
* Verifies that the actual number is not close to the given one by the given percentage.<br>
* If difference is equal to the percentage value, the assertion fails.
* <p>
* Example with integer:
* <pre><code class='java'> // assertion will pass:
* assertThat(11).isNotCloseTo(10, withinPercentage(5));
*
* // assertions will fail
* assertThat(11).isNotCloseTo(10, withinPercentage(10));
* assertThat(11).isNotCloseTo(10, withinPercentage(20));</code></pre>
*
* @param expected the given number to compare the actual value to.
* @param percentage the given positive percentage.
* @return {@code this} assertion object.
* @throws NullPointerException if the given {@link Percentage} is {@code null}.
* @throws NullPointerException if the expected number is {@code null}.
* @throws AssertionError if the actual value is close to the given one.
* @since 2.6.0 / 3.6.0
*/
public SELF isNotCloseTo(int expected, Percentage percentage) {
integers.assertIsNotCloseToPercentage(info, actual, expected, percentage);
return myself;
}
@Override
@CheckReturnValue
public SELF usingComparator(Comparator<? super Integer> customComparator) {
return usingComparator(customComparator, null);
}
@Override
@CheckReturnValue
public SELF usingComparator(Comparator<? super Integer> customComparator, String customComparatorDescription) {
integers = new Integers(new ComparatorBasedComparisonStrategy(customComparator, customComparatorDescription));
return super.usingComparator(customComparator, customComparatorDescription);
}
@Override
@CheckReturnValue
public SELF usingDefaultComparator() {
integers = Integers.instance();
return super.usingDefaultComparator();
}
}
| AbstractIntegerAssert |
java | elastic__elasticsearch | modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DissectProcessorFactoryTests.java | {
"start": 904,
"end": 3746
} | class ____ extends ESTestCase {
public void testCreate() {
DissectProcessor.Factory factory = new DissectProcessor.Factory();
String fieldName = RandomDocumentPicks.randomFieldName(random());
String processorTag = randomAlphaOfLength(10);
String pattern = "%{a},%{b},%{c}";
String appendSeparator = ":";
Map<String, Object> config = new HashMap<>();
config.put("field", fieldName);
config.put("pattern", pattern);
config.put("append_separator", appendSeparator);
config.put("ignore_missing", true);
DissectProcessor processor = factory.create(null, processorTag, null, config, null);
assertThat(processor.getTag(), equalTo(processorTag));
assertThat(processor.field, equalTo(fieldName));
assertThat(processor.pattern, equalTo(pattern));
assertThat(processor.appendSeparator, equalTo(appendSeparator));
assertThat(processor.dissectParser, is(notNullValue()));
assertThat(processor.ignoreMissing, is(true));
}
public void testCreateMissingField() {
DissectProcessor.Factory factory = new DissectProcessor.Factory();
Map<String, Object> config = new HashMap<>();
config.put("pattern", "%{a},%{b},%{c}");
Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, "_tag", null, config, null));
assertThat(e.getMessage(), equalTo("[field] required property is missing"));
}
public void testCreateMissingPattern() {
DissectProcessor.Factory factory = new DissectProcessor.Factory();
Map<String, Object> config = new HashMap<>();
config.put("field", randomAlphaOfLength(10));
Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, "_tag", null, config, null));
assertThat(e.getMessage(), equalTo("[pattern] required property is missing"));
}
public void testCreateMissingOptionals() {
DissectProcessor.Factory factory = new DissectProcessor.Factory();
Map<String, Object> config = new HashMap<>();
config.put("pattern", "%{a},%{b},%{c}");
config.put("field", randomAlphaOfLength(10));
DissectProcessor processor = factory.create(null, "_tag", null, config, null);
assertThat(processor.appendSeparator, equalTo(""));
assertThat(processor.ignoreMissing, is(false));
}
public void testCreateBadPattern() {
DissectProcessor.Factory factory = new DissectProcessor.Factory();
Map<String, Object> config = new HashMap<>();
config.put("pattern", "no keys defined");
config.put("field", randomAlphaOfLength(10));
expectThrows(DissectException.class, () -> factory.create(null, "_tag", null, config, null));
}
}
| DissectProcessorFactoryTests |
java | quarkusio__quarkus | extensions/kafka-streams/runtime-dev/src/main/java/io/quarkus/kafka/streams/runtime/dev/ui/TopologyParserContext.java | {
"start": 229,
"end": 3228
} | class ____ {
String currentNode = "";
final Set<String> subTopologies = new TreeSet<>();
final Set<String> sources = new TreeSet<>();
final Set<String> sinks = new TreeSet<>();
final Set<String> stores = new TreeSet<>();
final Graphviz graphviz = new Graphviz();
final Mermaid mermaid = new Mermaid();
void addSubTopology(String subTopology) {
final var sanitizedSubTopology = sanitize(subTopology);
subTopologies.add(sanitizedSubTopology);
graphviz.addSubTopology(sanitizedSubTopology);
mermaid.addSubTopology(sanitizedSubTopology);
}
void addSink(String sink, String topic) {
final var sanitizedTopic = sanitize(topic);
sinks.add(sanitizedTopic);
final var sanitizedSink = sanitize(sink);
currentNode = sanitize(sanitizedSink);
graphviz.addSink(sanitizedSink, sanitizedTopic);
mermaid.addSink(sanitizedSink, sanitizedTopic);
}
void addSources(String source, String[] topics) {
currentNode = sanitize(source);
Arrays.stream(topics)
.map(String::trim).filter(topic -> !topic.isEmpty())
.forEachOrdered(topic -> {
final var sanitizedTopic = sanitize(topic);
sources.add(sanitizedTopic);
graphviz.addSource(currentNode, sanitizedTopic);
mermaid.addSource(currentNode, sanitizedTopic);
});
}
void addRegexSource(String source, String regex) {
currentNode = sanitize(source);
final var sanitizedRegex = sanitize(regex);
if (!sanitizedRegex.isEmpty()) {
sources.add(sanitizedRegex);
graphviz.addRegexSource(currentNode, sanitizedRegex);
mermaid.addRegexSource(currentNode, sanitizedRegex);
}
}
void addStores(String[] stores, String processor, boolean join) {
currentNode = sanitize(processor);
Arrays.stream(stores)
.map(String::trim).filter(store -> !store.isEmpty())
.forEachOrdered(store -> {
final var sanitizedStore = sanitize(store);
this.stores.add(sanitizedStore);
graphviz.addStore(sanitizedStore, currentNode, join);
mermaid.addStore(sanitizedStore, currentNode, join);
});
}
void addTargets(String[] targets) {
Arrays.stream(targets)
.map(String::trim).filter(target -> !("none".equals(target) || target.isEmpty()))
.forEachOrdered(target -> {
final var sanitizedTarget = sanitize(target);
graphviz.addTarget(sanitizedTarget, currentNode);
mermaid.addTarget(sanitizedTarget, currentNode);
});
}
private static String sanitize(String name) {
return name != null ? name.trim().replaceAll("\"", "") : null;
}
static final | TopologyParserContext |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/DataSetEndpointBuilderFactory.java | {
"start": 7117,
"end": 14020
} | interface ____
extends
EndpointConsumerBuilder {
default DataSetEndpointConsumerBuilder basic() {
return (DataSetEndpointConsumerBuilder) this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedDataSetEndpointConsumerBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedDataSetEndpointConsumerBuilder bridgeErrorHandler(String bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option is a: <code>org.apache.camel.spi.ExceptionHandler</code>
* type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedDataSetEndpointConsumerBuilder exceptionHandler(org.apache.camel.spi.ExceptionHandler exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option will be converted to a
* <code>org.apache.camel.spi.ExceptionHandler</code> type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedDataSetEndpointConsumerBuilder exceptionHandler(String exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option is a: <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedDataSetEndpointConsumerBuilder exchangePattern(org.apache.camel.ExchangePattern exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option will be converted to a
* <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedDataSetEndpointConsumerBuilder exchangePattern(String exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* Maximum number of messages to keep in memory available for browsing.
* Use 0 for unlimited.
*
* The option is a: <code>int</code> type.
*
* Default: 100
* Group: advanced
*
* @param browseLimit the value to set
* @return the dsl builder
*/
default AdvancedDataSetEndpointConsumerBuilder browseLimit(int browseLimit) {
doSetProperty("browseLimit", browseLimit);
return this;
}
/**
* Maximum number of messages to keep in memory available for browsing.
* Use 0 for unlimited.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 100
* Group: advanced
*
* @param browseLimit the value to set
* @return the dsl builder
*/
default AdvancedDataSetEndpointConsumerBuilder browseLimit(String browseLimit) {
doSetProperty("browseLimit", browseLimit);
return this;
}
}
/**
* Builder for endpoint producers for the Dataset component.
*/
public | AdvancedDataSetEndpointConsumerBuilder |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockPoolManager.java | {
"start": 1610,
"end": 7888
} | class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(TestBlockPoolManager.class);
private final DataNode mockDN = Mockito.mock(DataNode.class);
private BlockPoolManager bpm;
private final StringBuilder log = new StringBuilder();
private int mockIdx = 1;
@BeforeEach
public void setupBPM() {
bpm = new BlockPoolManager(mockDN){
@Override
protected BPOfferService createBPOS(
final String nameserviceId,
List<String> nnIds,
List<InetSocketAddress> nnAddrs,
List<InetSocketAddress> lifelineNnAddrs) {
final int idx = mockIdx++;
doLog("create #" + idx);
final BPOfferService bpos = Mockito.mock(BPOfferService.class);
Mockito.doReturn("Mock BPOS #" + idx).when(bpos).toString();
List<BPServiceActor> bpsa = new ArrayList<>(nnIds.size());
for (int i = 0; i < nnIds.size(); i++) {
BPServiceActor actor = Mockito.mock(BPServiceActor.class);
Mockito.doReturn(nnIds.get(i)).when(actor).getNnId();
Mockito.doReturn(nnAddrs.get(i)).when(actor).getNNSocketAddress();
bpsa.add(actor);
}
Mockito.doReturn(bpsa).when(bpos).getBPServiceActors();
// Log refreshes
try {
Mockito.doAnswer(
new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
doLog("refresh #" + idx);
return null;
}
}).when(bpos).refreshNNList(Mockito.anyString(),
Mockito.<List<String>>any(),
Mockito.<ArrayList<InetSocketAddress>>any(),
Mockito.<ArrayList<InetSocketAddress>>any());
} catch (IOException e) {
throw new RuntimeException(e);
}
// Log stops
Mockito.doAnswer(
new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
doLog("stop #" + idx);
bpm.remove(bpos);
return null;
}
}).when(bpos).stop();
return bpos;
}
};
}
private void doLog(String string) {
synchronized(log) {
LOG.info(string);
log.append(string).append("\n");
}
}
@Test
public void testSimpleSingleNS() throws Exception {
Configuration conf = new Configuration();
conf.set(DFSConfigKeys.FS_DEFAULT_NAME_KEY,
"hdfs://mock1:8020");
bpm.refreshNamenodes(conf);
assertEquals("create #1\n", log.toString());
}
@Test
public void testFederationRefresh() throws Exception {
Configuration conf = new Configuration();
conf.set(DFSConfigKeys.DFS_NAMESERVICES,
"ns1,ns2");
addNN(conf, "ns1", "mock1:8020");
addNN(conf, "ns2", "mock1:8020");
bpm.refreshNamenodes(conf);
assertEquals(
"create #1\n" +
"create #2\n", log.toString());
log.setLength(0);
// Remove the first NS
conf.set(DFSConfigKeys.DFS_NAMESERVICES,
"ns2");
bpm.refreshNamenodes(conf);
assertEquals(
"stop #1\n" +
"refresh #2\n", log.toString());
log.setLength(0);
// Add back an NS -- this creates a new BPOS since the old
// one for ns2 should have been previously retired
conf.set(DFSConfigKeys.DFS_NAMESERVICES,
"ns1,ns2");
bpm.refreshNamenodes(conf);
assertEquals(
"create #3\n" +
"refresh #2\n", log.toString());
}
@Test
public void testInternalNameService() throws Exception {
Configuration conf = new Configuration();
conf.set(DFSConfigKeys.DFS_NAMESERVICES, "ns1,ns2,ns3");
addNN(conf, "ns1", "mock1:8020");
addNN(conf, "ns2", "mock1:8020");
addNN(conf, "ns3", "mock1:8020");
conf.set(DFSConfigKeys.DFS_INTERNAL_NAMESERVICES_KEY, "ns1");
bpm.refreshNamenodes(conf);
assertEquals("create #1\n", log.toString());
Map<String, BPOfferService> map = bpm.getBpByNameserviceId();
assertFalse(map.containsKey("ns2"));
assertFalse(map.containsKey("ns3"));
assertTrue(map.containsKey("ns1"));
log.setLength(0);
}
@Test
public void testNameServiceNeedToBeResolved() throws Exception {
Configuration conf = new Configuration();
conf.set(DFSConfigKeys.DFS_NAMESERVICES, "ns1,ns2,ns3");
addNN(conf, "ns1", "mock1:8020");
addNN(conf, "ns2", "mock1:8020");
addNN(conf, "ns3", MockDomainNameResolver.DOMAIN + ":8020");
addDNSSettings(conf, "ns3");
bpm.refreshNamenodes(conf);
assertEquals(
"create #1\n" +
"create #2\n" +
"create #3\n", log.toString());
Map<String, BPOfferService> map = bpm.getBpByNameserviceId();
assertTrue(map.containsKey("ns1"));
assertTrue(map.containsKey("ns2"));
assertTrue(map.containsKey("ns3"));
assertEquals(2, map.get("ns3").getBPServiceActors().size());
assertEquals("ns3-" + MockDomainNameResolver.FQDN_1 + "-8020",
map.get("ns3").getBPServiceActors().get(0).getNnId());
assertEquals("ns3-" + MockDomainNameResolver.FQDN_2 + "-8020",
map.get("ns3").getBPServiceActors().get(1).getNnId());
assertEquals(
new InetSocketAddress(MockDomainNameResolver.FQDN_1, 8020),
map.get("ns3").getBPServiceActors().get(0).getNNSocketAddress());
assertEquals(
new InetSocketAddress(MockDomainNameResolver.FQDN_2, 8020),
map.get("ns3").getBPServiceActors().get(1).getNNSocketAddress());
log.setLength(0);
}
/**
* Add more DNS related settings to the passed in configuration.
* @param config Configuration file to add settings to.
*/
private void addDNSSettings(Configuration config,
String nameservice) {
config.setBoolean(
DFSConfigKeys.DFS_NAMESERVICES_RESOLUTION_ENABLED + "."
+ nameservice, true);
config.set(
DFSConfigKeys.DFS_NAMESERVICES_RESOLVER_IMPL + "." + nameservice,
MockDomainNameResolver.class.getName());
}
private static void addNN(Configuration conf, String ns, String addr) {
String key = DFSUtil.addKeySuffixes(
DFSConfigKeys.DFS_NAMENODE_RPC_ADDRESS_KEY, ns);
conf.set(key, addr);
}
}
| TestBlockPoolManager |
java | micronaut-projects__micronaut-core | core/src/main/java/io/micronaut/core/reflect/ClassUtils.java | {
"start": 14473,
"end": 14757
} | class ____ {
private final Set<String> missingTypes;
public Optimizations(Set<String> missingTypes) {
this.missingTypes = missingTypes;
}
public Set<String> getMissingTypes() {
return missingTypes;
}
}
}
| Optimizations |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/typeutils/base/LocalDateComparator.java | {
"start": 1388,
"end": 6179
} | class ____ extends TypeComparator<LocalDate> implements Serializable {
private transient LocalDate reference;
protected final boolean ascendingComparison;
// For use by getComparators
@SuppressWarnings("rawtypes")
private final LocalDateComparator[] comparators = new LocalDateComparator[] {this};
public LocalDateComparator(boolean ascending) {
this.ascendingComparison = ascending;
}
@Override
public int hash(LocalDate value) {
return value.hashCode();
}
@Override
public void setReference(LocalDate toCompare) {
this.reference = toCompare;
}
@Override
public boolean equalToReference(LocalDate candidate) {
return candidate.equals(reference);
}
@Override
public int compareToReference(TypeComparator<LocalDate> referencedComparator) {
int comp = ((LocalDateComparator) referencedComparator).reference.compareTo(reference);
return ascendingComparison ? comp : -comp;
}
@Override
public int compare(LocalDate first, LocalDate second) {
int cmp = first.compareTo(second);
return ascendingComparison ? cmp : -cmp;
}
@Override
public boolean invertNormalizedKey() {
return !ascendingComparison;
}
@Override
public boolean supportsSerializationWithKeyNormalization() {
return false;
}
@Override
public void writeWithKeyNormalization(LocalDate record, DataOutputView target)
throws IOException {
throw new UnsupportedOperationException();
}
@Override
public int extractKeys(Object record, Object[] target, int index) {
target[index] = record;
return 1;
}
@SuppressWarnings("rawtypes")
@Override
public TypeComparator[] getFlatComparators() {
return comparators;
}
@Override
public LocalDate readWithKeyDenormalization(LocalDate reuse, DataInputView source)
throws IOException {
throw new UnsupportedOperationException();
}
@Override
public int compareSerialized(DataInputView firstSource, DataInputView secondSource)
throws IOException {
return compareSerializedLocalDate(firstSource, secondSource, ascendingComparison);
}
@Override
public boolean supportsNormalizedKey() {
return true;
}
@Override
public int getNormalizeKeyLen() {
return 6;
}
@Override
public boolean isNormalizedKeyPrefixOnly(int keyBytes) {
return keyBytes < getNormalizeKeyLen();
}
@Override
public void putNormalizedKey(LocalDate record, MemorySegment target, int offset, int numBytes) {
putNormalizedKeyLocalDate(record, target, offset, numBytes);
}
@Override
public LocalDateComparator duplicate() {
return new LocalDateComparator(ascendingComparison);
}
// --------------------------------------------------------------------------------------------
// Static Helpers for Date Comparison
// --------------------------------------------------------------------------------------------
public static int compareSerializedLocalDate(
DataInputView firstSource, DataInputView secondSource, boolean ascendingComparison)
throws IOException {
int cmp = firstSource.readInt() - secondSource.readInt();
if (cmp == 0) {
cmp = firstSource.readByte() - secondSource.readByte();
if (cmp == 0) {
cmp = firstSource.readByte() - secondSource.readByte();
}
}
return ascendingComparison ? cmp : -cmp;
}
public static void putNormalizedKeyLocalDate(
LocalDate record, MemorySegment target, int offset, int numBytes) {
int year = record.getYear();
int unsignedYear = year - Integer.MIN_VALUE;
if (numBytes >= 4) {
target.putIntBigEndian(offset, unsignedYear);
numBytes -= 4;
offset += 4;
} else if (numBytes > 0) {
for (int i = 0; numBytes > 0; numBytes--, i++) {
target.put(offset + i, (byte) (unsignedYear >>> ((3 - i) << 3)));
}
return;
}
int month = record.getMonthValue();
if (numBytes > 0) {
target.put(offset, (byte) (month & 0xff - Byte.MIN_VALUE));
numBytes -= 1;
offset += 1;
}
int day = record.getDayOfMonth();
if (numBytes > 0) {
target.put(offset, (byte) (day & 0xff - Byte.MIN_VALUE));
numBytes -= 1;
offset += 1;
}
for (int i = 0; i < numBytes; i++) {
target.put(offset + i, (byte) 0);
}
}
}
| LocalDateComparator |
java | google__dagger | javatests/dagger/functional/basic/Thing.java | {
"start": 727,
"end": 809
} | class ____ {
@Inject Thing(@SuppressWarnings("unused") OtherThing unused) {}
}
| Thing |
java | spring-projects__spring-boot | buildpack/spring-boot-buildpack-platform/src/main/java/org/springframework/boot/buildpack/platform/system/Environment.java | {
"start": 887,
"end": 1330
} | interface ____ {
/**
* Standard {@link Environment} implementation backed by
* {@link System#getenv(String)}.
*/
Environment SYSTEM = System::getenv;
/**
* Gets the value of the specified environment variable.
* @param name the name of the environment variable
* @return the string value of the variable, or {@code null} if the variable is not
* defined in the environment
*/
@Nullable String get(String name);
}
| Environment |
java | apache__camel | components/camel-test/camel-test-junit5/src/main/java/org/apache/camel/test/junit5/AbstractTestSupport.java | {
"start": 5961,
"end": 15658
} | class ____ annotated with @TestInstance(TestInstance.Lifecycle.PER_CLASS).
* <p/>
* <b>Important:</b> Use this with care as the {@link CamelContext} will carry over state from previous tests, such
* as endpoints, components etc. So you cannot use this in all your tests.
* <p/>
*
* @deprecated Use the accessors from {@link #testConfiguration()} method
* @return <tt>true</tt> per class, <tt>false</tt> per test.
*/
@Deprecated(since = "4.7.0")
protected final boolean isCreateCamelContextPerClass() {
return testConfigurationBuilder.isCreateCamelContextPerClass();
}
/**
* Override to enable auto mocking endpoints based on the pattern.
* <p/>
* Return <tt>*</tt> to mock all endpoints.
*
* @see EndpointHelper#matchEndpoint(CamelContext, String, String)
* @deprecated Use the accessors from {@link #camelContextConfiguration()} method
*/
@Deprecated(since = "4.7.0")
public String isMockEndpoints() {
return camelContextConfiguration().mockEndpoints();
}
/**
* Override to enable auto mocking endpoints based on the pattern, and <b>skip</b> sending to original endpoint.
* <p/>
* Return <tt>*</tt> to mock all endpoints.
*
* @see EndpointHelper#matchEndpoint(CamelContext, String, String)
* @deprecated Use the accessors from {@link #camelContextConfiguration()} method
*/
@Deprecated(since = "4.7.0")
public String isMockEndpointsAndSkip() {
return camelContextConfiguration().mockEndpointsAndSkip();
}
/**
* Override to enable auto stub endpoints based on the pattern.
* <p/>
* Return <tt>*</tt> to mock all endpoints.
*
* @see EndpointHelper#matchEndpoint(CamelContext, String, String)
* @deprecated Use the accessors from {@link #camelContextConfiguration()} method
*/
@Deprecated(since = "4.11.0")
public String isStubEndpoints() {
return camelContextConfiguration().stubEndpoints();
}
/**
* Override to exclusive filtering of routes to not automatically start with Camel starts.
*
* The pattern support matching by route id or endpoint urls.
*
* Multiple patterns can be specified separated by comma, as example, to exclude all the routes starting from kafka
* or jms use: kafka,jms.
*
* @see EndpointHelper#matchEndpoint(CamelContext, String, String)
* @deprecated Use the accessors from {@link #camelContextConfiguration()} method
*/
@Deprecated(since = "4.11.0")
public String isAutoStartupExcludePatterns() {
return camelContextConfiguration().autoStartupExcludePatterns();
}
/**
* To replace from routes
*
* @param routeId
* @param fromEndpoint
* @deprecated Use the accessors from {@link #camelContextConfiguration()} method
*/
@Deprecated(since = "4.7.0")
public void replaceRouteFromWith(String routeId, String fromEndpoint) {
camelContextConfiguration.replaceRouteFromWith(routeId, fromEndpoint);
}
/**
* Used for filtering routes matching the given pattern, which follows the following rules:
* <p>
* - Match by route id - Match by route input endpoint uri
* <p>
* The matching is using exact match, by wildcard and regular expression.
* <p>
* For example to only include routes which starts with foo in their route id's, use: include=foo* And to
* exclude routes which starts from JMS endpoints, use: exclude=jms:*
* <p>
* Multiple patterns can be separated by comma, for example to exclude both foo and bar routes, use:
* exclude=foo*,bar*
* <p>
* Exclude takes precedence over include.
*/
@Deprecated(since = "4.7.0")
public String getRouteFilterIncludePattern() {
return camelContextConfiguration.routeFilterIncludePattern();
}
/**
* Used for filtering routes matching the given pattern, which follows the following rules:
* <p>
* - Match by route id - Match by route input endpoint uri
* <p>
* The matching is using exact match, by wildcard and regular expression.
* <p>
* For example to only include routes which starts with foo in their route id's, use: include=foo* And to
* exclude routes which starts from JMS endpoints, use: exclude=jms:*
* <p>
* Multiple patterns can be separated by comma, for example to exclude both foo and bar routes, use:
* exclude=foo*,bar*
* <p>
* Exclude takes precedence over include.
*/
@Deprecated(since = "4.7.0")
public String getRouteFilterExcludePattern() {
return camelContextConfiguration.routeFilterExcludePattern();
}
/**
* Override to enable debugger
* <p/>
* Is default <tt>false</tt>
*
* @deprecated Use the accessors from {@link #testConfiguration()} method
*/
@Deprecated(since = "4.7.0")
public boolean isUseDebugger() {
return camelContextConfiguration.useDebugger();
}
@Deprecated(since = "4.7.0")
public Service getCamelContextService() {
return camelContextConfiguration.camelContextService();
}
@Deprecated(since = "4.7.0")
public Service camelContextService() {
return camelContextConfiguration.camelContextService();
}
/**
* Gets a reference to the CamelContext. Must not be used during test setup.
*
* @return A reference to the CamelContext
*/
public CamelContext context() {
return context;
}
/**
* Sets the CamelContext. Used by the manager to override tests that try to access the context during setup. DO NOT
* USE.
*
* @param context
*/
@Deprecated(since = "4.7.0")
public void setContext(ModelCamelContext context) {
this.context = context;
}
public ProducerTemplate template() {
return template;
}
public FluentProducerTemplate fluentTemplate() {
return fluentTemplate;
}
public ConsumerTemplate consumer() {
return consumer;
}
/**
* Allows a service to be registered a separate lifecycle service to start and stop the context; such as for Spring
* when the ApplicationContext is started and stopped, rather than directly stopping the CamelContext
*/
public void setCamelContextService(Service service) {
camelContextConfiguration.withCamelContextService(service);
}
/**
* Whether JMX should be used during testing.
*
* @deprecated Use the methods {@link #testConfiguration()} to enable, disable or check JMX state.
* @return <tt>false</tt> by default.
*/
@Deprecated(since = "4.7.0")
protected boolean useJmx() {
return testConfigurationBuilder.isJmxEnabled();
}
/**
* Override this method to include and override properties with the Camel {@link PropertiesComponent}.
*
* @deprecated Use the accessors from {@link #camelContextConfiguration()} method
* @return additional properties to add/override.
*/
@Deprecated(since = "4.7.0")
protected Properties useOverridePropertiesWithPropertiesComponent() {
return camelContextConfiguration.useOverridePropertiesWithPropertiesComponent();
}
/**
* Whether to ignore missing locations with the {@link PropertiesComponent}. For example when unit testing you may
* want to ignore locations that are not available in the environment used for testing.
*
* @deprecated Use the accessors from {@link #camelContextConfiguration()} method
* @return <tt>true</tt> to ignore, <tt>false</tt> to not ignore, and <tt>null</tt> to leave as configured on
* the {@link PropertiesComponent}
*/
@Deprecated(since = "4.7.0")
protected Boolean ignoreMissingLocationWithPropertiesComponent() {
return camelContextConfiguration.ignoreMissingLocationWithPropertiesComponent();
}
/**
* Gets the {@link CamelContextConfiguration} for the test
*
* @return the camel context configuration
*/
@Override
public final CamelContextConfiguration camelContextConfiguration() {
return camelContextConfiguration;
}
/**
* Gets the {@link TestExecutionConfiguration} test execution configuration instance for the test
*
* @return the configuration instance for the test
*/
@Override
public final TestExecutionConfiguration testConfiguration() {
return testConfigurationBuilder;
}
/**
* Disables the JMX agent. Must be called before the setup method.
*
* @deprecated Use the methods {@link #testConfiguration()} to enable, disable or check JMX state.
*/
@Deprecated(since = "4.7.0")
protected void disableJMX() {
testConfigurationBuilder.withDisableJMX();
}
/**
* Enables the JMX agent. Must be called before the setup method.
*
* @deprecated Use the methods {@link #testConfiguration()} to enable, disable or check JMX state.
*/
@Deprecated(since = "4.7.0")
protected void enableJMX() {
testConfigurationBuilder.withEnableJMX();
}
/**
* Whether route coverage is enabled
*
* @deprecated Use the methods {@link #testConfiguration()} to enable or disable the route converage dumper
* @return true if enabled or false otherwise
*/
@Deprecated(since = "4.7.0")
protected boolean isRouteCoverageEnabled() {
return testConfigurationBuilder.isRouteCoverageEnabled();
}
}
| is |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/committer/manifest/StubStoreOperations.java | {
"start": 2938,
"end": 3206
} | class ____<T>
implements RemoteIterator<T> {
@Override
public boolean hasNext() throws IOException {
return false;
}
@Override
public T next() throws IOException {
throw new NoSuchElementException();
}
}
}
| EmptyRemoteIterator |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/LogComponentBuilderFactory.java | {
"start": 5462,
"end": 6492
} | class ____
extends AbstractComponentBuilder<LogComponent>
implements LogComponentBuilder {
@Override
protected LogComponent buildConcreteComponent() {
return new LogComponent();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "lazyStartProducer": ((LogComponent) component).setLazyStartProducer((boolean) value); return true;
case "sourceLocationLoggerName": ((LogComponent) component).setSourceLocationLoggerName((boolean) value); return true;
case "autowiredEnabled": ((LogComponent) component).setAutowiredEnabled((boolean) value); return true;
case "exchangeFormatter": ((LogComponent) component).setExchangeFormatter((org.apache.camel.spi.ExchangeFormatter) value); return true;
default: return false;
}
}
}
} | LogComponentBuilderImpl |
java | apache__camel | core/camel-support/src/main/java/org/apache/camel/support/component/ApiName.java | {
"start": 909,
"end": 1077
} | interface ____ {
/**
* Returns API name prefix path element for endpoint uri.
*
* @return unique API name prefix
*/
String getName();
}
| ApiName |
java | elastic__elasticsearch | x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtils.java | {
"start": 19224,
"end": 20880
} | class ____ implements AggValueExtractor {
private final Function<String, String> bucketKeyTransfomer;
MultiBucketsAggExtractor() {
this(Function.identity());
}
MultiBucketsAggExtractor(Function<String, String> bucketKeyTransfomer) {
this.bucketKeyTransfomer = Objects.requireNonNull(bucketKeyTransfomer);
}
@Override
public Object value(Aggregation agg, Map<String, String> fieldTypeMap, String lookupFieldPrefix) {
MultiBucketsAggregation aggregation = (MultiBucketsAggregation) agg;
var subAggLookupFieldPrefix = lookupFieldPrefix.isEmpty() ? agg.getName() : lookupFieldPrefix + "." + agg.getName();
Map<String, Object> nested = Maps.newLinkedHashMapWithExpectedSize(aggregation.getBuckets().size());
for (MultiBucketsAggregation.Bucket bucket : aggregation.getBuckets()) {
String bucketKey = bucketKeyTransfomer.apply(bucket.getKeyAsString());
if (bucket.getAggregations().iterator().hasNext() == false) {
nested.put(bucketKey, bucket.getDocCount());
} else {
Map<String, Object> nestedBucketObject = new LinkedHashMap<>();
for (Aggregation subAgg : bucket.getAggregations()) {
nestedBucketObject.put(subAgg.getName(), getExtractor(subAgg).value(subAgg, fieldTypeMap, subAggLookupFieldPrefix));
}
nested.put(bucketKey, nestedBucketObject);
}
}
return nested;
}
}
static | MultiBucketsAggExtractor |
java | quarkusio__quarkus | test-framework/junit5/src/main/java/io/quarkus/test/junit/launcher/DefaultInitContextBase.java | {
"start": 163,
"end": 1576
} | class ____ {
private final int httpPort;
private final int httpsPort;
private final Duration waitTime;
private final String testProfile;
private final List<String> argLine;
private final Map<String, String> env;
private final ArtifactLauncher.InitContext.DevServicesLaunchResult devServicesLaunchResult;
DefaultInitContextBase(int httpPort, int httpsPort, Duration waitTime, String testProfile,
List<String> argLine,
Map<String, String> env,
ArtifactLauncher.InitContext.DevServicesLaunchResult devServicesLaunchResult) {
this.httpPort = httpPort;
this.httpsPort = httpsPort;
this.waitTime = waitTime;
this.testProfile = testProfile;
this.argLine = argLine;
this.env = env;
this.devServicesLaunchResult = devServicesLaunchResult;
}
public int httpPort() {
return httpPort;
}
public int httpsPort() {
return httpsPort;
}
public Duration waitTime() {
return waitTime;
}
public String testProfile() {
return testProfile;
}
public List<String> argLine() {
return argLine;
}
public Map<String, String> env() {
return env;
}
public ArtifactLauncher.InitContext.DevServicesLaunchResult getDevServicesLaunchResult() {
return devServicesLaunchResult;
}
}
| DefaultInitContextBase |
java | apache__flink | flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/rest/RestAPIITCaseBase.java | {
"start": 2159,
"end": 4141
} | class ____ {
@RegisterExtension
@Order(1)
private static final MiniClusterExtension MINI_CLUSTER = new MiniClusterExtension();
@RegisterExtension
@Order(2)
protected static final SqlGatewayServiceExtension SQL_GATEWAY_SERVICE_EXTENSION =
new SqlGatewayServiceExtension(MINI_CLUSTER::getClientConfiguration);
@Nullable private static TestingRestClient restClient = null;
@Nullable protected static String targetAddress = null;
@Nullable private static SqlGatewayRestEndpoint sqlGatewayRestEndpoint = null;
protected static int port = 0;
@BeforeAll
static void start() throws Exception {
final String address = InetAddress.getLoopbackAddress().getHostAddress();
Configuration config = getBaseConfig(getFlinkConfig(address, address, "0"));
sqlGatewayRestEndpoint =
new SqlGatewayRestEndpoint(config, SQL_GATEWAY_SERVICE_EXTENSION.getService());
sqlGatewayRestEndpoint.start();
InetSocketAddress serverAddress = checkNotNull(sqlGatewayRestEndpoint.getServerAddress());
restClient = getTestingRestClient();
targetAddress = serverAddress.getHostName();
port = serverAddress.getPort();
}
@AfterAll
static void stop() throws Exception {
checkNotNull(sqlGatewayRestEndpoint);
sqlGatewayRestEndpoint.close();
checkNotNull(restClient);
restClient.shutdown(Duration.ofSeconds(3));
}
public <
M extends MessageHeaders<R, P, U>,
U extends MessageParameters,
R extends RequestBody,
P extends ResponseBody>
CompletableFuture<P> sendRequest(M messageHeaders, U messageParameters, R request)
throws IOException {
checkNotNull(restClient);
return restClient.sendRequest(
targetAddress, port, messageHeaders, messageParameters, request);
}
}
| RestAPIITCaseBase |
java | apache__dubbo | dubbo-config/dubbo-config-api/src/test/java/org/apache/dubbo/config/utils/ReferenceCacheTest.java | {
"start": 1447,
"end": 7714
} | class ____ {
@BeforeEach
public void setUp() throws Exception {
DubboBootstrap.reset();
SysProps.clear();
SysProps.setProperty("dubbo.metrics.enabled", "false");
SysProps.setProperty("dubbo.metrics.protocol", "disabled");
MockReferenceConfig.setCounter(0);
XxxMockReferenceConfig.setCounter(0);
SimpleReferenceCache.CACHE_HOLDER.clear();
}
@AfterEach
public void tearDown() {
DubboBootstrap.reset();
SysProps.clear();
SimpleReferenceCache.CACHE_HOLDER.clear();
}
@Test
void testGetCacheSameReference() throws Exception {
ReferenceCache cache = SimpleReferenceCache.getCache();
MockReferenceConfig config =
buildMockReferenceConfig("org.apache.dubbo.config.utils.service.FooService", "group1", "1.0.0");
assertEquals(0L, config.getCounter());
Object proxy = cache.get(config);
assertTrue(config.isGetMethodRun());
// singleton reference config by default
MockReferenceConfig configCopy =
buildMockReferenceConfig("org.apache.dubbo.config.utils.service.FooService", "group1", "1.0.0");
assertEquals(1L, configCopy.getCounter());
Object proxyOfCopyConfig = cache.get(configCopy);
assertFalse(configCopy.isGetMethodRun());
assertEquals(1L, config.getCounter());
assertEquals(1L, configCopy.getCounter());
assertEquals(proxy, proxyOfCopyConfig);
}
@Test
void testGetCacheDiffReference() throws Exception {
ReferenceCache cache = SimpleReferenceCache.getCache();
MockReferenceConfig config =
buildMockReferenceConfig("org.apache.dubbo.config.utils.service.FooService", "group1", "1.0.0");
assertEquals(0L, config.getCounter());
cache.get(config);
assertEquals(1L, config.getCounter());
assertTrue(config.isGetMethodRun());
cache.get(config);
assertEquals(1L, config.getCounter());
XxxMockReferenceConfig configCopy =
buildXxxMockReferenceConfig("org.apache.dubbo.config.utils.service.XxxService", "group1", "1.0.0");
assertEquals(0L, configCopy.getCounter());
cache.get(configCopy);
assertTrue(configCopy.isGetMethodRun());
assertEquals(1L, configCopy.getCounter());
}
@Test
void testGetCacheWithKey() throws Exception {
ReferenceCache cache = SimpleReferenceCache.getCache();
MockReferenceConfig config =
buildMockReferenceConfig("org.apache.dubbo.config.utils.service.FooService", "group1", "1.0.0");
FooService value = cache.get(config);
assertEquals(
value, cache.get("group1/org.apache.dubbo.config.utils.service.FooService:1.0.0", FooService.class));
}
@Test
void testGetCacheDiffName() throws Exception {
SimpleReferenceCache cache = SimpleReferenceCache.getCache();
MockReferenceConfig config =
buildMockReferenceConfig("org.apache.dubbo.config.utils.service.FooService", "group1", "1.0.0");
assertEquals(0L, config.getCounter());
cache.get(config);
assertTrue(config.isGetMethodRun());
assertEquals(1L, config.getCounter());
cache = SimpleReferenceCache.getCache("foo");
config = buildMockReferenceConfig("org.apache.dubbo.config.utils.service.FooService", "group1", "1.0.0");
assertEquals(1L, config.getCounter());
cache.get(config);
// still init for the same ReferenceConfig if the cache is different
assertTrue(config.isGetMethodRun());
assertEquals(2L, config.getCounter());
}
@Test
void testDestroy() throws Exception {
SimpleReferenceCache cache = SimpleReferenceCache.getCache();
MockReferenceConfig config =
buildMockReferenceConfig("org.apache.dubbo.config.utils.service.FooService", "group1", "1.0.0");
cache.get(config);
XxxMockReferenceConfig configCopy =
buildXxxMockReferenceConfig("org.apache.dubbo.config.utils.service.XxxService", "group1", "1.0.0");
cache.get(configCopy);
assertEquals(2, cache.getReferenceMap().size());
cache.destroy(config);
assertTrue(config.isDestroyMethodRun());
assertEquals(1, cache.getReferenceMap().size());
cache.destroy(configCopy);
assertTrue(configCopy.isDestroyMethodRun());
assertEquals(0, cache.getReferenceMap().size());
}
@Test
void testDestroyAll() throws Exception {
SimpleReferenceCache cache = SimpleReferenceCache.getCache();
MockReferenceConfig config =
buildMockReferenceConfig("org.apache.dubbo.config.utils.service.FooService", "group1", "1.0.0");
cache.get(config);
XxxMockReferenceConfig configCopy =
buildXxxMockReferenceConfig("org.apache.dubbo.config.utils.service.XxxService", "group1", "1.0.0");
cache.get(configCopy);
assertEquals(2, cache.getReferenceMap().size());
cache.destroyAll();
assertTrue(config.isDestroyMethodRun());
assertTrue(configCopy.isDestroyMethodRun());
assertEquals(0, cache.getReferenceMap().size());
}
private MockReferenceConfig buildMockReferenceConfig(String service, String group, String version) {
MockReferenceConfig config = new MockReferenceConfig();
config.setApplication(new ApplicationConfig("cache"));
config.setRegistry(new RegistryConfig("multicast://224.5.6.7:1234"));
config.setCheck(false);
config.setInterface(service);
config.setGroup(group);
config.setVersion(version);
return config;
}
private XxxMockReferenceConfig buildXxxMockReferenceConfig(String service, String group, String version) {
XxxMockReferenceConfig config = new XxxMockReferenceConfig();
config.setApplication(new ApplicationConfig("cache"));
config.setRegistry(new RegistryConfig("multicast://224.5.6.7:1234"));
config.setInterface(service);
config.setCheck(false);
config.setGroup(group);
config.setVersion(version);
return config;
}
}
| ReferenceCacheTest |
java | google__guava | android/guava/src/com/google/common/math/BigIntegerMath.java | {
"start": 1870,
"end": 4599
} | class ____ {
/**
* Returns the smallest power of two greater than or equal to {@code x}. This is equivalent to
* {@code BigInteger.valueOf(2).pow(log2(x, CEILING))}.
*
* @throws IllegalArgumentException if {@code x <= 0}
* @since 20.0
*/
public static BigInteger ceilingPowerOfTwo(BigInteger x) {
return BigInteger.ZERO.setBit(log2(x, CEILING));
}
/**
* Returns the largest power of two less than or equal to {@code x}. This is equivalent to {@code
* BigInteger.valueOf(2).pow(log2(x, FLOOR))}.
*
* @throws IllegalArgumentException if {@code x <= 0}
* @since 20.0
*/
public static BigInteger floorPowerOfTwo(BigInteger x) {
return BigInteger.ZERO.setBit(log2(x, FLOOR));
}
/** Returns {@code true} if {@code x} represents a power of two. */
public static boolean isPowerOfTwo(BigInteger x) {
checkNotNull(x);
return x.signum() > 0 && x.getLowestSetBit() == x.bitLength() - 1;
}
/**
* Returns the base-2 logarithm of {@code x}, rounded according to the specified rounding mode.
*
* @throws IllegalArgumentException if {@code x <= 0}
* @throws ArithmeticException if {@code mode} is {@link RoundingMode#UNNECESSARY} and {@code x}
* is not a power of two
*/
@SuppressWarnings("fallthrough")
// TODO(kevinb): remove after this warning is disabled globally
public static int log2(BigInteger x, RoundingMode mode) {
checkPositive("x", checkNotNull(x));
int logFloor = x.bitLength() - 1;
switch (mode) {
case UNNECESSARY:
checkRoundingUnnecessary(isPowerOfTwo(x)); // fall through
case DOWN:
case FLOOR:
return logFloor;
case UP:
case CEILING:
return isPowerOfTwo(x) ? logFloor : logFloor + 1;
case HALF_DOWN:
case HALF_UP:
case HALF_EVEN:
if (logFloor < SQRT2_PRECOMPUTE_THRESHOLD) {
BigInteger halfPower =
SQRT2_PRECOMPUTED_BITS.shiftRight(SQRT2_PRECOMPUTE_THRESHOLD - logFloor);
if (x.compareTo(halfPower) <= 0) {
return logFloor;
} else {
return logFloor + 1;
}
}
// Since sqrt(2) is irrational, log2(x) - logFloor cannot be exactly 0.5
//
// To determine which side of logFloor.5 the logarithm is,
// we compare x^2 to 2^(2 * logFloor + 1).
BigInteger x2 = x.pow(2);
int logX2Floor = x2.bitLength() - 1;
return (logX2Floor < 2 * logFloor + 1) ? logFloor : logFloor + 1;
}
throw new AssertionError();
}
/*
* The maximum number of bits in a square root for which we'll precompute an explicit half power
* of two. This can be any value, but higher values incur more | BigIntegerMath |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlLicenseChecker.java | {
"start": 550,
"end": 2050
} | class ____ {
public static final LicensedFeature.Momentary CCS_FEATURE = LicensedFeature.momentary(
null,
"esql-ccs",
License.OperationMode.ENTERPRISE
);
/**
* Only call this method once you know the user is doing a cross-cluster query, as it will update
* the license_usage timestamp for the esql-ccs feature if the license is Enterprise (or Trial).
* @param licenseState
* @return true if the user has a license that allows ESQL CCS.
*/
public static boolean isCcsAllowed(XPackLicenseState licenseState) {
if (licenseState == null) {
return false;
}
return CCS_FEATURE.check(licenseState);
}
/**
* @param licenseState existing license state. Need to extract info on the current installed license.
* @return ElasticsearchStatusException with an error message informing the caller what license is needed
* to run ES|QL cross-cluster searches and what license (if any) was found.
*/
public static ElasticsearchStatusException invalidLicenseForCcsException(XPackLicenseState licenseState) {
String message = "A valid Enterprise license is required to run ES|QL cross-cluster searches. License found: ";
if (licenseState == null) {
message += "none";
} else {
message += licenseState.statusDescription();
}
return new ElasticsearchStatusException(message, RestStatus.BAD_REQUEST);
}
}
| EsqlLicenseChecker |
java | google__error-prone | core/src/test/java/com/google/errorprone/DiagnosticKindTest.java | {
"start": 1710,
"end": 2233
} | class ____ {", " void doIt() {", " return;", " }", "}"
};
private DiagnosticTestHelper diagnosticHelper;
private ErrorProneTestCompiler.Builder compilerBuilder;
@Before
public void setUp() {
diagnosticHelper = new DiagnosticTestHelper();
compilerBuilder =
new ErrorProneTestCompiler.Builder().listenToDiagnostics(diagnosticHelper.collector);
}
@BugPattern(
summary = "This is an error!",
explanation = "Don't do this!",
severity = SeverityLevel.ERROR)
public static | Test |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/util/FileFixture.java | {
"start": 992,
"end": 2279
} | class ____ {
private static Logger logger = Logger.getLogger(FolderFixture.class.getName());
final String name;
final FolderFixture parent;
private File file;
public FileFixture(String name, FolderFixture parent) throws IOException {
this.name = name;
this.parent = parent;
create();
}
private void create() throws IOException {
String path = relativePath();
file = new File(path);
if (!file.exists()) {
boolean fileCreated = file.createNewFile();
if (!fileCreated) throw new AssertionError("Unable to create file %s".formatted(quote(path)));
logger.info("Created file %s".formatted(quote(path)));
}
if (!file.isFile()) throw new AssertionError("%s should be a file".formatted(quote(path)));
logger.info("The file %s exists".formatted(quote(path)));
}
public void delete() {
String path = relativePath();
boolean fileDeleted = file.delete();
if (!fileDeleted) throw new AssertionError("Unable to delete file %s".formatted(quote(path)));
logger.info("The file %s was deleted".formatted(quote(path)));
}
String relativePath() {
return parent != null ? concat(parent.relativePath(), separator, name) : name;
}
String absolutePath() {
return file.getAbsolutePath();
}
}
| FileFixture |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/base/Util.java | {
"start": 770,
"end": 1265
} | class ____ {
/**
* A version of {@link Map#computeIfAbsent(Object, Function)} that allows {@code mappingFunction}
* to update {@code map}.
*/
public static <K, V> V reentrantComputeIfAbsent(
Map<K, V> map, K key, Function<? super K, ? extends V> mappingFunction) {
V value = map.get(key);
if (value == null) {
value = mappingFunction.apply(key);
if (value != null) {
map.put(key, value);
}
}
return value;
}
private Util() {}
}
| Util |
java | apache__kafka | group-coordinator/src/test/java/org/apache/kafka/coordinator/group/assignor/GroupSpecImplTest.java | {
"start": 1600,
"end": 4526
} | class ____ {
private static final String TEST_MEMBER = "test-member";
private Map<String, MemberSubscriptionAndAssignmentImpl> members;
private SubscriptionType subscriptionType;
private Map<Uuid, Map<Integer, String>> invertedTargetAssignment;
private GroupSpecImpl groupSpec;
private Uuid topicId;
@BeforeEach
void setUp() {
members = new HashMap<>();
subscriptionType = SubscriptionType.HOMOGENEOUS;
invertedTargetAssignment = new HashMap<>();
topicId = Uuid.randomUuid();
members.put(TEST_MEMBER, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
Set.of(topicId),
Assignment.EMPTY
));
groupSpec = new GroupSpecImpl(
members,
subscriptionType,
invertedTargetAssignment
);
}
@Test
void testMemberIds() {
assertEquals(members.keySet(), groupSpec.memberIds());
}
@Test
void testSubscriptionType() {
assertEquals(subscriptionType, groupSpec.subscriptionType());
}
@Test
void testIsPartitionAssigned() {
Map<Integer, String> partitionMap = new HashMap<>();
partitionMap.put(1, "test-member");
invertedTargetAssignment.put(topicId, partitionMap);
assertTrue(groupSpec.isPartitionAssigned(topicId, 1));
assertFalse(groupSpec.isPartitionAssigned(topicId, 2));
assertFalse(groupSpec.isPartitionAssigned(Uuid.randomUuid(), 2));
}
@Test
void testMemberSubscription() {
assertEquals(members.get(TEST_MEMBER), groupSpec.memberSubscription(TEST_MEMBER));
assertThrows(IllegalArgumentException.class, () -> groupSpec.memberSubscription("unknown-member"));
}
@Test
void testMemberAssignment() {
Map<Uuid, Set<Integer>> topicPartitions = new HashMap<>();
topicPartitions.put(
topicId,
Set.of(0, 1)
);
members.put(TEST_MEMBER, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
Set.of(topicId),
new Assignment(topicPartitions)
));
assertEquals(topicPartitions, groupSpec.memberAssignment(TEST_MEMBER).partitions());
assertEquals(Map.of(), groupSpec.memberAssignment("unknown-member").partitions());
}
@Test
void testIsPartitionAssignable() {
// Empty allowed map.
assertTrue(groupSpec.isPartitionAssignable(topicId, 1));
// Allowed map with data.
groupSpec = new GroupSpecImpl(
members,
subscriptionType,
invertedTargetAssignment,
Optional.of(Map.of(topicId, Set.of(0)))
);
assertTrue(groupSpec.isPartitionAssignable(topicId, 0));
assertFalse(groupSpec.isPartitionAssignable(topicId, 1));
}
}
| GroupSpecImplTest |
java | apache__flink | flink-table/flink-table-code-splitter/src/test/resources/block/code/TestIfInsideWhileLoopRewrite.java | {
"start": 7,
"end": 2591
} | class ____ {
int counter = 0;
public void myFun(int[] a, int[] b, int[] c ) throws RuntimeException {
a[0] += b[1];
b[1] += a[1];
while (counter < 10) {
c[counter] = a[0] + 1000;
System.out.println(c);
if (a[counter] > 0) {
b[counter] = a[counter] * 2;
c[counter] = b[counter] * 2;
System.out.println(b[counter]);
} else {
b[counter] = a[counter] * 3;
System.out.println(b[counter]);
}
a[2] += b[2];
b[3] += a[3];
if (a[0] > 0) {
System.out.println("Hello");
} else {
System.out.println("World");
}
counter--;
}
a[4] += b[4];
b[5] += a[5];
}
public void myFun2(int[] a, int[] b, int[] c) throws RuntimeException {
a[0] += b[1];
b[1] += a[1];
while (counter < 10) {
c[counter] = a[0] + 1000;
System.out.println(c);
if (a[counter] > 0) {
b[counter] = a[counter] * 2;
c[counter] = b[counter] * 2;
System.out.println(b[counter]);
} else {
b[counter] = a[counter] * 3;
System.out.println(b[counter]);
}
a[2] += b[2];
b[3] += a[3];
if (a[0] > 0) {
System.out.println("Hello");
} else {
System.out.println("World");
break;
}
counter--;
}
a[4] += b[4];
b[5] += a[5];
}
public void myFun3(int[] a, int[] b, int[] c) throws RuntimeException {
a[0] += b[1];
b[1] += a[1];
while (counter < 10) {
c[counter] = a[0] + 1000;
System.out.println(c);
if (a[counter] > 0) {
b[counter] = a[counter] * 2;
c[counter] = b[counter] * 2;
System.out.println(b[counter]);
} else {
b[counter] = a[counter] * 3;
System.out.println(b[counter]);
continue;
}
a[2] += b[2];
b[3] += a[3];
if (a[0] > 0) {
System.out.println("Hello");
} else {
System.out.println("World");
break;
}
counter--;
}
a[4] += b[4];
b[5] += a[5];
}
}
| TestIfInsideWhileLoopRewrite |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/typeutils/base/NormalizedKeyUtil.java | {
"start": 1082,
"end": 6074
} | class ____ {
public static void putByteNormalizedKey(
byte value, MemorySegment target, int offset, int numBytes) {
if (numBytes == 1) {
// default case, full normalized key. need to explicitly convert to int to
// avoid false results due to implicit type conversion to int when subtracting
// the min byte value
int highByte = value & 0xff;
highByte -= Byte.MIN_VALUE;
target.put(offset, (byte) highByte);
} else if (numBytes <= 0) {
} else {
int highByte = value & 0xff;
highByte -= Byte.MIN_VALUE;
target.put(offset, (byte) highByte);
for (int i = 1; i < numBytes; i++) {
target.put(offset + i, (byte) 0);
}
}
}
public static void putCharNormalizedKey(
char value, MemorySegment target, int offset, int numBytes) {
// note that the char is an unsigned data type in java and consequently needs
// no code that transforms the signed representation to an offset representation
// that is equivalent to unsigned, when compared byte by byte
if (numBytes == 2) {
// default case, full normalized key
target.put(offset, (byte) ((value >>> 8) & 0xff));
target.put(offset + 1, (byte) ((value) & 0xff));
} else if (numBytes <= 0) {
} else if (numBytes == 1) {
target.put(offset, (byte) ((value >>> 8) & 0xff));
} else {
target.put(offset, (byte) ((value >>> 8) & 0xff));
target.put(offset + 1, (byte) ((value) & 0xff));
for (int i = 2; i < numBytes; i++) {
target.put(offset + i, (byte) 0);
}
}
}
public static void putBooleanNormalizedKey(
boolean value, MemorySegment target, int offset, int numBytes) {
if (numBytes > 0) {
target.put(offset, (byte) (value ? 1 : 0));
for (offset = offset + 1; numBytes > 1; numBytes--) {
target.put(offset++, (byte) 0);
}
}
}
public static void putShortNormalizedKey(
short value, MemorySegment target, int offset, int numBytes) {
if (numBytes == 2) {
// default case, full normalized key
int highByte = ((value >>> 8) & 0xff);
highByte -= Byte.MIN_VALUE;
target.put(offset, (byte) highByte);
target.put(offset + 1, (byte) ((value) & 0xff));
} else if (numBytes <= 0) {
} else if (numBytes == 1) {
int highByte = ((value >>> 8) & 0xff);
highByte -= Byte.MIN_VALUE;
target.put(offset, (byte) highByte);
} else {
int highByte = ((value >>> 8) & 0xff);
highByte -= Byte.MIN_VALUE;
target.put(offset, (byte) highByte);
target.put(offset + 1, (byte) ((value) & 0xff));
for (int i = 2; i < numBytes; i++) {
target.put(offset + i, (byte) 0);
}
}
}
public static void putIntNormalizedKey(
int iValue, MemorySegment target, int offset, int numBytes) {
putUnsignedIntegerNormalizedKey(iValue - Integer.MIN_VALUE, target, offset, numBytes);
}
public static void putUnsignedIntegerNormalizedKey(
int value, MemorySegment target, int offset, int numBytes) {
if (numBytes == 4) {
// default case, full normalized key
target.putIntBigEndian(offset, value);
} else if (numBytes > 0) {
if (numBytes < 4) {
for (int i = 0; numBytes > 0; numBytes--, i++) {
target.put(offset + i, (byte) (value >>> ((3 - i) << 3)));
}
} else {
target.putIntBigEndian(offset, value);
for (int i = 4; i < numBytes; i++) {
target.put(offset + i, (byte) 0);
}
}
}
}
public static void putLongNormalizedKey(
long lValue, MemorySegment target, int offset, int numBytes) {
putUnsignedLongNormalizedKey(lValue - Long.MIN_VALUE, target, offset, numBytes);
}
public static void putUnsignedLongNormalizedKey(
long value, MemorySegment target, int offset, int numBytes) {
if (numBytes == 8) {
// default case, full normalized key
target.putLongBigEndian(offset, value);
} else if (numBytes > 0) {
if (numBytes < 8) {
for (int i = 0; numBytes > 0; numBytes--, i++) {
target.put(offset + i, (byte) (value >>> ((7 - i) << 3)));
}
} else {
target.putLongBigEndian(offset, value);
for (int i = 8; i < numBytes; i++) {
target.put(offset + i, (byte) 0);
}
}
}
}
}
| NormalizedKeyUtil |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEvaluateDataFrameAction.java | {
"start": 4455,
"end": 5114
} | class ____ for serial execution of evaluation steps.
*
* Each step consists of the following phases:
* 1. build search request with aggs requested by individual metrics
* 2. execute search action with the request built in (1.)
* 3. make all individual metrics process the search response obtained in (2.)
* 4. check if all the metrics have their results computed
* a) If so, call the final listener and finish
* b) Otherwise, add another step to the queue
*
* To avoid infinite loop it is essential that every metric *does* compute its result at some point.
* */
private static final | allows |
java | spring-projects__spring-boot | core/spring-boot-testcontainers/src/dockerTest/java/org/springframework/boot/testcontainers/ImportTestcontainersTests.java | {
"start": 6585,
"end": 6696
} | class ____ {
@DynamicPropertySource
void containerProperties() {
}
}
}
| BadArgsDynamicPropertySourceMethod |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/observers/inheritance/NonObservingSubBean.java | {
"start": 246,
"end": 382
} | class ____ extends ObservingBean {
public void watchFor(SimpleEvent event) {
value = event.content;
}
}
| NonObservingSubBean |
java | apache__kafka | metadata/src/test/java/org/apache/kafka/image/publisher/ControllerRegistrationsPublisherTest.java | {
"start": 1768,
"end": 4195
} | class ____ {
@Test
public void testInitialControllers() {
ControllerRegistrationsPublisher publisher = new ControllerRegistrationsPublisher();
assertEquals(Map.of(), publisher.controllers());
}
@Test
public void testName() {
ControllerRegistrationsPublisher publisher = new ControllerRegistrationsPublisher();
assertEquals("ControllerRegistrationsPublisher", publisher.name());
}
private static final MetadataDelta TEST_DELTA;
private static final MetadataImage TEST_IMAGE;
private static final MetadataProvenance PROVENANCE = new MetadataProvenance(100L, 10, 2000L, true);
static {
TEST_DELTA = new MetadataDelta.Builder().build();
TEST_DELTA.replay(new FeatureLevelRecord().
setName(MetadataVersion.FEATURE_NAME).
setFeatureLevel(MetadataVersion.IBP_3_6_IV2.featureLevel()));
TEST_DELTA.replay(RecordTestUtils.createTestControllerRegistration(0, true));
TEST_DELTA.replay(RecordTestUtils.createTestControllerRegistration(1, false));
TEST_DELTA.replay(RecordTestUtils.createTestControllerRegistration(2, false));
TEST_IMAGE = TEST_DELTA.apply(PROVENANCE);
}
@ParameterizedTest
@ValueSource(booleans = {false, true})
public void testOnMetadataUpdate(boolean fromSnapshot) {
ControllerRegistrationsPublisher publisher = new ControllerRegistrationsPublisher();
if (fromSnapshot) {
publisher.onMetadataUpdate(TEST_DELTA, TEST_IMAGE,
new SnapshotManifest(new MetadataProvenance(100L, 10, 2000L, true), 100L));
} else {
publisher.onMetadataUpdate(TEST_DELTA, TEST_IMAGE,
LogDeltaManifest.newBuilder().
provenance(PROVENANCE).
leaderAndEpoch(new LeaderAndEpoch(OptionalInt.of(1), 200)).
numBatches(3).
elapsedNs(1000L).
numBytes(234).
build());
}
System.out.println("TEST_IMAGE.cluster = " + TEST_IMAGE.cluster());
assertEquals(Set.of(0, 1, 2), publisher.controllers().keySet());
assertTrue(publisher.controllers().get(0).zkMigrationReady());
assertFalse(publisher.controllers().get(1).zkMigrationReady());
assertFalse(publisher.controllers().get(2).zkMigrationReady());
}
}
| ControllerRegistrationsPublisherTest |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/ast/expr/SQLDefaultExpr.java | {
"start": 861,
"end": 1469
} | class ____ extends SQLExprImpl implements SQLLiteralExpr {
@Override
public boolean equals(Object o) {
return o instanceof SQLDefaultExpr;
}
@Override
public int hashCode() {
return 0;
}
@Override
protected void accept0(SQLASTVisitor visitor) {
visitor.visit(this);
visitor.endVisit(this);
}
public String toString() {
return "DEFAULT";
}
public SQLDefaultExpr clone() {
return new SQLDefaultExpr();
}
public List<SQLObject> getChildren() {
return Collections.emptyList();
}
}
| SQLDefaultExpr |
java | quarkusio__quarkus | extensions/info/deployment-spi/src/main/java/io/quarkus/info/deployment/spi/InfoBuildTimeContributorBuildItem.java | {
"start": 311,
"end": 654
} | class ____ extends MultiBuildItem {
private final InfoContributor infoContributor;
public InfoBuildTimeContributorBuildItem(InfoContributor infoContributor) {
this.infoContributor = infoContributor;
}
public InfoContributor getInfoContributor() {
return infoContributor;
}
}
| InfoBuildTimeContributorBuildItem |
java | quarkusio__quarkus | extensions/cache/deployment/src/test/java/io/quarkus/cache/test/deployment/PrivateMethodExceptionsTest.java | {
"start": 809,
"end": 2318
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest TEST = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar.addClasses(TestResource.class)
// These checks are only useful if Arc's more generic check is disabled.
.addAsResource(new StringAsset("quarkus.arc.fail-on-intercepted-private-method=false"),
"application.properties"))
.assertException(t -> {
assertEquals(DeploymentException.class, t.getClass());
assertEquals(3, t.getSuppressed().length);
assertPrivateMethodTargetException(t, "shouldThrowPrivateMethodTargetException", 1);
assertPrivateMethodTargetException(t, "shouldAlsoThrowPrivateMethodTargetException", 2);
});
private static void assertPrivateMethodTargetException(Throwable t, String expectedMethodName, long expectedCount) {
assertEquals(expectedCount, filterSuppressed(t, PrivateMethodTargetException.class)
.filter(s -> expectedMethodName.equals(s.getMethodInfo().name())).count());
}
private static <T extends RuntimeException> Stream<T> filterSuppressed(Throwable t, Class<T> filterClass) {
return stream(t.getSuppressed()).filter(filterClass::isInstance).map(filterClass::cast);
}
@Test
public void shouldNotBeInvoked() {
fail("This method should not be invoked");
}
@Path("/test")
static | PrivateMethodExceptionsTest |
java | apache__camel | components/camel-google/camel-google-drive/src/test/java/org/apache/camel/component/google/drive/DriveRepliesIT.java | {
"start": 1758,
"end": 6005
} | class ____ extends AbstractGoogleDriveTestSupport {
private static final Logger LOG = LoggerFactory.getLogger(DriveRepliesIT.class);
private static final String PATH_PREFIX
= GoogleDriveApiCollection.getCollection().getApiName(DriveRepliesApiMethod.class).getName();
@Test
public void testReplyToComment() {
// 1. create test file
File testFile = uploadTestFile();
String fileId = testFile.getId();
// 2. comment on that file
Map<String, Object> headers = new HashMap<>();
// parameter type is String
headers.put("CamelGoogleDrive.fileId", fileId);
// parameter type is com.google.api.services.drive.model.Comment
com.google.api.services.drive.model.Comment comment = new com.google.api.services.drive.model.Comment();
comment.setContent("Camel rocks!");
headers.put("CamelGoogleDrive.content", comment);
requestBodyAndHeaders("direct://INSERT_COMMENT", null, headers);
// 3. get a list of comments on the file
// using String message body for single parameter "fileId"
com.google.api.services.drive.model.CommentList result1 = requestBody("direct://LIST_COMMENTS", fileId);
assertNotNull(result1.get("items"));
LOG.debug("list: {}", result1);
Comment comment2 = result1.getComments().get(0);
String commentId = comment2.getId();
// 4. add reply
headers = new HashMap<>();
// parameter type is String
headers.put("CamelGoogleDrive.fileId", fileId);
// parameter type is String
headers.put("CamelGoogleDrive.commentId", commentId);
// parameter type is com.google.api.services.drive.model.CommentReply
com.google.api.services.drive.model.Reply reply = new com.google.api.services.drive.model.Reply();
reply.setContent("I know :-)");
headers.put("CamelGoogleDrive.content", reply);
requestBodyAndHeaders("direct://INSERT", null, headers);
// 5. list replies on comment to file
headers = new HashMap<>();
// parameter type is String
headers.put("CamelGoogleDrive.fileId", fileId);
// parameter type is String
headers.put("CamelGoogleDrive.commentId", commentId);
final com.google.api.services.drive.model.Reply result
= requestBodyAndHeaders("direct://LIST", null, headers);
assertNotNull(result, "list result");
LOG.debug("list: {}", result);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
// test route for delete
from("direct://DELETE")
.to("google-drive://" + PATH_PREFIX + "/delete");
// test route for get
from("direct://GET")
.to("google-drive://" + PATH_PREFIX + "/get");
// test route for insert
from("direct://INSERT")
.to("google-drive://" + PATH_PREFIX + "/insert");
// test route for list
from("direct://LIST")
.to("google-drive://" + PATH_PREFIX + "/list");
// test route for patch
from("direct://PATCH")
.to("google-drive://" + PATH_PREFIX + "/patch");
// test route for update
from("direct://UPDATE")
.to("google-drive://" + PATH_PREFIX + "/update");
// just used to upload file for test
from("direct://INSERT_1")
.to("google-drive://"
+ GoogleDriveApiCollection.getCollection().getApiName(DriveFilesApiMethod.class).getName()
+ "/insert");
// test route for insert
from("direct://INSERT_COMMENT")
.to("google-drive://drive-comments/insert");
// test route for list
from("direct://LIST_COMMENTS")
.to("google-drive://drive-comments/list?inBody=fileId");
}
};
}
}
| DriveRepliesIT |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/localdate/LocalDateAssert_isBetween_with_String_parameters_Test.java | {
"start": 979,
"end": 2148
} | class ____ extends org.assertj.core.api.LocalDateAssertBaseTest {
private LocalDate before = now.minusDays(1);
private LocalDate after = now.plusDays(1);
@Override
protected LocalDateAssert invoke_api_method() {
return assertions.isBetween(before.toString(), after.toString());
}
@Override
protected void verify_internal_effects() {
verify(comparables).assertIsBetween(getInfo(assertions), getActual(assertions), before, after, true, true);
}
@Test
void should_throw_a_DateTimeParseException_if_start_String_parameter_cant_be_converted() {
// GIVEN
String abc = "abc";
// WHEN
Throwable thrown = catchThrowable(() -> assertions.isBetween(abc, after.toString()));
// THEN
assertThat(thrown).isInstanceOf(DateTimeParseException.class);
}
@Test
void should_throw_a_DateTimeParseException_if_end_String_parameter_cant_be_converted() {
// GIVEN
String abc = "abc";
// WHEN
Throwable thrown = catchThrowable(() -> assertions.isBetween(before.toString(), abc));
// THEN
assertThat(thrown).isInstanceOf(DateTimeParseException.class);
}
}
| LocalDateAssert_isBetween_with_String_parameters_Test |
java | alibaba__nacos | core/src/main/java/com/alibaba/nacos/core/context/addition/EngineContext.java | {
"start": 930,
"end": 1694
} | class ____ {
/**
* Nacos server version, such as v2.4.0.
*/
private String version;
private final Map<String, String> contexts;
public EngineContext() {
version = VersionUtils.version;
contexts = new HashMap<>(1);
}
public String getVersion() {
return version;
}
public void setVersion(String version) {
this.version = version;
}
public String getContext(String key) {
return contexts.get(key);
}
public String getContext(String key, String defaultValue) {
return contexts.getOrDefault(key, defaultValue);
}
public void setContext(String key, String value) {
contexts.put(key, value);
}
}
| EngineContext |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/aop/aspectj/autoproxy/AspectJAutoProxyCreatorTests.java | {
"start": 17929,
"end": 18079
} | class ____ extends TestBean implements IMarkerTestBean {
@Marker
@Override
public int getAge() {
return super.getAge();
}
}
@Aspect
| MarkerTestBean |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/transport/RemoteClusterAwareClient.java | {
"start": 931,
"end": 3502
} | class ____ implements RemoteClusterClient {
private final TransportService service;
private final String clusterAlias;
private final RemoteClusterService remoteClusterService;
private final Executor responseExecutor;
private final boolean ensureConnected;
RemoteClusterAwareClient(TransportService service, String clusterAlias, Executor responseExecutor, boolean ensureConnected) {
this.service = service;
this.clusterAlias = clusterAlias;
this.remoteClusterService = service.getRemoteClusterService();
this.responseExecutor = responseExecutor;
this.ensureConnected = ensureConnected;
}
@Override
public <Request extends ActionRequest, Response extends TransportResponse> void execute(
Transport.Connection connection,
RemoteClusterActionType<Response> action,
Request request,
ActionListener<Response> listener
) {
service.sendRequest(
connection,
action.name(),
request,
TransportRequestOptions.EMPTY,
new ActionListenerResponseHandler<>(listener, action.getResponseReader(), responseExecutor)
);
}
@Override
public <Request extends ActionRequest> void getConnection(@Nullable Request request, ActionListener<Transport.Connection> listener) {
SubscribableListener
.<Void>newForked(ensureConnectedListener -> {
if (ensureConnected) {
remoteClusterService.ensureConnected(clusterAlias, ensureConnectedListener);
} else {
ensureConnectedListener.onResponse(null);
}
})
.andThenApply(ignored -> {
try {
if (request instanceof RemoteClusterAwareRequest remoteClusterAwareRequest) {
return remoteClusterService.getConnection(remoteClusterAwareRequest.getPreferredTargetNode(), clusterAlias);
} else {
return remoteClusterService.getConnection(clusterAlias);
}
} catch (ConnectTransportException e) {
if (ensureConnected == false) {
// trigger another connection attempt, but don't wait for it to complete
remoteClusterService.ensureConnected(clusterAlias, ActionListener.noop());
}
throw e;
}
})
.addListener(listener);
}
}
| RemoteClusterAwareClient |
java | google__auto | value/src/it/functional/src/test/java/com/google/auto/value/AutoBuilderKotlinTest.java | {
"start": 5510,
"end": 6132
} | interface ____ {
static KotlinDataEightDefaultsBuilder builder() {
return new AutoBuilder_AutoBuilderKotlinTest_KotlinDataEightDefaultsBuilder();
}
KotlinDataEightDefaultsBuilder a1(int x);
KotlinDataEightDefaultsBuilder a2(int x);
KotlinDataEightDefaultsBuilder a3(int x);
KotlinDataEightDefaultsBuilder a4(int x);
KotlinDataEightDefaultsBuilder a5(int x);
KotlinDataEightDefaultsBuilder a6(int x);
KotlinDataEightDefaultsBuilder a7(int x);
KotlinDataEightDefaultsBuilder a8(int x);
KotlinDataEightDefaults build();
}
// We test a | KotlinDataEightDefaultsBuilder |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/util/YarnClientUtils.java | {
"start": 1894,
"end": 9927
} | class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(YarnClientUtils.class);
private static final Base64 BASE_64_CODEC = new Base64(0);
private static final String ADD_LABEL_FORMAT_ERR_MSG =
"Input format for adding node-labels is not correct, it should be "
+ "labelName1[(exclusive=true/false)],LabelName2[] ..";
public static final String NO_LABEL_ERR_MSG =
"No cluster node-labels are specified";
/**
* Look up and return the resource manager's principal. This method
* automatically does the <code>_HOST</code> replacement in the principal and
* correctly handles HA resource manager configurations.
*
* @param conf the {@link Configuration} file from which to read the
* principal
* @return the resource manager's principal string or null if the
* {@link YarnConfiguration#RM_PRINCIPAL} property is not set in the
* {@code conf} parameter
* @throws IOException thrown if there's an error replacing the host name
*/
public static String getRmPrincipal(Configuration conf) throws IOException {
String principal = conf.get(YarnConfiguration.RM_PRINCIPAL);
String prepared = null;
if (principal != null) {
prepared = getRmPrincipal(principal, conf);
}
return prepared;
}
/**
* Perform the <code>_HOST</code> replacement in the {@code principal},
* Returning the result. Correctly handles HA resource manager configurations.
*
* @param rmPrincipal the principal string to prepare
* @param conf the configuration
* @return the prepared principal string
* @throws IOException thrown if there's an error replacing the host name
*/
public static String getRmPrincipal(String rmPrincipal, Configuration conf)
throws IOException {
if (rmPrincipal == null) {
throw new IllegalArgumentException("RM principal string is null");
}
if (HAUtil.isHAEnabled(conf)) {
conf = getYarnConfWithRmHaId(conf);
}
String hostname = conf.getSocketAddr(
YarnConfiguration.RM_ADDRESS,
YarnConfiguration.DEFAULT_RM_ADDRESS,
YarnConfiguration.DEFAULT_RM_PORT).getHostName();
return SecurityUtil.getServerPrincipal(rmPrincipal, hostname);
}
/**
* Creates node labels from string
* @param args nodelabels string to be parsed
* @return list of node labels
*/
public static List<NodeLabel> buildNodeLabelsFromStr(String args) {
List<NodeLabel> nodeLabels = new ArrayList<>();
for (String p : args.split(",")) {
if (!p.trim().isEmpty()) {
String labelName = p;
// Try to parse exclusive
boolean exclusive = NodeLabel.DEFAULT_NODE_LABEL_EXCLUSIVITY;
int leftParenthesisIdx = p.indexOf("(");
int rightParenthesisIdx = p.indexOf(")");
if ((leftParenthesisIdx == -1 && rightParenthesisIdx != -1)
|| (leftParenthesisIdx != -1 && rightParenthesisIdx == -1)) {
// Parentheses not match
throw new IllegalArgumentException(ADD_LABEL_FORMAT_ERR_MSG);
}
if (leftParenthesisIdx > 0 && rightParenthesisIdx > 0) {
if (leftParenthesisIdx > rightParenthesisIdx) {
// Parentheses not match
throw new IllegalArgumentException(ADD_LABEL_FORMAT_ERR_MSG);
}
String property = p.substring(p.indexOf("(") + 1, p.indexOf(")"));
if (property.contains("=")) {
String key = property.substring(0, property.indexOf("=")).trim();
String value =
property
.substring(property.indexOf("=") + 1, property.length())
.trim();
// Now we only support one property, which is exclusive, so check if
// key = exclusive and value = {true/false}
if ("exclusive".equals(key)
&& ImmutableSet.of("true", "false").contains(value)) {
exclusive = Boolean.parseBoolean(value);
} else {
throw new IllegalArgumentException(ADD_LABEL_FORMAT_ERR_MSG);
}
} else if (!property.trim().isEmpty()) {
throw new IllegalArgumentException(ADD_LABEL_FORMAT_ERR_MSG);
}
}
// Try to get labelName if there's "(..)"
if (labelName.contains("(")) {
labelName = labelName.substring(0, labelName.indexOf("(")).trim();
}
nodeLabels.add(NodeLabel.newInstance(labelName, exclusive));
}
}
if (nodeLabels.isEmpty()) {
throw new IllegalArgumentException(NO_LABEL_ERR_MSG);
}
return nodeLabels;
}
/**
* Returns a {@link YarnConfiguration} built from the {@code conf} parameter
* that is guaranteed to have the {@link YarnConfiguration#RM_HA_ID}
* property set.
*
* @param conf the base configuration
* @return a {@link YarnConfiguration} built from the base
* {@link Configuration}
* @throws IOException thrown if the {@code conf} parameter contains
* inconsistent properties
*/
@VisibleForTesting
static YarnConfiguration getYarnConfWithRmHaId(Configuration conf)
throws IOException {
YarnConfiguration yarnConf = new YarnConfiguration(conf);
if (yarnConf.get(YarnConfiguration.RM_HA_ID) == null) {
// If RM_HA_ID is not configured, use the first of RM_HA_IDS.
// Any valid RM HA ID should work.
String[] rmIds = yarnConf.getStrings(YarnConfiguration.RM_HA_IDS);
if ((rmIds != null) && (rmIds.length > 0)) {
yarnConf.set(YarnConfiguration.RM_HA_ID, rmIds[0]);
} else {
throw new IOException("RM_HA_IDS property is not set for HA resource "
+ "manager");
}
}
return yarnConf;
}
/**
* Generate SPNEGO challenge request token.
*
* @param server - hostname to contact
* @throws IOException thrown if doAs failed
* @throws InterruptedException thrown if doAs is interrupted
* @return SPNEGO token challenge
*/
public static String generateToken(String server) throws IOException,
InterruptedException {
UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
LOG.debug("The user credential is {}", currentUser);
String challenge = currentUser
.doAs(new PrivilegedExceptionAction<String>() {
@Override
public String run() throws Exception {
try {
GSSManager manager = GSSManager.getInstance();
// GSS name for server
GSSName serverName = manager.createName("HTTP@" + server,
GSSName.NT_HOSTBASED_SERVICE);
// Create a GSSContext for authentication with the service.
// We're passing client credentials as null since we want them to
// be read from the Subject.
// We're passing Oid as null to use the default.
GSSContext gssContext = manager.createContext(
serverName.canonicalize(null), null, null,
GSSContext.DEFAULT_LIFETIME);
gssContext.requestMutualAuth(true);
gssContext.requestCredDeleg(true);
// Establish context
byte[] inToken = new byte[0];
byte[] outToken = gssContext.initSecContext(inToken, 0,
inToken.length);
gssContext.dispose();
// Base64 encoded and stringified token for server
LOG.debug("Got valid challenge for host {}", serverName);
return new String(BASE_64_CODEC.encode(outToken),
StandardCharsets.US_ASCII);
} catch (GSSException e) {
LOG.error("Error: ", e);
throw new AuthenticationException(e);
}
}
});
return challenge;
}
public static boolean isYarnFederationEnabled(Configuration conf) {
boolean isEnabled = conf.getBoolean(YarnConfiguration.FEDERATION_ENABLED,
YarnConfiguration.DEFAULT_FEDERATION_ENABLED);
return isEnabled;
}
}
| YarnClientUtils |
java | google__guice | core/test/com/google/inject/RequireAtInjectOnConstructorsTest.java | {
"start": 6938,
"end": 7158
} | class ____ {
@SuppressWarnings("unused")
ManyConstructors() {}
@SuppressWarnings("unused")
ManyConstructors(String a) {}
@SuppressWarnings("unused")
ManyConstructors(int a) {}
}
}
| ManyConstructors |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/bean/destroy/DependentPreDestroyOnlyCalledOnceTest.java | {
"start": 3340,
"end": 3451
} | interface ____ {
}
@MyInterceptorBinding
@Interceptor
@Priority(1)
static | MyInterceptorBinding |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/subjects/MaybeSubject.java | {
"start": 11453,
"end": 12145
} | class ____<T>
extends AtomicReference<MaybeSubject<T>> implements Disposable {
private static final long serialVersionUID = -7650903191002190468L;
final MaybeObserver<? super T> downstream;
MaybeDisposable(MaybeObserver<? super T> actual, MaybeSubject<T> parent) {
this.downstream = actual;
lazySet(parent);
}
@Override
public void dispose() {
MaybeSubject<T> parent = getAndSet(null);
if (parent != null) {
parent.remove(this);
}
}
@Override
public boolean isDisposed() {
return get() == null;
}
}
}
| MaybeDisposable |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/files/Files_assertIsDirectoryRecursivelyContaining_Predicate_Test.java | {
"start": 1823,
"end": 2041
} | class ____ extends FilesSimpleBaseTest {
private static final String THE_GIVEN_FILTER_DESCRIPTION = "the given filter";
@TestInstance(PER_CLASS)
@Nested
| Files_assertIsDirectoryRecursivelyContaining_Predicate_Test |
java | grpc__grpc-java | xds/src/test/java/io/grpc/xds/orca/OrcaOobUtilTest.java | {
"start": 3719,
"end": 37530
} | class ____ {
private static final int NUM_SUBCHANNELS = 2;
private static final Attributes.Key<String> SUBCHANNEL_ATTR_KEY =
Attributes.Key.create("subchannel-attr-for-test");
private static final OrcaReportingConfig SHORT_INTERVAL_CONFIG =
OrcaReportingConfig.newBuilder().setReportInterval(5L, TimeUnit.NANOSECONDS).build();
private static final OrcaReportingConfig MEDIUM_INTERVAL_CONFIG =
OrcaReportingConfig.newBuilder().setReportInterval(543L, TimeUnit.MICROSECONDS).build();
private static final OrcaReportingConfig LONG_INTERVAL_CONFIG =
OrcaReportingConfig.newBuilder().setReportInterval(1232L, TimeUnit.MILLISECONDS).build();
@Rule public final GrpcCleanupRule cleanupRule = new GrpcCleanupRule();
@Rule public final MockitoRule mocks = MockitoJUnit.rule();
@SuppressWarnings({"rawtypes", "unchecked"})
private final List<EquivalentAddressGroup>[] eagLists = new List[NUM_SUBCHANNELS];
private final SubchannelStateListener[] mockStateListeners =
new SubchannelStateListener[NUM_SUBCHANNELS];
private final ManagedChannel[] channels = new ManagedChannel[NUM_SUBCHANNELS];
private final OpenRcaServiceImp[] orcaServiceImps = new OpenRcaServiceImp[NUM_SUBCHANNELS];
private final SynchronizationContext syncContext = new SynchronizationContext(
new Thread.UncaughtExceptionHandler() {
@Override
public void uncaughtException(Thread t, Throwable e) {
throw new AssertionError(e);
}
});
private final FakeClock fakeClock = new FakeClock();
private final Helper origHelper = mock(Helper.class, delegatesTo(new FakeHelper()));
@Mock
private OrcaOobReportListener mockOrcaListener0;
@Mock
private OrcaOobReportListener mockOrcaListener1;
@Mock
private OrcaOobReportListener mockOrcaListener2;
@Mock private BackoffPolicy.Provider backoffPolicyProvider;
@Mock private BackoffPolicy backoffPolicy1;
@Mock private BackoffPolicy backoffPolicy2;
private FakeSubchannel[] subchannels = new FakeSubchannel[NUM_SUBCHANNELS];
private LoadBalancer.Helper orcaHelper;
private LoadBalancer.Helper parentHelper;
private LoadBalancer.Helper childHelper;
private Subchannel savedParentSubchannel;
private static FakeSubchannel unwrap(Subchannel s) {
return (FakeSubchannel) ((SubchannelImpl) s).delegate();
}
private static OrcaLoadReportRequest buildOrcaRequestFromConfig(
OrcaReportingConfig config) {
return OrcaLoadReportRequest.newBuilder()
.setReportInterval(Durations.fromNanos(config.getReportIntervalNanos()))
.build();
}
private static void assertLog(List<String> logs, String expectedLog) {
assertThat(logs).contains(expectedLog);
logs.clear();
}
@After
public void tearDown() {
for (int i = 0; i < NUM_SUBCHANNELS; i++) {
if (subchannels[i] != null) {
subchannels[i].shutdown();
}
}
}
@Test
public void orcaReportingConfig_construct() {
int interval = new Random().nextInt(Integer.MAX_VALUE);
OrcaReportingConfig config =
OrcaReportingConfig.newBuilder()
.setReportInterval(interval, TimeUnit.MICROSECONDS)
.build();
assertThat(config.getReportIntervalNanos()).isEqualTo(TimeUnit.MICROSECONDS.toNanos(interval));
String str = config.toString();
assertThat(str).contains("reportIntervalNanos=");
OrcaReportingConfig rebuildedConfig = config.toBuilder().build();
assertThat(rebuildedConfig.getReportIntervalNanos())
.isEqualTo(TimeUnit.MICROSECONDS.toNanos(interval));
}
@Before
public void setUp() throws Exception {
for (int i = 0; i < NUM_SUBCHANNELS; i++) {
orcaServiceImps[i] = new OpenRcaServiceImp();
cleanupRule.register(
InProcessServerBuilder.forName("orca-reporting-test-" + i)
.addService(orcaServiceImps[i])
.directExecutor()
.build()
.start());
ManagedChannel channel =
cleanupRule.register(
InProcessChannelBuilder.forName("orca-reporting-test-" + i).directExecutor().build());
channels[i] = channel;
EquivalentAddressGroup eag =
new EquivalentAddressGroup(new FakeSocketAddress("address-" + i));
List<EquivalentAddressGroup> eagList = Arrays.asList(eag);
eagLists[i] = eagList;
mockStateListeners[i] = mock(SubchannelStateListener.class);
}
when(backoffPolicyProvider.get()).thenReturn(backoffPolicy1, backoffPolicy2);
when(backoffPolicy1.nextBackoffNanos()).thenReturn(11L, 21L);
when(backoffPolicy2.nextBackoffNanos()).thenReturn(12L, 22L);
orcaHelper =
OrcaOobUtil.newOrcaReportingHelper(
origHelper,
backoffPolicyProvider,
fakeClock.getStopwatchSupplier());
parentHelper =
new ForwardingLoadBalancerHelper() {
@Override
protected Helper delegate() {
return orcaHelper;
}
@Override
public Subchannel createSubchannel(CreateSubchannelArgs args) {
Subchannel subchannel = super.createSubchannel(args);
savedParentSubchannel = subchannel;
return subchannel;
}
};
childHelper =
OrcaOobUtil.newOrcaReportingHelper(
parentHelper,
backoffPolicyProvider,
fakeClock.getStopwatchSupplier());
}
@Test
public void singlePolicyTypicalWorkflow() {
verify(origHelper, atLeast(0)).getSynchronizationContext();
verifyNoMoreInteractions(origHelper);
// Calling createSubchannel() on orcaHelper correctly passes augmented CreateSubchannelArgs
// to origHelper.
ArgumentCaptor<CreateSubchannelArgs> createArgsCaptor =
ArgumentCaptor.forClass(CreateSubchannelArgs.class);
for (int i = 0; i < NUM_SUBCHANNELS; i++) {
String subchannelAttrValue = "eag attr " + i;
Attributes attrs =
Attributes.newBuilder().set(SUBCHANNEL_ATTR_KEY, subchannelAttrValue).build();
Subchannel created = createSubchannel(orcaHelper, i, attrs);
assertThat(unwrap(created)).isSameInstanceAs(subchannels[i]);
setOrcaReportConfig(created, mockOrcaListener0, SHORT_INTERVAL_CONFIG);
verify(origHelper, times(i + 1)).createSubchannel(createArgsCaptor.capture());
assertThat(createArgsCaptor.getValue().getAddresses()).isEqualTo(eagLists[i]);
assertThat(createArgsCaptor.getValue().getAttributes().get(SUBCHANNEL_ATTR_KEY))
.isEqualTo(subchannelAttrValue);
}
// ORCA reporting does not start until underlying Subchannel is READY.
for (int i = 0; i < NUM_SUBCHANNELS; i++) {
FakeSubchannel subchannel = subchannels[i];
OpenRcaServiceImp orcaServiceImp = orcaServiceImps[i];
SubchannelStateListener mockStateListener = mockStateListeners[i];
InOrder inOrder = inOrder(mockStateListener);
deliverSubchannelState(i, ConnectivityStateInfo.forNonError(IDLE));
deliverSubchannelState(i, ConnectivityStateInfo.forTransientFailure(Status.UNAVAILABLE));
deliverSubchannelState(i, ConnectivityStateInfo.forNonError(CONNECTING));
inOrder.verify(mockStateListener)
.onSubchannelState(eq(ConnectivityStateInfo.forNonError(IDLE)));
inOrder.verify(mockStateListener)
.onSubchannelState(eq(ConnectivityStateInfo.forTransientFailure(Status.UNAVAILABLE)));
inOrder.verify(mockStateListener)
.onSubchannelState(eq(ConnectivityStateInfo.forNonError(CONNECTING)));
verifyNoMoreInteractions(mockStateListener);
assertThat(subchannel.logs).isEmpty();
assertThat(orcaServiceImp.calls).isEmpty();
verifyNoMoreInteractions(mockOrcaListener0);
deliverSubchannelState(i, ConnectivityStateInfo.forNonError(READY));
verify(mockStateListener).onSubchannelState(eq(ConnectivityStateInfo.forNonError(READY)));
assertThat(orcaServiceImp.calls).hasSize(1);
ServerSideCall serverCall = orcaServiceImp.calls.peek();
assertThat(serverCall.request).isEqualTo(buildOrcaRequestFromConfig(SHORT_INTERVAL_CONFIG));
assertLog(subchannel.logs,
"DEBUG: Starting ORCA reporting for " + subchannel.getAllAddresses());
// Simulate an ORCA service response. Registered listener will receive an ORCA report for
// each backend.
OrcaLoadReport report = OrcaLoadReport.getDefaultInstance();
serverCall.responseObserver.onNext(report);
assertLog(subchannel.logs, "DEBUG: Received an ORCA report: " + report);
verify(mockOrcaListener0, times(i + 1)).onLoadReport(
argThat(new OrcaPerRequestUtilTest.MetricsReportMatcher(
OrcaPerRequestUtil.fromOrcaLoadReport(report))));
}
for (int i = 0; i < NUM_SUBCHANNELS; i++) {
FakeSubchannel subchannel = subchannels[i];
SubchannelStateListener mockStateListener = mockStateListeners[i];
ServerSideCall serverCall = orcaServiceImps[i].calls.peek();
assertThat(serverCall.cancelled).isFalse();
verifyNoMoreInteractions(mockStateListener);
// Shutting down the subchannel will cancel the ORCA reporting RPC.
subchannel.shutdown();
verify(mockStateListener).onSubchannelState(eq(ConnectivityStateInfo.forNonError(SHUTDOWN)));
assertThat(serverCall.cancelled).isTrue();
assertThat(subchannel.logs).isEmpty();
verifyNoMoreInteractions(mockOrcaListener0);
}
for (int i = 0; i < NUM_SUBCHANNELS; i++) {
assertThat(orcaServiceImps[i].calls).hasSize(1);
}
verifyNoInteractions(backoffPolicyProvider);
}
@Test
public void twoLevelPoliciesTypicalWorkflow() {
verify(origHelper, atLeast(0)).getSynchronizationContext();
verifyNoMoreInteractions(origHelper);
// Calling createSubchannel() on child helper correctly passes augmented CreateSubchannelArgs
// to origHelper.
ArgumentCaptor<CreateSubchannelArgs> createArgsCaptor =
ArgumentCaptor.forClass(CreateSubchannelArgs.class);
for (int i = 0; i < NUM_SUBCHANNELS; i++) {
String subchannelAttrValue = "eag attr " + i;
Attributes attrs =
Attributes.newBuilder().set(SUBCHANNEL_ATTR_KEY, subchannelAttrValue).build();
Subchannel created = createSubchannel(childHelper, i, attrs);
assertThat(unwrap(((SubchannelImpl) created).delegate())).isSameInstanceAs(subchannels[i]);
OrcaOobUtil.setListener(created, mockOrcaListener1, SHORT_INTERVAL_CONFIG);
verify(origHelper, times(i + 1)).createSubchannel(createArgsCaptor.capture());
assertThat(createArgsCaptor.getValue().getAddresses()).isEqualTo(eagLists[i]);
assertThat(createArgsCaptor.getValue().getAttributes().get(SUBCHANNEL_ATTR_KEY))
.isEqualTo(subchannelAttrValue);
}
// ORCA reporting does not start until underlying Subchannel is READY.
for (int i = 0; i < NUM_SUBCHANNELS; i++) {
FakeSubchannel subchannel = subchannels[i];
OpenRcaServiceImp orcaServiceImp = orcaServiceImps[i];
SubchannelStateListener mockStateListener = mockStateListeners[i];
InOrder inOrder = inOrder(mockStateListener);
deliverSubchannelState(i, ConnectivityStateInfo.forNonError(IDLE));
deliverSubchannelState(i, ConnectivityStateInfo.forTransientFailure(Status.UNAVAILABLE));
deliverSubchannelState(i, ConnectivityStateInfo.forNonError(CONNECTING));
inOrder
.verify(mockStateListener).onSubchannelState(eq(ConnectivityStateInfo.forNonError(IDLE)));
inOrder
.verify(mockStateListener)
.onSubchannelState(eq(ConnectivityStateInfo.forTransientFailure(Status.UNAVAILABLE)));
inOrder
.verify(mockStateListener)
.onSubchannelState(eq(ConnectivityStateInfo.forNonError(CONNECTING)));
verifyNoMoreInteractions(mockStateListener);
assertThat(subchannel.logs).isEmpty();
assertThat(orcaServiceImp.calls).isEmpty();
verifyNoMoreInteractions(mockOrcaListener1);
verifyNoMoreInteractions(mockOrcaListener2);
deliverSubchannelState(i, ConnectivityStateInfo.forNonError(READY));
verify(mockStateListener).onSubchannelState(eq(ConnectivityStateInfo.forNonError(READY)));
assertThat(orcaServiceImp.calls).hasSize(1);
ServerSideCall serverCall = orcaServiceImp.calls.peek();
assertThat(serverCall.request).isEqualTo(buildOrcaRequestFromConfig(SHORT_INTERVAL_CONFIG));
assertLog(subchannel.logs,
"DEBUG: Starting ORCA reporting for " + subchannel.getAllAddresses());
// Simulate an ORCA service response. Registered listener will receive an ORCA report for
// each backend.
OrcaLoadReport report = OrcaLoadReport.getDefaultInstance();
serverCall.responseObserver.onNext(report);
assertLog(subchannel.logs, "DEBUG: Received an ORCA report: " + report);
verify(mockOrcaListener1, times(i + 1)).onLoadReport(
argThat(new OrcaPerRequestUtilTest.MetricsReportMatcher(
OrcaPerRequestUtil.fromOrcaLoadReport(report))));
}
for (int i = 0; i < NUM_SUBCHANNELS; i++) {
FakeSubchannel subchannel = subchannels[i];
SubchannelStateListener mockStateListener = mockStateListeners[i];
ServerSideCall serverCall = orcaServiceImps[i].calls.peek();
assertThat(serverCall.cancelled).isFalse();
verifyNoMoreInteractions(mockStateListener);
// Shutting down the subchannel will cancel the ORCA reporting RPC.
subchannel.shutdown();
verify(mockStateListener).onSubchannelState(eq(ConnectivityStateInfo.forNonError(SHUTDOWN)));
assertThat(serverCall.cancelled).isTrue();
assertThat(subchannel.logs).isEmpty();
verifyNoMoreInteractions(mockOrcaListener1, mockOrcaListener2);
}
for (int i = 0; i < NUM_SUBCHANNELS; i++) {
assertThat(orcaServiceImps[i].calls).hasSize(1);
}
verifyNoInteractions(backoffPolicyProvider);
}
@Test
public void orcReportingDisabledWhenServiceNotImplemented() {
final Subchannel created = createSubchannel(orcaHelper, 0, Attributes.EMPTY);
OrcaOobUtil.setListener(created, mockOrcaListener0, SHORT_INTERVAL_CONFIG);
FakeSubchannel subchannel = subchannels[0];
OpenRcaServiceImp orcaServiceImp = orcaServiceImps[0];
SubchannelStateListener mockStateListener = mockStateListeners[0];
deliverSubchannelState(0, ConnectivityStateInfo.forNonError(READY));
verify(mockStateListener).onSubchannelState(eq(ConnectivityStateInfo.forNonError(READY)));
assertThat(orcaServiceImp.calls).hasSize(1);
ServerSideCall serverCall = orcaServiceImp.calls.poll();
assertThat(serverCall.request).isEqualTo(buildOrcaRequestFromConfig(SHORT_INTERVAL_CONFIG));
subchannel.logs.clear();
serverCall.responseObserver.onError(Status.UNIMPLEMENTED.asException());
assertLog(subchannel.logs,
"ERROR: OpenRcaService disabled: " + Status.UNIMPLEMENTED);
verifyNoMoreInteractions(mockOrcaListener0);
// Re-connecting on Subchannel will reset the "disabled" flag and restart ORCA reporting.
assertThat(orcaServiceImp.calls).hasSize(0);
deliverSubchannelState(0, ConnectivityStateInfo.forNonError(IDLE));
deliverSubchannelState(0, ConnectivityStateInfo.forNonError(READY));
assertLog(subchannel.logs,
"DEBUG: Starting ORCA reporting for " + subchannel.getAllAddresses());
assertThat(orcaServiceImp.calls).hasSize(1);
serverCall = orcaServiceImp.calls.poll();
OrcaLoadReport report = OrcaLoadReport.getDefaultInstance();
serverCall.responseObserver.onNext(report);
assertLog(subchannel.logs, "DEBUG: Received an ORCA report: " + report);
verify(mockOrcaListener0).onLoadReport(
argThat(new OrcaPerRequestUtilTest.MetricsReportMatcher(
OrcaPerRequestUtil.fromOrcaLoadReport(report))));
verifyNoInteractions(backoffPolicyProvider);
}
@Test
public void orcaReportingStreamClosedAndRetried() {
final Subchannel created = createSubchannel(orcaHelper, 0, Attributes.EMPTY);
OrcaOobUtil.setListener(created, mockOrcaListener0, SHORT_INTERVAL_CONFIG);
FakeSubchannel subchannel = subchannels[0];
OpenRcaServiceImp orcaServiceImp = orcaServiceImps[0];
SubchannelStateListener mockStateListener = mockStateListeners[0];
InOrder inOrder = inOrder(mockStateListener, mockOrcaListener0, backoffPolicyProvider,
backoffPolicy1, backoffPolicy2);
deliverSubchannelState(0, ConnectivityStateInfo.forNonError(READY));
inOrder
.verify(mockStateListener).onSubchannelState(eq(ConnectivityStateInfo.forNonError(READY)));
assertLog(subchannel.logs,
"DEBUG: Starting ORCA reporting for " + subchannel.getAllAddresses());
// Server closes the ORCA reporting RPC without any response, will start backoff
// sequence 1 (11ns).
orcaServiceImp.calls.poll().responseObserver.onCompleted();
assertLog(subchannel.logs,
"DEBUG: ORCA reporting stream closed with " + Status.OK + ", backoff in 11" + " ns");
inOrder.verify(backoffPolicyProvider).get();
inOrder.verify(backoffPolicy1).nextBackoffNanos();
verifyRetryAfterNanos(inOrder, orcaServiceImp, 11);
assertLog(subchannel.logs,
"DEBUG: Starting ORCA reporting for " + subchannel.getAllAddresses());
// Server closes the ORCA reporting RPC with an error, will continue backoff sequence 1 (21ns).
orcaServiceImp.calls.poll().responseObserver.onError(Status.UNAVAILABLE.asException());
assertLog(subchannel.logs,
"DEBUG: ORCA reporting stream closed with " + Status.UNAVAILABLE + ", backoff in 21"
+ " ns");
inOrder.verify(backoffPolicy1).nextBackoffNanos();
verifyRetryAfterNanos(inOrder, orcaServiceImp, 21);
assertLog(subchannel.logs,
"DEBUG: Starting ORCA reporting for " + subchannel.getAllAddresses());
// Server responds normally.
OrcaLoadReport report = OrcaLoadReport.getDefaultInstance();
orcaServiceImp.calls.peek().responseObserver.onNext(report);
assertLog(subchannel.logs, "DEBUG: Received an ORCA report: " + report);
inOrder.verify(mockOrcaListener0).onLoadReport(
argThat(new OrcaPerRequestUtilTest.MetricsReportMatcher(
OrcaPerRequestUtil.fromOrcaLoadReport(report))));
// Server closes the ORCA reporting RPC after a response, will restart immediately.
orcaServiceImp.calls.poll().responseObserver.onCompleted();
assertThat(subchannel.logs).containsExactly(
"DEBUG: ORCA reporting stream closed with " + Status.OK + ", backoff in 0" + " ns",
"DEBUG: Starting ORCA reporting for " + subchannel.getAllAddresses());
subchannel.logs.clear();
// Backoff policy is set to sequence 2 in previous retry.
// Server closes the ORCA reporting RPC with an error, will start backoff sequence 2 (12ns).
orcaServiceImp.calls.poll().responseObserver.onError(Status.UNAVAILABLE.asException());
assertLog(subchannel.logs,
"DEBUG: ORCA reporting stream closed with " + Status.UNAVAILABLE + ", backoff in 12"
+ " ns");
inOrder.verify(backoffPolicyProvider).get();
inOrder.verify(backoffPolicy2).nextBackoffNanos();
verifyRetryAfterNanos(inOrder, orcaServiceImp, 12);
assertLog(subchannel.logs,
"DEBUG: Starting ORCA reporting for " + subchannel.getAllAddresses());
verifyNoMoreInteractions(mockStateListener, mockOrcaListener0, backoffPolicyProvider,
backoffPolicy1, backoffPolicy2);
}
@Test
public void reportingNotStartedUntilConfigured() {
Subchannel created = createSubchannel(orcaHelper, 0, Attributes.EMPTY);
deliverSubchannelState(0, ConnectivityStateInfo.forNonError(READY));
verify(mockStateListeners[0])
.onSubchannelState(eq(ConnectivityStateInfo.forNonError(READY)));
assertThat(orcaServiceImps[0].calls).isEmpty();
assertThat(subchannels[0].logs).isEmpty();
OrcaOobUtil.setListener(created, mockOrcaListener0, SHORT_INTERVAL_CONFIG);
assertThat(orcaServiceImps[0].calls).hasSize(1);
assertLog(subchannels[0].logs,
"DEBUG: Starting ORCA reporting for " + subchannels[0].getAllAddresses());
assertThat(orcaServiceImps[0].calls.peek().request)
.isEqualTo(buildOrcaRequestFromConfig(SHORT_INTERVAL_CONFIG));
}
@Test
public void updateListenerThrows() {
Subchannel created = createSubchannel(orcaHelper, 0, Attributes.EMPTY);
OrcaOobUtil.setListener(created, mockOrcaListener0, SHORT_INTERVAL_CONFIG);
deliverSubchannelState(0, ConnectivityStateInfo.forNonError(READY));
verify(mockStateListeners[0])
.onSubchannelState(eq(ConnectivityStateInfo.forNonError(READY)));
assertThat(orcaServiceImps[0].calls).hasSize(1);
assertLog(subchannels[0].logs,
"DEBUG: Starting ORCA reporting for " + subchannels[0].getAllAddresses());
assertThat(orcaServiceImps[0].calls.peek().request)
.isEqualTo(buildOrcaRequestFromConfig(SHORT_INTERVAL_CONFIG));
assertThat(unwrap(created)).isSameInstanceAs(subchannels[0]);
try {
OrcaOobUtil.setListener(subchannels[0], mockOrcaListener1, MEDIUM_INTERVAL_CONFIG);
fail("Update orca listener on non-orca subchannel should fail");
} catch (IllegalArgumentException ex) {
assertThat(ex.getMessage()).isEqualTo("Subchannel does not have orca Out-Of-Band "
+ "stream enabled. Try to use a subchannel created by OrcaOobUtil.OrcaHelper.");
}
}
@Test
public void removeListener() {
Subchannel created = createSubchannel(orcaHelper, 0, Attributes.EMPTY);
OrcaOobUtil.setListener(created, null, SHORT_INTERVAL_CONFIG);
deliverSubchannelState(0, ConnectivityStateInfo.forNonError(READY));
verify(mockStateListeners[0])
.onSubchannelState(eq(ConnectivityStateInfo.forNonError(READY)));
assertThat(orcaServiceImps[0].calls).isEmpty();
assertThat(subchannels[0].logs).isEmpty();
assertThat(unwrap(created)).isSameInstanceAs(subchannels[0]);
OrcaOobUtil.setListener(created, mockOrcaListener0, SHORT_INTERVAL_CONFIG);
assertThat(orcaServiceImps[0].calls).hasSize(1);
assertLog(subchannels[0].logs,
"DEBUG: Starting ORCA reporting for " + subchannels[0].getAllAddresses());
assertThat(orcaServiceImps[0].calls.peek().request)
.isEqualTo(buildOrcaRequestFromConfig(SHORT_INTERVAL_CONFIG));
OrcaOobUtil.setListener(created, null, null);
assertThat(orcaServiceImps[0].calls.poll().cancelled).isTrue();
assertThat(orcaServiceImps[0].calls).isEmpty();
assertThat(subchannels[0].logs).isEmpty();
assertThat(fakeClock.getPendingTasks()).isEmpty();
verifyNoMoreInteractions(mockOrcaListener0);
verifyNoInteractions(backoffPolicyProvider);
}
@Test
public void updateReportingIntervalBeforeCreatingSubchannel() {
Subchannel created = createSubchannel(orcaHelper, 0, Attributes.EMPTY);
OrcaOobUtil.setListener(created, mockOrcaListener0, SHORT_INTERVAL_CONFIG);
deliverSubchannelState(0, ConnectivityStateInfo.forNonError(READY));
verify(mockStateListeners[0]).onSubchannelState(eq(ConnectivityStateInfo.forNonError(READY)));
assertThat(orcaServiceImps[0].calls).hasSize(1);
assertLog(subchannels[0].logs,
"DEBUG: Starting ORCA reporting for " + subchannels[0].getAllAddresses());
assertThat(orcaServiceImps[0].calls.poll().request)
.isEqualTo(buildOrcaRequestFromConfig(SHORT_INTERVAL_CONFIG));
}
@Test
public void updateReportingIntervalBeforeSubchannelReady() {
Subchannel created = createSubchannel(orcaHelper, 0, Attributes.EMPTY);
OrcaOobUtil.setListener(created, mockOrcaListener0, SHORT_INTERVAL_CONFIG);
deliverSubchannelState(0, ConnectivityStateInfo.forNonError(READY));
verify(mockStateListeners[0]).onSubchannelState(eq(ConnectivityStateInfo.forNonError(READY)));
assertThat(orcaServiceImps[0].calls).hasSize(1);
assertLog(subchannels[0].logs,
"DEBUG: Starting ORCA reporting for " + subchannels[0].getAllAddresses());
assertThat(orcaServiceImps[0].calls.poll().request)
.isEqualTo(buildOrcaRequestFromConfig(SHORT_INTERVAL_CONFIG));
}
@Test
public void updateReportingIntervalWhenRpcActive() {
// Sets report interval before creating a Subchannel, reporting starts right after suchannel
// state becomes READY.
Subchannel created = createSubchannel(orcaHelper, 0, Attributes.EMPTY);
OrcaOobUtil.setListener(created, mockOrcaListener0,
MEDIUM_INTERVAL_CONFIG);
deliverSubchannelState(0, ConnectivityStateInfo.forNonError(READY));
verify(mockStateListeners[0]).onSubchannelState(eq(ConnectivityStateInfo.forNonError(READY)));
assertThat(orcaServiceImps[0].calls).hasSize(1);
assertLog(subchannels[0].logs,
"DEBUG: Starting ORCA reporting for " + subchannels[0].getAllAddresses());
assertThat(orcaServiceImps[0].calls.peek().request)
.isEqualTo(buildOrcaRequestFromConfig(MEDIUM_INTERVAL_CONFIG));
// Make reporting less frequent.
OrcaOobUtil.setListener(created, mockOrcaListener0, LONG_INTERVAL_CONFIG);
assertThat(orcaServiceImps[0].calls.poll().cancelled).isTrue();
assertThat(orcaServiceImps[0].calls).hasSize(1);
assertLog(subchannels[0].logs,
"DEBUG: Starting ORCA reporting for " + subchannels[0].getAllAddresses());
assertThat(orcaServiceImps[0].calls.peek().request)
.isEqualTo(buildOrcaRequestFromConfig(LONG_INTERVAL_CONFIG));
// Configuring with the same report interval again does not restart ORCA RPC.
OrcaOobUtil.setListener(created, mockOrcaListener0, LONG_INTERVAL_CONFIG);
assertThat(orcaServiceImps[0].calls.peek().cancelled).isFalse();
assertThat(subchannels[0].logs).isEmpty();
// Make reporting more frequent.
OrcaOobUtil.setListener(created, mockOrcaListener0,
SHORT_INTERVAL_CONFIG);
assertThat(orcaServiceImps[0].calls.poll().cancelled).isTrue();
assertThat(orcaServiceImps[0].calls).hasSize(1);
assertLog(subchannels[0].logs,
"DEBUG: Starting ORCA reporting for " + subchannels[0].getAllAddresses());
assertThat(orcaServiceImps[0].calls.poll().request)
.isEqualTo(buildOrcaRequestFromConfig(SHORT_INTERVAL_CONFIG));
}
@Test
public void updateReportingIntervalWhenRpcPendingRetry() {
Subchannel created = createSubchannel(orcaHelper, 0, Attributes.EMPTY);
OrcaOobUtil.setListener(created, mockOrcaListener0, SHORT_INTERVAL_CONFIG);
deliverSubchannelState(0, ConnectivityStateInfo.forNonError(READY));
verify(mockStateListeners[0]).onSubchannelState(eq(ConnectivityStateInfo.forNonError(READY)));
assertThat(orcaServiceImps[0].calls).hasSize(1);
assertLog(subchannels[0].logs,
"DEBUG: Starting ORCA reporting for " + subchannels[0].getAllAddresses());
assertThat(orcaServiceImps[0].calls.peek().request)
.isEqualTo(buildOrcaRequestFromConfig(SHORT_INTERVAL_CONFIG));
// Server closes the RPC without response, client will retry with backoff.
assertThat(fakeClock.getPendingTasks()).isEmpty();
orcaServiceImps[0].calls.poll().responseObserver.onCompleted();
assertLog(subchannels[0].logs,
"DEBUG: ORCA reporting stream closed with " + Status.OK + ", backoff in 11"
+ " ns");
assertThat(fakeClock.getPendingTasks()).hasSize(1);
assertThat(orcaServiceImps[0].calls).isEmpty();
// Make reporting less frequent.
OrcaOobUtil.setListener(created, mockOrcaListener0, LONG_INTERVAL_CONFIG);
// Retry task will be canceled and restarts new RPC immediately.
assertThat(fakeClock.getPendingTasks()).isEmpty();
assertThat(orcaServiceImps[0].calls).hasSize(1);
assertLog(subchannels[0].logs,
"DEBUG: Starting ORCA reporting for " + subchannels[0].getAllAddresses());
assertThat(orcaServiceImps[0].calls.peek().request)
.isEqualTo(buildOrcaRequestFromConfig(LONG_INTERVAL_CONFIG));
}
@Test
public void policiesReceiveSameReportIndependently() {
Subchannel childSubchannel = createSubchannel(childHelper, 0, Attributes.EMPTY);
deliverSubchannelState(0, ConnectivityStateInfo.forNonError(READY));
// No helper sets ORCA reporting interval, so load reporting is not started.
verify(mockStateListeners[0]).onSubchannelState(eq(ConnectivityStateInfo.forNonError(READY)));
assertThat(orcaServiceImps[0].calls).isEmpty();
assertThat(subchannels[0].logs).isEmpty();
// Parent helper requests ORCA reports with a certain interval, load reporting starts.
OrcaOobUtil.setListener(savedParentSubchannel, mockOrcaListener1, SHORT_INTERVAL_CONFIG);
assertThat(orcaServiceImps[0].calls).hasSize(1);
assertLog(subchannels[0].logs,
"DEBUG: Starting ORCA reporting for " + subchannels[0].getAllAddresses());
OrcaLoadReport report = OrcaLoadReport.getDefaultInstance();
assertThat(orcaServiceImps[0].calls).hasSize(1);
orcaServiceImps[0].calls.peek().responseObserver.onNext(report);
assertLog(subchannels[0].logs, "DEBUG: Received an ORCA report: " + report);
// Only parent helper's listener receives the report.
ArgumentCaptor<MetricReport> parentReportCaptor = ArgumentCaptor.forClass(MetricReport.class);
verify(mockOrcaListener1).onLoadReport(parentReportCaptor.capture());
assertThat(OrcaPerRequestUtilTest.reportEqual(parentReportCaptor.getValue(),
OrcaPerRequestUtil.fromOrcaLoadReport(report))).isTrue();
verifyNoMoreInteractions(mockOrcaListener2);
// Now child helper also wants to receive reports.
OrcaOobUtil.setListener(childSubchannel, mockOrcaListener2, SHORT_INTERVAL_CONFIG);
orcaServiceImps[0].calls.peek().responseObserver.onNext(report);
assertLog(subchannels[0].logs, "DEBUG: Received an ORCA report: " + report);
// Both helper receives the same report instance.
ArgumentCaptor<MetricReport> childReportCaptor = ArgumentCaptor.forClass(MetricReport.class);
verify(mockOrcaListener1, times(2))
.onLoadReport(parentReportCaptor.capture());
verify(mockOrcaListener2)
.onLoadReport(childReportCaptor.capture());
assertThat(childReportCaptor.getValue()).isSameInstanceAs(parentReportCaptor.getValue());
}
@Test
public void reportWithMostFrequentIntervalRequested() {
Subchannel created = createSubchannel(childHelper, 0, Attributes.EMPTY);
OrcaOobUtil.setListener(created, mockOrcaListener0, LONG_INTERVAL_CONFIG);
OrcaOobUtil.setListener(created, mockOrcaListener1, SHORT_INTERVAL_CONFIG);
deliverSubchannelState(0, ConnectivityStateInfo.forNonError(READY));
verify(mockStateListeners[0]).onSubchannelState(eq(ConnectivityStateInfo.forNonError(READY)));
assertThat(orcaServiceImps[0].calls).hasSize(1);
assertLog(subchannels[0].logs,
"DEBUG: Starting ORCA reporting for " + subchannels[0].getAllAddresses());
// The real report interval to be requested is the minimum of intervals requested by helpers.
assertThat(Durations.toNanos(orcaServiceImps[0].calls.peek().request.getReportInterval()))
.isEqualTo(SHORT_INTERVAL_CONFIG.getReportIntervalNanos());
// Parent helper wants reporting to be more frequent than its current setting while it is still
// less frequent than parent helper. Nothing should happen on existing RPC.
OrcaOobUtil.setListener(savedParentSubchannel, mockOrcaListener0, MEDIUM_INTERVAL_CONFIG);
assertThat(orcaServiceImps[0].calls.peek().cancelled).isFalse();
assertThat(subchannels[0].logs).isEmpty();
// Parent helper wants reporting to be less frequent.
OrcaOobUtil.setListener(created, mockOrcaListener1, MEDIUM_INTERVAL_CONFIG);
assertThat(orcaServiceImps[0].calls.poll().cancelled).isTrue();
assertThat(orcaServiceImps[0].calls).hasSize(1);
assertLog(subchannels[0].logs,
"DEBUG: Starting ORCA reporting for " + subchannels[0].getAllAddresses());
// ORCA reporting RPC restarts and the the real report interval is adjusted.
assertThat(Durations.toNanos(orcaServiceImps[0].calls.poll().request.getReportInterval()))
.isEqualTo(MEDIUM_INTERVAL_CONFIG.getReportIntervalNanos());
}
private void verifyRetryAfterNanos(InOrder inOrder, OpenRcaServiceImp orcaServiceImp,
long nanos) {
assertThat(fakeClock.getPendingTasks()).hasSize(1);
assertThat(orcaServiceImp.calls).isEmpty();
fakeClock.forwardNanos(nanos - 1);
assertThat(orcaServiceImp.calls).isEmpty();
inOrder.verifyNoMoreInteractions();
fakeClock.forwardNanos(1);
assertThat(orcaServiceImp.calls).hasSize(1);
assertThat(fakeClock.getPendingTasks()).isEmpty();
}
private void deliverSubchannelState(final int index, final ConnectivityStateInfo newState) {
syncContext.execute(
new Runnable() {
@Override
public void run() {
subchannels[index].stateListener.onSubchannelState(newState);
}
});
}
private Subchannel createSubchannel(final Helper helper, final int index,
final Attributes attrs) {
final AtomicReference<Subchannel> newSubchannel = new AtomicReference<>();
syncContext.execute(
new Runnable() {
@Override
public void run() {
Subchannel s =
helper.createSubchannel(
CreateSubchannelArgs.newBuilder()
.setAddresses(eagLists[index])
.setAttributes(attrs)
.build());
s.start(mockStateListeners[index]);
newSubchannel.set(s);
}
});
return newSubchannel.get();
}
private void setOrcaReportConfig(
final Subchannel subchannel,
final OrcaOobReportListener listener,
final OrcaReportingConfig config) {
OrcaOobUtil.setListener(subchannel, listener, config);
}
private static final | OrcaOobUtilTest |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-server-webflux/src/main/java/org/springframework/cloud/gateway/filter/factory/RequestRateLimiterGatewayFilterFactory.java | {
"start": 4735,
"end": 6177
} | class ____ implements HasRouteId {
private @Nullable KeyResolver keyResolver;
private @Nullable RateLimiter rateLimiter;
private HttpStatus statusCode = HttpStatus.TOO_MANY_REQUESTS;
private @Nullable Boolean denyEmptyKey;
private @Nullable String emptyKeyStatus;
private @Nullable String routeId;
public @Nullable KeyResolver getKeyResolver() {
return keyResolver;
}
public Config setKeyResolver(KeyResolver keyResolver) {
this.keyResolver = keyResolver;
return this;
}
public @Nullable RateLimiter getRateLimiter() {
return rateLimiter;
}
public Config setRateLimiter(RateLimiter rateLimiter) {
this.rateLimiter = rateLimiter;
return this;
}
public HttpStatus getStatusCode() {
return statusCode;
}
public Config setStatusCode(HttpStatus statusCode) {
this.statusCode = statusCode;
return this;
}
public @Nullable Boolean getDenyEmptyKey() {
return denyEmptyKey;
}
public Config setDenyEmptyKey(Boolean denyEmptyKey) {
this.denyEmptyKey = denyEmptyKey;
return this;
}
public @Nullable String getEmptyKeyStatus() {
return emptyKeyStatus;
}
public Config setEmptyKeyStatus(String emptyKeyStatus) {
this.emptyKeyStatus = emptyKeyStatus;
return this;
}
@Override
public void setRouteId(String routeId) {
this.routeId = routeId;
}
@Override
public @Nullable String getRouteId() {
return this.routeId;
}
}
}
| Config |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/checkpointing/SavepointITCase.java | {
"start": 68068,
"end": 72112
} | class ____ {
private final int numTaskManagers;
private final int numSlotsPerTaskManager;
private final Configuration config;
private MiniClusterResourceFactory(
int numTaskManagers, int numSlotsPerTaskManager, Configuration config) {
this.numTaskManagers = numTaskManagers;
this.numSlotsPerTaskManager = numSlotsPerTaskManager;
this.config = config;
}
MiniClusterWithClientResource get() {
return new MiniClusterWithClientResource(
new MiniClusterResourceConfiguration.Builder()
.setConfiguration(config)
.setNumberTaskManagers(numTaskManagers)
.setNumberSlotsPerTaskManager(numSlotsPerTaskManager)
.build());
}
}
private Configuration getFileBasedCheckpointsConfig(final String savepointDir) {
final Configuration config = new Configuration();
config.set(StateBackendOptions.STATE_BACKEND, "hashmap");
config.set(CheckpointingOptions.CHECKPOINTS_DIRECTORY, checkpointDir.toURI().toString());
config.set(CheckpointingOptions.FS_SMALL_FILE_THRESHOLD, MemorySize.ZERO);
config.set(CheckpointingOptions.SAVEPOINT_DIRECTORY, savepointDir);
return config;
}
private Configuration getFileBasedCheckpointsConfig() {
return getFileBasedCheckpointsConfig(savepointDir.toURI().toString());
}
private static Matcher<File> hasEntropyInFileStateHandlePaths() {
return new TypeSafeDiagnosingMatcher<File>() {
@Override
protected boolean matchesSafely(
final File savepointDir, final Description mismatchDescription) {
if (savepointDir == null) {
mismatchDescription.appendText("savepoint dir must not be null");
return false;
}
final List<Path> filesWithoutEntropy =
listRecursively(
savepointDir
.toPath()
.resolve(
EntropyInjectingTestFileSystem
.ENTROPY_INJECTION_KEY));
final Path savepointDirWithEntropy =
savepointDir.toPath().resolve(EntropyInjectingTestFileSystem.ENTROPY);
final List<Path> filesWithEntropy = listRecursively(savepointDirWithEntropy);
if (!filesWithoutEntropy.isEmpty()) {
mismatchDescription.appendText(
"there are savepoint files with unresolved entropy placeholders");
return false;
}
if (!Files.exists(savepointDirWithEntropy) || filesWithEntropy.isEmpty()) {
mismatchDescription.appendText(
"there are no savepoint files with added entropy");
return false;
}
return true;
}
@Override
public void describeTo(final Description description) {
description.appendText("all savepoint files should have added entropy");
}
};
}
private static List<Path> listRecursively(final Path dir) {
try {
if (!Files.exists(dir)) {
return Collections.emptyList();
} else {
try (Stream<Path> files = Files.walk(dir, FileVisitOption.FOLLOW_LINKS)) {
return files.filter(Files::isRegularFile).collect(Collectors.toList());
}
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/** A test file system. It will fail when trying to perform actions on a statically set path. */
public static | MiniClusterResourceFactory |
java | quarkusio__quarkus | extensions/smallrye-reactive-messaging/deployment/src/test/java/io/quarkus/smallrye/reactivemessaging/blocking/BlockingValidationErrorTest.java | {
"start": 1123,
"end": 1471
} | class ____ {
private List<String> list = new ArrayList<>();
@Blocking
@Incoming("count")
public Subscriber<Message<String>> create() {
return ReactiveStreams.<Message<String>> builder().forEach(m -> list.add(m.getPayload()))
.build();
}
}
}
| BeanReturningASubscriberOfMessages |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/impl/RouteSetRouteIdTwoTimesTest.java | {
"start": 1061,
"end": 1596
} | class ____ extends TestSupport {
@Test
public void testRouteIdTwice() {
CamelContext context = new DefaultCamelContext();
assertThrows(IllegalArgumentException.class, () -> {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:hello").routeId("foo").to("mock:result").to("mock:bar").routeId("bar");
}
});
}, "Should have thrown exception");
}
}
| RouteSetRouteIdTwoTimesTest |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/AbstractConfigAttributeRequestMatcherRegistryTests.java | {
"start": 2779,
"end": 3067
} | class ____
extends AbstractConfigAttributeRequestMatcherRegistry<List<RequestMatcher>> {
@Override
protected List<RequestMatcher> chainRequestMatchersInternal(List<RequestMatcher> requestMatchers) {
return requestMatchers;
}
}
}
| ConcreteAbstractRequestMatcherMappingConfigurer |
java | spring-projects__spring-boot | module/spring-boot-micrometer-tracing-brave/src/main/java/org/springframework/boot/micrometer/tracing/brave/autoconfigure/CompositePropagationFactory.java | {
"start": 1661,
"end": 5054
} | class ____ extends Propagation.Factory {
private final PropagationFactories injectors;
private final PropagationFactories extractors;
private final CompositePropagation propagation;
CompositePropagationFactory(Collection<Factory> injectorFactories, Collection<Factory> extractorFactories) {
this.injectors = new PropagationFactories(injectorFactories);
this.extractors = new PropagationFactories(extractorFactories);
this.propagation = new CompositePropagation(this.injectors, this.extractors);
}
Stream<Factory> getInjectors() {
return this.injectors.stream();
}
@Override
public boolean supportsJoin() {
return this.injectors.supportsJoin() && this.extractors.supportsJoin();
}
@Override
public boolean requires128BitTraceId() {
return this.injectors.requires128BitTraceId() || this.extractors.requires128BitTraceId();
}
@Override
public Propagation<String> get() {
return this.propagation;
}
@Override
public TraceContext decorate(TraceContext context) {
for (Propagation.Factory factory : this.injectors.factories) {
TraceContext decorated = factory.decorate(context);
if (decorated != context) {
return decorated;
}
}
for (Propagation.Factory factory : this.extractors.factories) {
TraceContext decorated = factory.decorate(context);
if (decorated != context) {
return decorated;
}
}
return context;
}
/**
* Creates a new {@link CompositePropagationFactory} which doesn't do any propagation.
* @return the {@link CompositePropagationFactory}
*/
static CompositePropagationFactory noop() {
return new CompositePropagationFactory(Collections.emptyList(), Collections.emptyList());
}
/**
* Creates a new {@link CompositePropagationFactory}.
* @param properties the propagation properties
* @return the {@link CompositePropagationFactory}
*/
static CompositePropagationFactory create(TracingProperties.Propagation properties) {
return create(properties, null, null);
}
/**
* Creates a new {@link CompositePropagationFactory}.
* @param properties the propagation properties
* @param baggageManager the baggage manager to use, or {@code null}
* @param localFields the local fields, or {@code null}
* @return the {@link CompositePropagationFactory}
*/
static CompositePropagationFactory create(TracingProperties.Propagation properties,
@Nullable BaggageManager baggageManager, @Nullable LocalBaggageFields localFields) {
PropagationFactoryMapper mapper = new PropagationFactoryMapper(baggageManager, localFields);
List<Factory> injectors = getEffectiveProducedTypes(properties).stream().map(mapper::map).toList();
List<Factory> extractors = getEffectiveConsumedTypes(properties).stream().map(mapper::map).toList();
return new CompositePropagationFactory(injectors, extractors);
}
private static List<PropagationType> getEffectiveConsumedTypes(TracingProperties.Propagation properties) {
return (properties.getType() != null) ? properties.getType() : properties.getConsume();
}
private static List<PropagationType> getEffectiveProducedTypes(TracingProperties.Propagation properties) {
return (properties.getType() != null) ? properties.getType() : properties.getProduce();
}
/**
* Mapper used to create a {@link brave.propagation.Propagation.Factory Propagation
* factory} from a {@link PropagationType}.
*/
private static | CompositePropagationFactory |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/strategy/TestingSchedulingExecutionVertex.java | {
"start": 1292,
"end": 3797
} | class ____ implements SchedulingExecutionVertex {
private final ExecutionVertexID executionVertexId;
private final List<ConsumedPartitionGroup> consumedPartitionGroups;
private final Collection<TestingSchedulingResultPartition> producedPartitions;
private final Map<IntermediateResultPartitionID, TestingSchedulingResultPartition>
resultPartitionsById;
private ExecutionState executionState;
public TestingSchedulingExecutionVertex(
JobVertexID jobVertexId, int subtaskIndex, ExecutionState executionState) {
this.executionVertexId = new ExecutionVertexID(jobVertexId, subtaskIndex);
this.consumedPartitionGroups = new ArrayList<>();
this.producedPartitions = new ArrayList<>();
this.resultPartitionsById = new HashMap<>();
this.executionState = executionState;
}
@Override
public ExecutionVertexID getId() {
return executionVertexId;
}
@Override
public ExecutionState getState() {
return executionState;
}
public void setState(ExecutionState state) {
this.executionState = state;
}
@Override
public Iterable<TestingSchedulingResultPartition> getConsumedResults() {
return IterableUtils.flatMap(consumedPartitionGroups, resultPartitionsById::get);
}
@Override
public Iterable<TestingSchedulingResultPartition> getProducedResults() {
return producedPartitions;
}
@Override
public List<ConsumedPartitionGroup> getConsumedPartitionGroups() {
return consumedPartitionGroups;
}
void addConsumedPartitionGroup(
ConsumedPartitionGroup consumedPartitionGroup,
Map<IntermediateResultPartitionID, TestingSchedulingResultPartition>
consumedResultPartitionById) {
this.consumedPartitionGroups.add(consumedPartitionGroup);
this.resultPartitionsById.putAll(consumedResultPartitionById);
}
void addProducedPartition(TestingSchedulingResultPartition partition) {
producedPartitions.add(partition);
}
public static Builder newBuilder() {
return new Builder();
}
public static TestingSchedulingExecutionVertex withExecutionVertexID(
JobVertexID jobVertexId, int subtaskIndex) {
return newBuilder().withExecutionVertexID(jobVertexId, subtaskIndex).build();
}
/** Builder for {@link TestingSchedulingExecutionVertex}. */
public static | TestingSchedulingExecutionVertex |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/pkg/jar/Decompiler.java | {
"start": 68,
"end": 404
} | interface ____ {
void init(Context context);
/**
* @return {@code true} if the decompiler was successfully download or already exists
*/
boolean downloadIfNecessary();
/**
* @return {@code true} if the decompilation process was successful
*/
boolean decompile(Path jarToDecompile);
| Decompiler |
java | apache__camel | components/camel-spring-parent/camel-spring-rabbitmq/src/test/java/org/apache/camel/component/springrabbit/integration/RabbitMQComponentNullBodyIT.java | {
"start": 1467,
"end": 2726
} | class ____ extends RabbitMQITSupport {
@Override
protected CamelContext createCamelContext() throws Exception {
CamelContext camelContext = super.createCamelContext();
SpringRabbitMQComponent rmq = camelContext.getComponent("spring-rabbitmq", SpringRabbitMQComponent.class);
rmq.setAllowNullBody(true);
return camelContext;
}
@Test
public void testProducer() {
ConnectionFactory cf = context.getRegistry().lookupByNameAndType("myCF", ConnectionFactory.class);
Queue q = new Queue("myqueue");
TopicExchange t = new TopicExchange("foo");
AmqpAdmin admin = new RabbitAdmin(cf);
admin.declareQueue(q);
admin.declareExchange(t);
admin.declareBinding(BindingBuilder.bind(q).to(t).with("foo.bar.#"));
Assertions.assertDoesNotThrow(() -> template.sendBody("direct:start", null));
}
@Override
protected RoutesBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
from("direct:start")
.to("spring-rabbitmq:foo?routingKey=foo.bar");
}
};
}
}
| RabbitMQComponentNullBodyIT |
java | apache__hadoop | hadoop-tools/hadoop-datajoin/src/main/java/org/apache/hadoop/contrib/utils/join/DataJoinJob.java | {
"start": 1852,
"end": 6002
} | class ____ {
public static Class getClassByName(String className) {
Class retv = null;
try {
ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
retv = Class.forName(className, true, classLoader);
} catch (Exception e) {
throw new RuntimeException(e);
}
return retv;
}
public static JobConf createDataJoinJob(String args[]) throws IOException {
String inputDir = args[0];
String outputDir = args[1];
Class inputFormat = SequenceFileInputFormat.class;
if (args[2].compareToIgnoreCase("text") != 0) {
System.out.println("Using SequenceFileInputFormat: " + args[2]);
} else {
System.out.println("Using TextInputFormat: " + args[2]);
inputFormat = TextInputFormat.class;
}
int numOfReducers = Integer.parseInt(args[3]);
Class mapper = getClassByName(args[4]);
Class reducer = getClassByName(args[5]);
Class mapoutputValueClass = getClassByName(args[6]);
Class outputFormat = TextOutputFormat.class;
Class outputValueClass = Text.class;
if (args[7].compareToIgnoreCase("text") != 0) {
System.out.println("Using SequenceFileOutputFormat: " + args[7]);
outputFormat = SequenceFileOutputFormat.class;
outputValueClass = getClassByName(args[7]);
} else {
System.out.println("Using TextOutputFormat: " + args[7]);
}
long maxNumOfValuesPerGroup = 100;
String jobName = "";
if (args.length > 8) {
maxNumOfValuesPerGroup = Long.parseLong(args[8]);
}
if (args.length > 9) {
jobName = args[9];
}
Configuration defaults = new Configuration();
JobConf job = new JobConf(defaults, DataJoinJob.class);
job.setJobName("DataJoinJob: " + jobName);
FileSystem fs = FileSystem.get(defaults);
fs.delete(new Path(outputDir), true);
FileInputFormat.setInputPaths(job, inputDir);
job.setInputFormat(inputFormat);
job.setMapperClass(mapper);
FileOutputFormat.setOutputPath(job, new Path(outputDir));
job.setOutputFormat(outputFormat);
SequenceFileOutputFormat.setOutputCompressionType(job,
SequenceFile.CompressionType.BLOCK);
job.setMapOutputKeyClass(Text.class);
job.setMapOutputValueClass(mapoutputValueClass);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(outputValueClass);
job.setReducerClass(reducer);
job.setNumMapTasks(1);
job.setNumReduceTasks(numOfReducers);
job.setLong("datajoin.maxNumOfValuesPerGroup", maxNumOfValuesPerGroup);
return job;
}
/**
* Submit/run a map/reduce job.
*
* @param job
* @return true for success
* @throws IOException
*/
public static boolean runJob(JobConf job) throws IOException {
JobClient jc = new JobClient(job);
boolean sucess = true;
RunningJob running = null;
try {
running = jc.submitJob(job);
JobID jobId = running.getID();
System.out.println("Job " + jobId + " is submitted");
while (!running.isComplete()) {
System.out.println("Job " + jobId + " is still running.");
try {
Thread.sleep(60000);
} catch (InterruptedException e) {
}
running = jc.getJob(jobId);
}
sucess = running.isSuccessful();
} finally {
if (!sucess && (running != null)) {
running.killJob();
}
jc.close();
}
return sucess;
}
/**
* @param args
*/
public static void main(String[] args) {
boolean success;
if (args.length < 8 || args.length > 10) {
System.out.println("usage: DataJoinJob " + "inputdirs outputdir map_input_file_format "
+ "numofParts " + "mapper_class " + "reducer_class "
+ "map_output_value_class "
+ "output_value_class [maxNumOfValuesPerGroup [descriptionOfJob]]]");
System.exit(-1);
}
try {
JobConf job = DataJoinJob.createDataJoinJob(args);
success = DataJoinJob.runJob(job);
if (!success) {
System.out.println("Job failed");
}
} catch (IOException ioe) {
ioe.printStackTrace();
}
}
}
| DataJoinJob |
java | spring-projects__spring-framework | spring-web/src/jmh/java/org/springframework/http/support/HeadersAdaptersBaseline.java | {
"start": 16920,
"end": 17434
} | class ____ implements Entry<String, List<String>> {
private final String key;
HeaderEntry(String key) {
this.key = key;
}
@Override
public String getKey() {
return this.key;
}
@Override
public List<String> getValue() {
return headers.getAll(this.key);
}
@Override
public List<String> setValue(List<String> value) {
List<String> previousValues = headers.getAll(this.key);
headers.set(this.key, value);
return previousValues;
}
}
private | HeaderEntry |
java | apache__camel | components/camel-spring-parent/camel-spring-rabbitmq/src/test/java/org/apache/camel/component/springrabbit/integration/RabbitMQInOutIT.java | {
"start": 1201,
"end": 2922
} | class ____ extends RabbitMQITSupport {
@Test
public void testInOut() throws Exception {
getMockEndpoint("mock:input").expectedBodiesReceived("World");
getMockEndpoint("mock:result").expectedBodiesReceived("Hello World");
Exchange out = template.request("direct:start", e -> e.getMessage().setBody("World"));
Assertions.assertNotNull(out);
Assertions.assertFalse(out.isFailed());
Assertions.assertEquals("Hello World", out.getMessage().getBody());
Assertions.assertNotNull(out.getMessage().getHeader(Exchange.BREADCRUMB_ID));
Object crumb = out.getMessage().getHeader(Exchange.BREADCRUMB_ID);
Object crumb2 = context.getVariable("global:mycrmb");
Assertions.assertEquals(crumb, crumb2);
MockEndpoint.assertIsSatisfied(context, 30, TimeUnit.SECONDS);
}
@Override
protected RoutesBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
context.setUseBreadcrumb(true);
from("direct:start")
.setVariable("global:mycrmb", header(Exchange.BREADCRUMB_ID))
.to("log:request")
.to(ExchangePattern.InOut, "spring-rabbitmq:cheese?routingKey=foo.bar")
.to("log:response")
.to("mock:result");
from("spring-rabbitmq:cheese?queues=myqueue&routingKey=foo.bar")
.to("log:input")
.to("mock:input")
.transform(body().prepend("Hello "));
}
};
}
}
| RabbitMQInOutIT |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/bytearray/ByteArrayAssert_endsWith_with_Integer_Arguments_Test.java | {
"start": 1012,
"end": 1478
} | class ____ extends ByteArrayAssertBaseTest {
@Override
protected ByteArrayAssert invoke_api_method() {
return assertions.endsWith(6, 8);
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertEndsWith(getInfo(assertions), getActual(assertions), IntArrays.arrayOf(6, 8));
}
@Test
void invoke_api_like_user() {
assertThat(new byte[] { 1, 2, 3 }).endsWith(2, 3);
}
}
| ByteArrayAssert_endsWith_with_Integer_Arguments_Test |
java | elastic__elasticsearch | x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataActionTests.java | {
"start": 2247,
"end": 6943
} | class ____ implements MlDataRemover {
public void remove(float requestsPerSec, ActionListener<Boolean> listener, BooleanSupplier isTimedOutSupplier) {
listener.onResponse(isTimedOutSupplier.getAsBoolean() == false);
}
}
@Before
public void setup() {
threadPool = new TestThreadPool("TransportDeleteExpiredDataActionTests thread pool");
Client client = mock(Client.class);
ClusterService clusterService = mock(ClusterService.class);
WritableIndexExpander.initialize(clusterService, TestIndexNameExpressionResolver.newInstance());
auditor = mock(AnomalyDetectionAuditor.class);
transportDeleteExpiredDataAction = new TransportDeleteExpiredDataAction(
threadPool,
EsExecutors.DIRECT_EXECUTOR_SERVICE,
mock(TransportService.class),
new ActionFilters(Collections.emptySet()),
client,
clusterService,
mock(JobConfigProvider.class),
mock(JobResultsProvider.class),
auditor,
Clock.systemUTC()
);
}
@After
public void teardown() {
threadPool.shutdown();
}
public void testDeleteExpiredDataIterationNoTimeout() {
final int numRemovers = randomIntBetween(2, 5);
List<MlDataRemover> removers = Stream.generate(DummyDataRemover::new).limit(numRemovers).collect(Collectors.toList());
AtomicBoolean succeeded = new AtomicBoolean();
ActionListener<DeleteExpiredDataAction.Response> finalListener = ActionTestUtils.assertNoFailureListener(
response -> succeeded.set(response.isDeleted())
);
BooleanSupplier isTimedOutSupplier = () -> false;
DeleteExpiredDataAction.Request request = new DeleteExpiredDataAction.Request(null, null);
transportDeleteExpiredDataAction.deleteExpiredData(request, removers.iterator(), 1.0f, finalListener, isTimedOutSupplier, true);
assertTrue(succeeded.get());
}
public void testDeleteExpiredDataIterationWithTimeout() {
final int numRemovers = randomIntBetween(2, 5);
AtomicInteger removersRemaining = new AtomicInteger(randomIntBetween(0, numRemovers - 1));
List<MlDataRemover> removers = Stream.generate(DummyDataRemover::new).limit(numRemovers).collect(Collectors.toList());
AtomicBoolean succeeded = new AtomicBoolean();
ActionListener<DeleteExpiredDataAction.Response> finalListener = ActionTestUtils.assertNoFailureListener(
response -> succeeded.set(response.isDeleted())
);
BooleanSupplier isTimedOutSupplier = () -> (removersRemaining.getAndDecrement() <= 0);
DeleteExpiredDataAction.Request request = new DeleteExpiredDataAction.Request(null, null);
request.setJobId("_all");
transportDeleteExpiredDataAction.deleteExpiredData(request, removers.iterator(), 1.0f, finalListener, isTimedOutSupplier, true);
assertFalse(succeeded.get());
verify(auditor, times(1)).warning(
"",
"Deleting expired ML data was cancelled after the timeout period of [8h] was exceeded. "
+ "The setting [xpack.ml.nightly_maintenance_requests_per_second] "
+ "controls the deletion rate, consider increasing the value to assist in pruning old data"
);
verifyNoMoreInteractions(auditor);
}
public void testDeleteExpiredDataIterationWithTimeout_GivenJobIds() {
final int numRemovers = randomIntBetween(2, 5);
AtomicInteger removersRemaining = new AtomicInteger(randomIntBetween(0, numRemovers - 1));
List<MlDataRemover> removers = Stream.generate(DummyDataRemover::new).limit(numRemovers).collect(Collectors.toList());
AtomicBoolean succeeded = new AtomicBoolean();
ActionListener<DeleteExpiredDataAction.Response> finalListener = ActionTestUtils.assertNoFailureListener(
response -> succeeded.set(response.isDeleted())
);
BooleanSupplier isTimedOutSupplier = () -> (removersRemaining.getAndDecrement() <= 0);
DeleteExpiredDataAction.Request request = new DeleteExpiredDataAction.Request(null, null);
request.setJobId("foo*");
request.setExpandedJobIds(new String[] { "foo1", "foo2" });
transportDeleteExpiredDataAction.deleteExpiredData(request, removers.iterator(), 1.0f, finalListener, isTimedOutSupplier, true);
assertFalse(succeeded.get());
verify(auditor, times(1)).warning(eq("foo1"), anyString());
verify(auditor, times(1)).warning(eq("foo2"), anyString());
verifyNoMoreInteractions(auditor);
}
}
| DummyDataRemover |
java | grpc__grpc-java | services/src/generated/test/grpc/io/grpc/reflection/testing/AnotherReflectableServiceGrpc.java | {
"start": 5028,
"end": 5352
} | interface ____ {
/**
*/
default void method(io.grpc.reflection.testing.Request request,
io.grpc.stub.StreamObserver<io.grpc.reflection.testing.Reply> responseObserver) {
io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getMethodMethod(), responseObserver);
}
}
/**
* Base | AsyncService |
java | alibaba__nacos | common/src/test/java/com/alibaba/nacos/common/utils/JacksonUtilsTest.java | {
"start": 26879,
"end": 27792
} | class ____ extends TestOfAnnotation {
public String subField = "subField";
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
if (!super.equals(o)) {
return false;
}
TestOfAnnotationSub that = (TestOfAnnotationSub) o;
return subField != null ? subField.equals(that.subField) : that.subField == null;
}
@Override
public int hashCode() {
int result = super.hashCode();
result = 31 * result + (subField != null ? subField.hashCode() : 0);
return result;
}
}
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME)
static | TestOfAnnotationSub |
java | spring-projects__spring-boot | module/spring-boot-security-oauth2-resource-server/src/test/java/org/springframework/boot/security/oauth2/server/resource/autoconfigure/servlet/OAuth2ResourceServerAutoConfigurationTests.java | {
"start": 42430,
"end": 42602
} | class ____ {
@Bean
JwtDecoder decoder() {
return mock(JwtDecoder.class);
}
}
@Configuration(proxyBeanMethods = false)
@EnableWebSecurity
static | JwtDecoderConfig |
java | spring-projects__spring-boot | cli/spring-boot-cli/src/json-shade/java/org/springframework/boot/cli/json/JSONStringer.java | {
"start": 2901,
"end": 12581
} | enum ____ {
/**
* An array with no elements requires no separators or newlines before it is
* closed.
*/
EMPTY_ARRAY,
/**
* An array with at least one value requires a comma and newline before the next
* element.
*/
NONEMPTY_ARRAY,
/**
* An object with no keys or values requires no separators or newlines before it
* is closed.
*/
EMPTY_OBJECT,
/**
* An object whose most recent element is a key. The next element must be a value.
*/
DANGLING_KEY,
/**
* An object with at least one name/value pair requires a comma and newline before
* the next element.
*/
NONEMPTY_OBJECT,
/**
* A special bracketless array needed by JSONStringer.join() and
* JSONObject.quote() only. Not used for JSON encoding.
*/
NULL
}
/**
* Unlike the original implementation, this stack isn't limited to 20 levels of
* nesting.
*/
private final List<Scope> stack = new ArrayList<>();
/**
* A string containing a full set of spaces for a single level of indentation, or null
* for no pretty printing.
*/
private final String indent;
public JSONStringer() {
this.indent = null;
}
JSONStringer(int indentSpaces) {
char[] indentChars = new char[indentSpaces];
Arrays.fill(indentChars, ' ');
this.indent = new String(indentChars);
}
/**
* Begins encoding a new array. Each call to this method must be paired with a call to
* {@link #endArray}.
* @return this stringer.
* @throws JSONException if processing of json failed
*/
public JSONStringer array() throws JSONException {
return open(Scope.EMPTY_ARRAY, "[");
}
/**
* Ends encoding the current array.
* @return this stringer.
* @throws JSONException if processing of json failed
*/
public JSONStringer endArray() throws JSONException {
return close(Scope.EMPTY_ARRAY, Scope.NONEMPTY_ARRAY, "]");
}
/**
* Begins encoding a new object. Each call to this method must be paired with a call
* to {@link #endObject}.
* @return this stringer.
* @throws JSONException if processing of json failed
*/
public JSONStringer object() throws JSONException {
return open(Scope.EMPTY_OBJECT, "{");
}
/**
* Ends encoding the current object.
* @return this stringer.
* @throws JSONException if processing of json failed
*/
public JSONStringer endObject() throws JSONException {
return close(Scope.EMPTY_OBJECT, Scope.NONEMPTY_OBJECT, "}");
}
/**
* Enters a new scope by appending any necessary whitespace and the given bracket.
* @param empty any necessary whitespace
* @param openBracket the open bracket
* @return this object
* @throws JSONException if processing of json failed
*/
JSONStringer open(Scope empty, String openBracket) throws JSONException {
if (this.stack.isEmpty() && !this.out.isEmpty()) {
throw new JSONException("Nesting problem: multiple top-level roots");
}
beforeValue();
this.stack.add(empty);
this.out.append(openBracket);
return this;
}
/**
* Closes the current scope by appending any necessary whitespace and the given
* bracket.
* @param empty any necessary whitespace
* @param nonempty the current scope
* @param closeBracket the close bracket
* @return the JSON stringer
* @throws JSONException if processing of json failed
*/
JSONStringer close(Scope empty, Scope nonempty, String closeBracket) throws JSONException {
Scope context = peek();
if (context != nonempty && context != empty) {
throw new JSONException("Nesting problem");
}
this.stack.remove(this.stack.size() - 1);
if (context == nonempty) {
newline();
}
this.out.append(closeBracket);
return this;
}
/**
* Returns the value on the top of the stack.
* @return the scope
* @throws JSONException if processing of json failed
*/
private Scope peek() throws JSONException {
if (this.stack.isEmpty()) {
throw new JSONException("Nesting problem");
}
return this.stack.get(this.stack.size() - 1);
}
/**
* Replace the value on the top of the stack with the given value.
* @param topOfStack the scope at the top of the stack
*/
private void replaceTop(Scope topOfStack) {
this.stack.set(this.stack.size() - 1, topOfStack);
}
/**
* Encodes {@code value}.
* @param value a {@link JSONObject}, {@link JSONArray}, String, Boolean, Integer,
* Long, Double or null. May not be {@link Double#isNaN() NaNs} or
* {@link Double#isInfinite() infinities}.
* @return this stringer.
* @throws JSONException if processing of json failed
*/
public JSONStringer value(Object value) throws JSONException {
if (this.stack.isEmpty()) {
throw new JSONException("Nesting problem");
}
if (value instanceof JSONArray) {
((JSONArray) value).writeTo(this);
return this;
}
else if (value instanceof JSONObject) {
((JSONObject) value).writeTo(this);
return this;
}
beforeValue();
if (value == null || value instanceof Boolean || value == JSONObject.NULL) {
this.out.append(value);
}
else if (value instanceof Number) {
this.out.append(JSONObject.numberToString((Number) value));
}
else {
string(value.toString());
}
return this;
}
/**
* Encodes {@code value} to this stringer.
* @param value the value to encode
* @return this stringer.
* @throws JSONException if processing of json failed
*/
public JSONStringer value(boolean value) throws JSONException {
if (this.stack.isEmpty()) {
throw new JSONException("Nesting problem");
}
beforeValue();
this.out.append(value);
return this;
}
/**
* Encodes {@code value} to this stringer.
* @param value a finite value. May not be {@link Double#isNaN() NaNs} or
* {@link Double#isInfinite() infinities}.
* @return this stringer.
* @throws JSONException if processing of json failed
*/
public JSONStringer value(double value) throws JSONException {
if (this.stack.isEmpty()) {
throw new JSONException("Nesting problem");
}
beforeValue();
this.out.append(JSONObject.numberToString(value));
return this;
}
/**
* Encodes {@code value} to this stringer.
* @param value the value to encode
* @return this stringer.
* @throws JSONException if processing of json failed
*/
public JSONStringer value(long value) throws JSONException {
if (this.stack.isEmpty()) {
throw new JSONException("Nesting problem");
}
beforeValue();
this.out.append(value);
return this;
}
private void string(String value) {
this.out.append("\"");
for (int i = 0, length = value.length(); i < length; i++) {
char c = value.charAt(i);
/*
* From RFC 4627, "All Unicode characters may be placed within the quotation
* marks except for the characters that must be escaped: quotation mark,
* reverse solidus, and the control characters (U+0000 through U+001F)."
*/
switch (c) {
case '"', '\\', '/' -> this.out.append('\\').append(c);
case '\t' -> this.out.append("\\t");
case '\b' -> this.out.append("\\b");
case '\n' -> this.out.append("\\n");
case '\r' -> this.out.append("\\r");
case '\f' -> this.out.append("\\f");
default -> {
if (c <= 0x1F) {
this.out.append(String.format("\\u%04x", (int) c));
}
else {
this.out.append(c);
}
}
}
}
this.out.append("\"");
}
private void newline() {
if (this.indent == null) {
return;
}
this.out.append("\n");
this.out.append(this.indent.repeat(this.stack.size()));
}
/**
* Encodes the key (property name) to this stringer.
* @param name the name of the forthcoming value. May not be null.
* @return this stringer.
* @throws JSONException if processing of json failed
*/
public JSONStringer key(String name) throws JSONException {
if (name == null) {
throw new JSONException("Names must be non-null");
}
beforeKey();
string(name);
return this;
}
/**
* Inserts any necessary separators and whitespace before a name. Also adjusts the
* stack to expect the key's value.
* @throws JSONException if processing of json failed
*/
private void beforeKey() throws JSONException {
Scope context = peek();
if (context == Scope.NONEMPTY_OBJECT) { // first in object
this.out.append(',');
}
else if (context != Scope.EMPTY_OBJECT) { // not in an object!
throw new JSONException("Nesting problem");
}
newline();
replaceTop(Scope.DANGLING_KEY);
}
/**
* Inserts any necessary separators and whitespace before a literal value, inline
* array, or inline object. Also adjusts the stack to expect either a closing bracket
* or another element.
* @throws JSONException if processing of json failed
*/
private void beforeValue() throws JSONException {
if (this.stack.isEmpty()) {
return;
}
Scope context = peek();
if (context == Scope.EMPTY_ARRAY) { // first in array
replaceTop(Scope.NONEMPTY_ARRAY);
newline();
}
else if (context == Scope.NONEMPTY_ARRAY) { // another in array
this.out.append(',');
newline();
}
else if (context == Scope.DANGLING_KEY) { // value for key
this.out.append(this.indent == null ? ":" : ": ");
replaceTop(Scope.NONEMPTY_OBJECT);
}
else if (context != Scope.NULL) {
throw new JSONException("Nesting problem");
}
}
/**
* Returns the encoded JSON string.
* <p>
* If invoked with unterminated arrays or unclosed objects, this method's return value
* is undefined.
* <p>
* <strong>Warning:</strong> although it contradicts the general contract of
* {@link Object#toString}, this method returns null if the stringer contains no data.
* @return the encoded JSON string.
*/
@Override
public String toString() {
return this.out.isEmpty() ? null : this.out.toString();
}
}
| Scope |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/method/configuration/PrePostMethodSecurityConfigurationTests.java | {
"start": 77378,
"end": 77529
} | class ____ {
String method() {
return "ok";
}
}
@PreAuthorize("denyAll()")
@Secured("DENIED")
@DenyAll
static | AbstractClassWithNoAnnotations |
java | google__guice | core/test/com/google/inject/MembersInjectorTest.java | {
"start": 13902,
"end": 14017
} | class ____ {
@Inject MembersInjector<A<Unimplemented>> aMembersInjector;
}
static | InjectsBrokenMembersInjector |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/dev/ConfigureDisableInstrumentationBuildStep.java | {
"start": 255,
"end": 1453
} | class ____ {
@BuildStep
ServiceStartBuildItem configure(List<DisableInstrumentationForIndexPredicateBuildItem> forIndexItems,
List<DisableInstrumentationForClassPredicateBuildItem> forClassItems) {
if (forClassItems.isEmpty() && forIndexItems.isEmpty()) {
return null;
}
RuntimeUpdatesProcessor processor = RuntimeUpdatesProcessor.INSTANCE;
if (processor != null) {
processor.setDisableInstrumentationForIndexPredicate(determineEffectivePredicate(forIndexItems))
.setDisableInstrumentationForClassPredicate(determineEffectivePredicate(forClassItems));
}
return null;
}
private <T> Predicate<T> determineEffectivePredicate(List<? extends Supplier<Predicate<T>>> suppliers) {
if (suppliers.isEmpty()) {
return new AlwaysFalsePredicate<>();
} else {
if (suppliers.size() == 1) {
return suppliers.get(0).get();
} else {
return suppliers.stream().map(Supplier::get)
.reduce((c) -> false, Predicate::or);
}
}
}
}
| ConfigureDisableInstrumentationBuildStep |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/observers/BasicFuseableObserverTest.java | {
"start": 942,
"end": 2404
} | class ____ extends RxJavaTest {
@Test(expected = UnsupportedOperationException.class)
public void offer() {
TestObserverEx<Integer> to = new TestObserverEx<>();
BasicFuseableObserver<Integer, Integer> o = new BasicFuseableObserver<Integer, Integer>(to) {
@Nullable
@Override
public Integer poll() throws Exception {
return null;
}
@Override
public int requestFusion(int mode) {
return 0;
}
@Override
public void onNext(Integer value) {
}
@Override
protected boolean beforeDownstream() {
return false;
}
};
o.onSubscribe(Disposable.disposed());
to.assertNotSubscribed();
o.offer(1);
}
@Test(expected = UnsupportedOperationException.class)
public void offer2() {
BasicFuseableObserver<Integer, Integer> o = new BasicFuseableObserver<Integer, Integer>(new TestObserver<>()) {
@Nullable
@Override
public Integer poll() throws Exception {
return null;
}
@Override
public int requestFusion(int mode) {
return 0;
}
@Override
public void onNext(Integer value) {
}
};
o.offer(1, 2);
}
}
| BasicFuseableObserverTest |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/jdk/BigNumbersDeserTest.java | {
"start": 768,
"end": 4404
} | class ____ {
public BigDecimal number;
}
/*
/**********************************************************
/* Test methods
/**********************************************************
*/
private final ObjectMapper MAPPER = newJsonMapper();
private ObjectMapper newJsonMapperWithUnlimitedNumberSizeSupport() {
JsonFactory jsonFactory = JsonFactory.builder()
.streamReadConstraints(StreamReadConstraints.builder().maxNumberLength(Integer.MAX_VALUE).build())
.build();
return JsonMapper.builder(jsonFactory).build();
}
@Test
public void testDouble() throws Exception
{
try {
MAPPER.readValue(generateJson("d"), DoubleWrapper.class);
fail("expected StreamReadException");
} catch (StreamConstraintsException e) {
verifyException(e, "Number value length", "exceeds the maximum allowed");
}
}
@Test
public void testDoubleUnlimited() throws Exception
{
DoubleWrapper dw =
newJsonMapperWithUnlimitedNumberSizeSupport().readValue(generateJson("d"), DoubleWrapper.class);
assertNotNull(dw);
}
@Test
public void testBigDecimal() throws Exception
{
try {
MAPPER.readValue(generateJson("number"), BigDecimalWrapper.class);
fail("expected StreamReadException");
} catch (StreamConstraintsException e) {
verifyException(e, "Number value length ", "exceeds the maximum allowed");
}
}
@Test
public void testBigDecimalUnlimited() throws Exception
{
BigDecimalWrapper bdw =
newJsonMapperWithUnlimitedNumberSizeSupport()
.readValue(generateJson("number"), BigDecimalWrapper.class);
assertNotNull(bdw);
}
@Test
public void testBigInteger() throws Exception
{
try {
MAPPER.readValue(generateJson("number"), BigIntegerWrapper.class);
fail("expected StreamReadException");
} catch (StreamConstraintsException e) {
verifyException(e, "Number value length", "exceeds the maximum allowed");
}
}
@Test
public void testBigIntegerUnlimited() throws Exception
{
BigIntegerWrapper bdw =
newJsonMapperWithUnlimitedNumberSizeSupport()
.readValue(generateJson("number"), BigIntegerWrapper.class);
assertNotNull(bdw);
}
// [databind#4435]
@Test
public void testNumberStartingWithDot() throws Exception {
_testNumberWith(".555555555555555555555555555555");
_testNumberWith("-.555555555555555555555555555555");
_testNumberWith("+.555555555555555555555555555555");
}
// [databind#4577]
@Test
public void testNumberEndingWithDot() throws Exception {
_testNumberWith("55.");
_testNumberWith("-55.");
_testNumberWith("+55.");
}
private void _testNumberWith(String num) throws Exception
{
BigDecimal exp = new BigDecimal(num);
BigDecimalWrapper w = MAPPER.readValue("{\"number\":\"" + num + "\"}", BigDecimalWrapper.class);
assertEquals(exp, w.number);
}
private String generateJson(final String fieldName) {
final int len = 1200;
final StringBuilder sb = new StringBuilder();
sb.append("{\"")
.append(fieldName)
.append("\": ");
for (int i = 0; i < len; i++) {
sb.append(1);
}
sb.append("}");
return sb.toString();
}
}
| BigDecimalWrapper |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/eval/EvalTest_gt.java | {
"start": 267,
"end": 2070
} | class ____ extends TestCase {
public void test_long() throws Exception {
assertEquals(false, SQLEvalVisitorUtils.evalExpr(JdbcConstants.MYSQL, "? > ?", (long) 1, (byte) 2));
}
public void test_int() throws Exception {
assertEquals(false, SQLEvalVisitorUtils.evalExpr(JdbcConstants.MYSQL, "? > ?", (int) 1, (byte) 2));
}
public void test_short() throws Exception {
assertEquals(false, SQLEvalVisitorUtils.evalExpr(JdbcConstants.MYSQL, "? > ?", (short) 1, (byte) 2));
}
public void test_byte() throws Exception {
assertEquals(false, SQLEvalVisitorUtils.evalExpr(JdbcConstants.MYSQL, "? > ?", (byte) 1, (byte) 2));
}
public void test_BigInteger() throws Exception {
assertEquals(false, SQLEvalVisitorUtils.evalExpr(JdbcConstants.MYSQL, "?>?", BigInteger.ONE, (byte) 2));
}
public void test_BigDecimal() throws Exception {
assertEquals(false, SQLEvalVisitorUtils.evalExpr(JdbcConstants.MYSQL, "?>?", BigDecimal.ONE, (byte) 2));
}
public void test_float() throws Exception {
assertEquals(false, SQLEvalVisitorUtils.evalExpr(JdbcConstants.MYSQL, "?>?", (float) 1, (byte) 2));
}
public void test_double() throws Exception {
assertEquals(false, SQLEvalVisitorUtils.evalExpr(JdbcConstants.MYSQL, "?>?", (double) 1, (byte) 2));
}
public void test_String() throws Exception {
assertEquals(false, SQLEvalVisitorUtils.evalExpr(JdbcConstants.MYSQL, "?>?", "1", "2"));
}
public void test_Date() throws Exception {
assertEquals(false,
SQLEvalVisitorUtils.evalExpr(JdbcConstants.MYSQL, "?>?",
new Date(System.currentTimeMillis() - 10),
new Date(System.currentTimeMillis())));
}
}
| EvalTest_gt |
java | google__guava | android/guava/src/com/google/common/util/concurrent/ClosingFuture.java | {
"start": 54854,
"end": 55675
} | interface ____<V extends @Nullable Object> {
/**
* Computes a result, or throws an exception if unable to do so.
*
* <p>Any objects that are passed to {@link DeferredCloser#eventuallyClose(Object, Executor)
* closer.eventuallyClose()} will be closed when the {@link ClosingFuture} pipeline is done
* (but not before this method completes), even if this method throws or the pipeline is
* cancelled.
*
* @param peeker used to get the value of any of the input futures
*/
@ParametricNullness
V call(DeferredCloser closer, Peeker peeker) throws Exception;
}
/**
* An operation that returns a {@link ClosingFuture} result and may throw an exception.
*
* @param <V> the type of the result
*/
public | CombiningCallable |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/EqualTest_unary_mysql.java | {
"start": 245,
"end": 1230
} | class ____ extends TestCase {
public void test_exits() throws Exception {
String sql = "-a";
String sql_c = "-(a+1 + +(b+1))";
SQLUnaryExpr exprA, exprB, exprC;
{
MySqlExprParser parser = new MySqlExprParser(sql);
exprA = (SQLUnaryExpr) parser.expr();
}
{
MySqlExprParser parser = new MySqlExprParser(sql);
exprB = (SQLUnaryExpr) parser.expr();
}
{
MySqlExprParser parser = new MySqlExprParser(sql_c);
exprC = (SQLUnaryExpr) parser.expr();
}
assertEquals(exprA, exprB);
assertNotEquals(exprA, exprC);
assertTrue(exprA.equals(exprA));
assertFalse(exprA.equals(new Object()));
assertEquals(exprA.hashCode(), exprB.hashCode());
assertEquals(new SQLUnaryExpr(), new SQLUnaryExpr());
assertEquals(new SQLUnaryExpr().hashCode(), new SQLUnaryExpr().hashCode());
}
}
| EqualTest_unary_mysql |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/inheritance/joined/JoinedSubclassDuplicateFieldsWithTreatTest.java | {
"start": 1139,
"end": 2585
} | class ____ {
@Test
public void testRestrictedTreat(SessionFactoryScope scope) {
// SINGLE_TABLE
scope.inTransaction( (session) -> {
final String qry = "from Vendor v where treat(v as DomesticVendor).name = 'Spacely'";
final List<Vendor> vendors = session.createQuery( qry, Vendor.class ).getResultList();
assertThat( vendors ).isEmpty();
} );
// JOINED
scope.inTransaction( (session) -> {
final String qry = "from Payment p where treat(p as CardPayment).transactionId = 123";
final List<Payment> payments = session.createQuery( qry, Payment.class ).getResultList();
assertThat( payments ).hasSize( 1 );
assertThat( payments.get( 0 ) ).isInstanceOf( CardPayment.class );
} );
}
@BeforeEach
void createTestData(SessionFactoryScope sessions) {
sessions.inTransaction( (session) -> {
// SINGLE_TABLE
final DomesticVendor acme = new DomesticVendor( 1, "Acme", "Acme, LLC" );
final ForeignVendor spacely = new ForeignVendor( 2, "Spacely", "Spacely Space Sprockets, Inc" );
session.persist( acme );
session.persist( spacely );
// JOINED
final CardPayment cardPayment = new CardPayment( 1, 123, 123L, "USD" );
final CashPayment cashPayment = new CashPayment( 2, 789L, "USD" );
session.persist( cardPayment );
session.persist( cashPayment );
} );
}
@AfterEach
void dropTestData(SessionFactoryScope sessions) {
sessions.dropData();
}
}
| JoinedSubclassDuplicateFieldsWithTreatTest |
java | google__dagger | javatests/dagger/internal/codegen/ProductionGraphValidationTest.java | {
"start": 14099,
"end": 14551
} | class ____ {",
" @Produces A a() {",
" return null;",
" }",
"",
" @Produces ListenableFuture<String> str() {",
" return null;",
" }",
" }",
"",
" @ProductionComponent(",
" modules = {ExecutorModule.class, MonitoringModule.class, StringModule.class}",
" )",
" | StringModule |
java | apache__camel | components/camel-dhis2/camel-dhis2-component/src/test/java/org/apache/camel/component/dhis2/Dhis2ResourceTablesIT.java | {
"start": 1404,
"end": 2645
} | class ____ extends AbstractDhis2TestSupport {
private static final Logger LOG = LoggerFactory.getLogger(Dhis2ResourceTablesIT.class);
private static final String PATH_PREFIX
= Dhis2ApiCollection.getCollection().getApiName(Dhis2ResourceTablesApiMethod.class).getName();
@Test
public void testAnalytics() {
final Map<String, Object> headers = new HashMap<String, Object>();
// parameter type is Boolean
headers.put("CamelDhis2.skipAggregate", false);
// parameter type is Boolean
headers.put("CamelDhis2.skipEvents", false);
// parameter type is Integer
headers.put("CamelDhis2.lastYears", 2);
// parameter type is Integer
headers.put("CamelDhis2.interval", 10000);
Assertions.assertDoesNotThrow(() -> requestBodyAndHeaders("direct://ANALYTICS", null, headers));
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
public void configure() {
// test route for analytics
from("direct://ANALYTICS")
.to("dhis2://" + PATH_PREFIX + "/analytics");
}
};
}
}
| Dhis2ResourceTablesIT |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/type/RecursiveTypeTest.java | {
"start": 1727,
"end": 3691
} | class ____ extends HashMap<String, DataDefinition> {
public DataDefinition definition;
public DataDefinition elements;
public String regex;
public boolean required;
public String type;
}
private final ObjectMapper MAPPER = newJsonMapper();
// [databind#938]
@Test
public void testRecursivePair() throws Exception
{
JavaType t = MAPPER.constructType(ImmutablePair.class);
assertNotNull(t);
assertEquals(ImmutablePair.class, t.getRawClass());
List<ImmutablePair<String, Double>> list = new ArrayList<ImmutablePair<String, Double>>();
list.add(ImmutablePair.of("Hello World!", 123d));
String json = MAPPER.writeValueAsString(list);
assertNotNull(json);
// cannot deserialize with current definition, however
}
// for [databind#1301]
@Test
public void testJavaTypeToString() throws Exception
{
TypeFactory tf = MAPPER.getTypeFactory();
String desc = tf.constructType(DataDefinition.class).toString();
assertNotNull(desc);
// could try comparing exact message, but since it's informational try looser:
if (!desc.contains("map type")) {
fail("Description should contain 'map type', did not: "+desc);
}
if (!desc.contains("recursive type")) {
fail("Description should contain 'recursive type', did not: "+desc);
}
}
// for [databind#1647]
@Test
public void testSuperClassWithReferencedJavaType() {
TypeFactory tf = MAPPER.getTypeFactory();
tf.constructType(Base.class); // must be constructed before sub to set the cache correctly
JavaType subType = tf.constructType(Sub.class);
// baseTypeFromSub should be a ResolvedRecursiveType in this test
JavaType baseTypeFromSub = subType.getSuperClass();
assertNotNull(baseTypeFromSub.getSuperClass());
}
}
| DataDefinition |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java | {
"start": 12516,
"end": 28107
} | enum ____ {
NO_IGNORED_SOURCE {
@Override
public Map<String, List<NameValue>> loadAllIgnoredFields(SourceFilter filter, Map<String, List<Object>> storedFields) {
return Map.of();
}
@Override
public Map<String, List<NameValue>> loadSingleIgnoredField(Set<String> fieldPaths, Map<String, List<Object>> storedFields) {
return Map.of();
}
@Override
public void writeIgnoredFields(Collection<NameValue> ignoredFieldValues) {
assert false : "cannot write " + ignoredFieldValues.size() + " values with format NO_IGNORED_SOURCE";
}
@Override
public BytesRef filterValue(BytesRef value, Function<Map<String, Object>, Map<String, Object>> filter) {
assert false : "cannot filter ignored source with format NO_IGNORED_SOURCE";
return null;
}
},
LEGACY_SINGLE_IGNORED_SOURCE {
@Override
public Map<String, List<NameValue>> loadAllIgnoredFields(SourceFilter filter, Map<String, List<Object>> storedFields) {
Map<String, List<NameValue>> objectsWithIgnoredFields = null;
List<Object> storedValues = storedFields.get(NAME);
if (storedValues != null) {
for (Object value : storedValues) {
if (objectsWithIgnoredFields == null) {
objectsWithIgnoredFields = new HashMap<>();
}
NameValue nameValue = LegacyIgnoredSourceEncoding.decode(value);
if (filter != null
&& filter.isPathFiltered(nameValue.name(), XContentDataHelper.isEncodedObject(nameValue.value()))) {
// This path is filtered by the include/exclude rules
continue;
}
objectsWithIgnoredFields.computeIfAbsent(nameValue.getParentFieldName(), k -> new ArrayList<>()).add(nameValue);
}
}
return objectsWithIgnoredFields;
}
@Override
public Map<String, List<NameValue>> loadSingleIgnoredField(Set<String> fieldPaths, Map<String, List<Object>> storedFields) {
Map<String, List<NameValue>> valuesForFieldAndParents = new HashMap<>();
var ignoredSource = storedFields.get(NAME);
if (ignoredSource != null) {
for (Object value : ignoredSource) {
NameValue nameValue = LegacyIgnoredSourceEncoding.decode(value);
if (fieldPaths.contains(nameValue.name())) {
valuesForFieldAndParents.computeIfAbsent(nameValue.name(), k -> new ArrayList<>()).add(nameValue);
}
}
}
return valuesForFieldAndParents;
}
@Override
public void writeIgnoredFields(Collection<NameValue> ignoredFieldValues) {
for (NameValue nameValue : ignoredFieldValues) {
nameValue.doc().add(new StoredField(NAME, LegacyIgnoredSourceEncoding.encode(nameValue)));
}
}
@Override
public BytesRef filterValue(BytesRef value, Function<Map<String, Object>, Map<String, Object>> filter) throws IOException {
// for _ignored_source, parse, filter out the field and its contents, and serialize back downstream
IgnoredSourceFieldMapper.MappedNameValue mappedNameValue = LegacyIgnoredSourceEncoding.decodeAsMap(value);
Map<String, Object> transformedField = filter.apply(mappedNameValue.map());
if (transformedField.isEmpty()) {
// All values were filtered
return null;
}
// The unfiltered map contains at least one element, the field name with its value. If the field contains
// an object or an array, the value of the first element is a map or a list, respectively. Otherwise,
// it's a single leaf value, e.g. a string or a number.
var topValue = mappedNameValue.map().values().iterator().next();
if (topValue instanceof Map<?, ?> || topValue instanceof List<?>) {
// The field contains an object or an array, reconstruct it from the transformed map in case
// any subfield has been filtered out.
return LegacyIgnoredSourceEncoding.encodeFromMap(mappedNameValue.withMap(transformedField));
} else {
// The field contains a leaf value, and it hasn't been filtered out. It is safe to propagate the original value.
return value;
}
}
},
COALESCED_SINGLE_IGNORED_SOURCE {
@Override
public Map<String, List<NameValue>> loadAllIgnoredFields(SourceFilter filter, Map<String, List<Object>> storedFields) {
Map<String, List<NameValue>> objectsWithIgnoredFields = null;
var ignoredSource = storedFields.get(NAME);
if (ignoredSource == null) {
return objectsWithIgnoredFields;
}
for (var ignoredSourceEntry : ignoredSource) {
if (objectsWithIgnoredFields == null) {
objectsWithIgnoredFields = new HashMap<>();
}
@SuppressWarnings("unchecked")
List<NameValue> nameValues = (ignoredSourceEntry instanceof List<?>)
? (List<NameValue>) ignoredSourceEntry
: CoalescedIgnoredSourceEncoding.decode((BytesRef) ignoredSourceEntry);
assert nameValues.isEmpty() == false;
for (var nameValue : nameValues) {
if (filter != null
&& filter.isPathFiltered(nameValue.name(), XContentDataHelper.isEncodedObject(nameValue.value()))) {
// This path is filtered by the include/exclude rules
continue;
}
objectsWithIgnoredFields.computeIfAbsent(nameValue.getParentFieldName(), k -> new ArrayList<>()).add(nameValue);
}
}
return objectsWithIgnoredFields;
}
@Override
public Map<String, List<NameValue>> loadSingleIgnoredField(Set<String> fieldPaths, Map<String, List<Object>> storedFields) {
Map<String, List<NameValue>> valuesForFieldAndParents = new HashMap<>();
var ignoredSource = storedFields.get(NAME);
if (ignoredSource == null) {
return valuesForFieldAndParents;
}
for (var ignoredSourceEntry : ignoredSource) {
@SuppressWarnings("unchecked")
List<NameValue> nameValues = (ignoredSourceEntry instanceof List<?>)
? (List<NameValue>) ignoredSourceEntry
: CoalescedIgnoredSourceEncoding.decode((BytesRef) ignoredSourceEntry);
assert nameValues.isEmpty() == false;
String fieldPath = nameValues.getFirst().name();
if (fieldPaths.contains(fieldPath)) {
assert valuesForFieldAndParents.containsKey(fieldPath) == false;
valuesForFieldAndParents.put(fieldPath, nameValues);
}
}
return valuesForFieldAndParents;
}
@Override
public void writeIgnoredFields(Collection<NameValue> ignoredFieldValues) {
Map<LuceneDocument, Map<String, List<NameValue>>> entriesMap = new HashMap<>();
for (NameValue nameValue : ignoredFieldValues) {
entriesMap.computeIfAbsent(nameValue.doc(), d -> new HashMap<>())
.computeIfAbsent(nameValue.name(), n -> new ArrayList<>())
.add(nameValue);
}
for (var docEntry : entriesMap.entrySet()) {
for (var fieldEntry : docEntry.getValue().entrySet()) {
docEntry.getKey().add(new StoredField(NAME, CoalescedIgnoredSourceEncoding.encode(fieldEntry.getValue())));
}
}
}
@Override
public BytesRef filterValue(BytesRef value, Function<Map<String, Object>, Map<String, Object>> filter) throws IOException {
List<IgnoredSourceFieldMapper.MappedNameValue> mappedNameValues = CoalescedIgnoredSourceEncoding.decodeAsMap(value);
List<IgnoredSourceFieldMapper.MappedNameValue> filteredNameValues = new ArrayList<>(mappedNameValues.size());
boolean maybeDidFilter = false;
for (var mappedNameValue : mappedNameValues) {
Map<String, Object> transformedField = filter.apply(mappedNameValue.map());
if (transformedField.isEmpty()) {
maybeDidFilter = true;
continue;
}
var topValue = mappedNameValue.map().values().iterator().next();
if (topValue instanceof Map<?, ?> || topValue instanceof List<?>) {
// The field contains an object or an array in which some subfield may have been filtered out
maybeDidFilter = true;
}
filteredNameValues.add(mappedNameValue.withMap(transformedField));
}
if (maybeDidFilter) {
if (filteredNameValues.isEmpty()) {
// All values were filtered
return null;
} else {
return CoalescedIgnoredSourceEncoding.encodeFromMap(filteredNameValues);
}
} else {
// The field contains a leaf value, and it hasn't been filtered out. It is safe to propagate the original value.
return value;
}
}
};
public abstract Map<String, List<NameValue>> loadAllIgnoredFields(SourceFilter filter, Map<String, List<Object>> storedFields);
public abstract Map<String, List<NameValue>> loadSingleIgnoredField(Set<String> fieldPaths, Map<String, List<Object>> storedFields);
public abstract void writeIgnoredFields(Collection<NameValue> ignoredFieldValues);
public abstract BytesRef filterValue(BytesRef value, Function<Map<String, Object>, Map<String, Object>> filter) throws IOException;
}
public IgnoredSourceFormat ignoredSourceFormat() {
return ignoredSourceFormat(indexSettings.getIndexVersionCreated());
}
public static IgnoredSourceFormat ignoredSourceFormat(IndexVersion indexCreatedVersion) {
IndexVersion switchToNewFormatVersion = COALESCE_IGNORED_SOURCE_ENTRIES.isEnabled()
? IndexVersions.IGNORED_SOURCE_COALESCED_ENTRIES_WITH_FF
: IndexVersions.IGNORED_SOURCE_COALESCED_ENTRIES;
return indexCreatedVersion.onOrAfter(switchToNewFormatVersion)
? IgnoredSourceFormat.COALESCED_SINGLE_IGNORED_SOURCE
: IgnoredSourceFormat.LEGACY_SINGLE_IGNORED_SOURCE;
}
@Override
protected SyntheticSourceSupport syntheticSourceSupport() {
// This loader controls if this field is loaded in scope of synthetic source constructions.
// In rare cases decoding values stored in this field can fail leading to entire source
// not being available.
// We would like to have an option to lose some values in synthetic source
// but have search not fail.
return new SyntheticSourceSupport.Native(() -> new SourceLoader.SyntheticFieldLoader() {
@Override
public Stream<Map.Entry<String, StoredFieldLoader>> storedFieldLoaders() {
if (indexSettings.getSkipIgnoredSourceRead()) {
return Stream.empty();
}
// Values are handled in `SourceLoader`.
return Stream.of(Map.entry(NAME, (v) -> {}));
}
@Override
public DocValuesLoader docValuesLoader(LeafReader leafReader, int[] docIdsInLeaf) throws IOException {
return null;
}
@Override
public boolean hasValue() {
return false;
}
@Override
public void write(XContentBuilder b) throws IOException {
}
@Override
public String fieldName() {
// Does not really matter.
return NAME;
}
@Override
public void reset() {
}
});
}
/**
* A parsed NameValue alongside its value decoded to a map of maps that corresponds to the field-value subtree. There is only a single
* pair at the top level, with the key corresponding to the field name. If the field contains a single value, the map contains a single
* key-value pair. Otherwise, the value of the first pair will be another map etc.
*/
public record MappedNameValue(NameValue nameValue, XContentType type, Map<String, Object> map) {
public MappedNameValue withMap(Map<String, Object> map) {
return new MappedNameValue(new NameValue(nameValue.name, nameValue.parentOffset, null, nameValue.doc), type, map);
}
}
private static MappedNameValue nameValueToMapped(NameValue nameValue) throws IOException {
XContentBuilder xContentBuilder = XContentBuilder.builder(XContentDataHelper.getXContentType(nameValue.value()).xContent());
xContentBuilder.startObject().field(nameValue.name());
XContentDataHelper.decodeAndWrite(xContentBuilder, nameValue.value());
xContentBuilder.endObject();
Tuple<XContentType, Map<String, Object>> result = XContentHelper.convertToMap(BytesReference.bytes(xContentBuilder), true);
return new MappedNameValue(nameValue, result.v1(), result.v2());
}
private static NameValue mappedToNameValue(MappedNameValue mappedNameValue) throws IOException {
// The first entry is the field name, we skip to get to the value to encode.
assert mappedNameValue.map.size() == 1;
Object content = mappedNameValue.map.values().iterator().next();
// Check if the field contains a single value or an object.
@SuppressWarnings("unchecked")
XContentBuilder xContentBuilder = (content instanceof Map<?, ?> objectMap)
? XContentBuilder.builder(mappedNameValue.type().xContent()).map((Map<String, ?>) objectMap)
: XContentBuilder.builder(mappedNameValue.type().xContent()).value(content);
// Clone the NameValue with the updated value.
NameValue oldNameValue = mappedNameValue.nameValue();
return new NameValue(
oldNameValue.name(),
oldNameValue.parentOffset(),
XContentDataHelper.encodeXContentBuilder(xContentBuilder),
oldNameValue.doc()
);
}
}
| IgnoredSourceFormat |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/eval/EvalMethodLogTest.java | {
"start": 185,
"end": 1126
} | class ____ extends TestCase {
public void test_reverse() throws Exception {
assertEquals(Math.log(1), SQLEvalVisitorUtils.evalExpr(JdbcConstants.MYSQL, "log(1)"));
assertEquals(Math.log(1.001), SQLEvalVisitorUtils.evalExpr(JdbcConstants.MYSQL, "log(1.001)"));
assertEquals(Math.log(0), SQLEvalVisitorUtils.evalExpr(JdbcConstants.MYSQL, "log(0)"));
}
public void test_error() throws Exception {
Exception error = null;
try {
SQLEvalVisitorUtils.evalExpr(JdbcConstants.MYSQL, "log()", 12L);
} catch (Exception e) {
error = e;
}
assertNotNull(error);
}
public void test_error_1() throws Exception {
Exception error = null;
try {
SQLEvalVisitorUtils.evalExpr(JdbcConstants.MYSQL, "log(a)");
} catch (Exception e) {
error = e;
}
assertNotNull(error);
}
}
| EvalMethodLogTest |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java | {
"start": 6603,
"end": 6909
} | interface ____ extends Vector.Builder permits IntVectorBuilder, FixedBuilder {
/**
* Appends a int to the current entry.
*/
Builder appendInt(int value);
@Override
IntVector build();
}
/**
* A builder that never grows.
*/
sealed | Builder |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/EnumUtils.java | {
"start": 19964,
"end": 20112
} | enum ____, the least significant digits rightmost, not {@code null}.
* @param <E> the type of the enumeration.
* @return a set of | values |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/http/HttpClientOptions.java | {
"start": 1107,
"end": 33377
} | class ____ extends ClientOptionsBase {
/**
* The default maximum number of concurrent streams per connection for HTTP/2 = -1
*/
public static final int DEFAULT_HTTP2_MULTIPLEXING_LIMIT = -1;
/**
* The default connection window size for HTTP/2 = -1
*/
public static final int DEFAULT_HTTP2_CONNECTION_WINDOW_SIZE = -1;
/**
* The default keep alive timeout for HTTP/2 connection can send = 60 seconds
*/
public static final int DEFAULT_HTTP2_KEEP_ALIVE_TIMEOUT = 60;
/**
* Default value of whether keep-alive is enabled = {@code true}
*/
public static final boolean DEFAULT_KEEP_ALIVE = true;
/**
* Default value of whether pipe-lining is enabled = {@code false}
*/
public static final boolean DEFAULT_PIPELINING = false;
/**
* The default maximum number of requests an HTTP/1.1 pipe-lined connection can send = 10
*/
public static final int DEFAULT_PIPELINING_LIMIT = 10;
/**
* The default keep alive timeout for HTTP/1.1 connection can send = 60 seconds
*/
public static final int DEFAULT_KEEP_ALIVE_TIMEOUT = 60;
/**
* Whether the client should send requests with an {@code accepting-encoding} header set to a compression algorithm by default = {@code false}
*/
public static final boolean DEFAULT_DECOMPRESSION_SUPPORTED = false;
/**
* Default value of whether hostname verification (for SSL/TLS) is enabled = {@code true}
*/
public static final boolean DEFAULT_VERIFY_HOST = true;
/**
* The default value for host name = "localhost"
*/
public static final String DEFAULT_DEFAULT_HOST = "localhost";
/**
* The default value for port = 80
*/
public static final int DEFAULT_DEFAULT_PORT = 80;
/**
* The default protocol version = HTTP/1.1
*/
public static final HttpVersion DEFAULT_PROTOCOL_VERSION = HttpVersion.HTTP_1_1;
/**
* Default max HTTP chunk size = 8192
*/
public static final int DEFAULT_MAX_CHUNK_SIZE = 8192;
/**
* Default max length of the initial line (e.g. {@code "HTTP/1.1 200 OK"}) = 4096
*/
public static final int DEFAULT_MAX_INITIAL_LINE_LENGTH = 4096;
/**
* Default max length of all headers = 8192
*/
public static final int DEFAULT_MAX_HEADER_SIZE = 8192;
/**
* Default Application-Layer Protocol Negotiation versions = [] (automatic according to protocol version)
*/
public static final List<HttpVersion> DEFAULT_ALPN_VERSIONS = Collections.emptyList();
/**
* Default using HTTP/1.1 upgrade for establishing an <i>h2C</i> connection = {@code true}
*/
public static final boolean DEFAULT_HTTP2_CLEAR_TEXT_UPGRADE = true;
/**
* Default to use a preflight OPTIONS request for <i>h2C</i> without prior knowledge connection = {@code false}
*/
public static final boolean DEFAULT_HTTP2_CLEAR_TEXT_UPGRADE_WITH_PREFLIGHT_REQUEST = false;
/**
* Default maximum length of the aggregated content in bytes
*/
public static final int DEFAULT_HTTP2_UPGRADE_MAX_CONTENT_LENGTH = 65536;
/*
* Default max redirect = 16
*/
public static final int DEFAULT_MAX_REDIRECTS = 16;
/*
* Default force SNI = {@code false}
*/
public static final boolean DEFAULT_FORCE_SNI = false;
/**
* Default initial buffer size for HttpObjectDecoder = 128 bytes
*/
public static final int DEFAULT_DECODER_INITIAL_BUFFER_SIZE = 128;
/**
* Default tracing control = {@link TracingPolicy#PROPAGATE}
*/
public static final TracingPolicy DEFAULT_TRACING_POLICY = TracingPolicy.PROPAGATE;
/**
* Default shared client = {@code false}
*/
public static final boolean DEFAULT_SHARED = false;
/**
* Actual name of anonymous shared client = {@code __vertx.DEFAULT}
*/
public static final String DEFAULT_NAME = "__vertx.DEFAULT";
/**
* Use HTTP/2 multiplex implementation = {@code false}
*/
public static final boolean DEFAULT_HTTP_2_MULTIPLEX_IMPLEMENTATION = false;
private boolean verifyHost = true;
private boolean keepAlive;
private int keepAliveTimeout;
private int pipeliningLimit;
private boolean pipelining;
private int http2MultiplexingLimit;
private int http2ConnectionWindowSize;
private int http2KeepAliveTimeout;
private int http2UpgradeMaxContentLength;
private boolean http2MultiplexImplementation;
private boolean decompressionSupported;
private String defaultHost;
private int defaultPort;
private HttpVersion protocolVersion;
private int maxChunkSize;
private int maxInitialLineLength;
private int maxHeaderSize;
private Http2Settings initialSettings;
private boolean http2ClearTextUpgrade;
private boolean http2ClearTextUpgradeWithPreflightRequest;
private int maxRedirects;
private boolean forceSni;
private int decoderInitialBufferSize;
private TracingPolicy tracingPolicy;
private boolean shared;
private String name;
/**
* Default constructor
*/
public HttpClientOptions() {
super();
init();
}
/**
* Copy constructor
*
* @param other the options to copy
*/
public HttpClientOptions(ClientOptionsBase other) {
super(other);
init();
}
/**
* Copy constructor
*
* @param other the options to copy
*/
public HttpClientOptions(HttpClientOptions other) {
super(other);
this.verifyHost = other.isVerifyHost();
this.keepAlive = other.isKeepAlive();
this.keepAliveTimeout = other.getKeepAliveTimeout();
this.pipelining = other.isPipelining();
this.pipeliningLimit = other.getPipeliningLimit();
this.http2MultiplexingLimit = other.http2MultiplexingLimit;
this.http2ConnectionWindowSize = other.http2ConnectionWindowSize;
this.http2KeepAliveTimeout = other.getHttp2KeepAliveTimeout();
this.http2UpgradeMaxContentLength = other.getHttp2UpgradeMaxContentLength();
this.http2MultiplexImplementation = other.getHttp2MultiplexImplementation();
this.decompressionSupported = other.decompressionSupported;
this.defaultHost = other.defaultHost;
this.defaultPort = other.defaultPort;
this.protocolVersion = other.protocolVersion;
this.maxChunkSize = other.maxChunkSize;
this.maxInitialLineLength = other.getMaxInitialLineLength();
this.maxHeaderSize = other.getMaxHeaderSize();
this.initialSettings = other.initialSettings != null ? new Http2Settings(other.initialSettings) : null;
this.http2ClearTextUpgrade = other.http2ClearTextUpgrade;
this.http2ClearTextUpgradeWithPreflightRequest = other.http2ClearTextUpgradeWithPreflightRequest;
this.maxRedirects = other.maxRedirects;
this.forceSni = other.forceSni;
this.decoderInitialBufferSize = other.getDecoderInitialBufferSize();
this.tracingPolicy = other.tracingPolicy;
this.shared = other.shared;
this.name = other.name;
}
/**
* Constructor to create an options from JSON
*
* @param json the JSON
*/
public HttpClientOptions(JsonObject json) {
super(json);
init();
HttpClientOptionsConverter.fromJson(json, this);
}
/**
* Convert to JSON
*
* @return the JSON
*/
public JsonObject toJson() {
JsonObject json = super.toJson();
HttpClientOptionsConverter.toJson(this, json);
return json;
}
private void init() {
verifyHost = DEFAULT_VERIFY_HOST;
keepAlive = DEFAULT_KEEP_ALIVE;
keepAliveTimeout = DEFAULT_KEEP_ALIVE_TIMEOUT;
pipelining = DEFAULT_PIPELINING;
pipeliningLimit = DEFAULT_PIPELINING_LIMIT;
http2MultiplexingLimit = DEFAULT_HTTP2_MULTIPLEXING_LIMIT;
http2ConnectionWindowSize = DEFAULT_HTTP2_CONNECTION_WINDOW_SIZE;
http2KeepAliveTimeout = DEFAULT_HTTP2_KEEP_ALIVE_TIMEOUT;
http2UpgradeMaxContentLength = DEFAULT_HTTP2_UPGRADE_MAX_CONTENT_LENGTH;
http2MultiplexImplementation = DEFAULT_HTTP_2_MULTIPLEX_IMPLEMENTATION;
decompressionSupported = DEFAULT_DECOMPRESSION_SUPPORTED;
defaultHost = DEFAULT_DEFAULT_HOST;
defaultPort = DEFAULT_DEFAULT_PORT;
protocolVersion = DEFAULT_PROTOCOL_VERSION;
maxChunkSize = DEFAULT_MAX_CHUNK_SIZE;
maxInitialLineLength = DEFAULT_MAX_INITIAL_LINE_LENGTH;
maxHeaderSize = DEFAULT_MAX_HEADER_SIZE;
initialSettings = new Http2Settings();
http2ClearTextUpgrade = DEFAULT_HTTP2_CLEAR_TEXT_UPGRADE;
http2ClearTextUpgradeWithPreflightRequest = DEFAULT_HTTP2_CLEAR_TEXT_UPGRADE_WITH_PREFLIGHT_REQUEST;
maxRedirects = DEFAULT_MAX_REDIRECTS;
forceSni = DEFAULT_FORCE_SNI;
decoderInitialBufferSize = DEFAULT_DECODER_INITIAL_BUFFER_SIZE;
tracingPolicy = DEFAULT_TRACING_POLICY;
shared = DEFAULT_SHARED;
name = DEFAULT_NAME;
}
@Override
protected ClientSSLOptions createSSLOptions() {
return super.createSSLOptions().setApplicationLayerProtocols(HttpUtils.fromHttpAlpnVersions(DEFAULT_ALPN_VERSIONS));
}
@Override
public HttpClientOptions setSendBufferSize(int sendBufferSize) {
super.setSendBufferSize(sendBufferSize);
return this;
}
@Override
public HttpClientOptions setReceiveBufferSize(int receiveBufferSize) {
super.setReceiveBufferSize(receiveBufferSize);
return this;
}
@Override
public HttpClientOptions setReuseAddress(boolean reuseAddress) {
super.setReuseAddress(reuseAddress);
return this;
}
@Override
public HttpClientOptions setReusePort(boolean reusePort) {
super.setReusePort(reusePort);
return this;
}
@Override
public HttpClientOptions setTrafficClass(int trafficClass) {
super.setTrafficClass(trafficClass);
return this;
}
@Override
public HttpClientOptions setTcpNoDelay(boolean tcpNoDelay) {
super.setTcpNoDelay(tcpNoDelay);
return this;
}
@Override
public HttpClientOptions setTcpKeepAlive(boolean tcpKeepAlive) {
super.setTcpKeepAlive(tcpKeepAlive);
return this;
}
@Override
public HttpClientOptions setSoLinger(int soLinger) {
super.setSoLinger(soLinger);
return this;
}
@Override
public HttpClientOptions setIdleTimeout(int idleTimeout) {
super.setIdleTimeout(idleTimeout);
return this;
}
@Override
public HttpClientOptions setReadIdleTimeout(int idleTimeout) {
super.setReadIdleTimeout(idleTimeout);
return this;
}
@Override
public HttpClientOptions setWriteIdleTimeout(int idleTimeout) {
super.setWriteIdleTimeout(idleTimeout);
return this;
}
@Override
public HttpClientOptions setIdleTimeoutUnit(TimeUnit idleTimeoutUnit) {
super.setIdleTimeoutUnit(idleTimeoutUnit);
return this;
}
@Override
public HttpClientOptions setSsl(boolean ssl) {
super.setSsl(ssl);
return this;
}
@Override
public HttpClientOptions setKeyCertOptions(KeyCertOptions options) {
super.setKeyCertOptions(options);
return this;
}
@Override
public HttpClientOptions setTrustOptions(TrustOptions options) {
super.setTrustOptions(options);
return this;
}
@Override
public HttpClientOptions addEnabledCipherSuite(String suite) {
super.addEnabledCipherSuite(suite);
return this;
}
@Override
public HttpClientOptions removeEnabledCipherSuite(String suite) {
super.removeEnabledCipherSuite(suite);
return this;
}
@Override
public HttpClientOptions addEnabledSecureTransportProtocol(final String protocol) {
super.addEnabledSecureTransportProtocol(protocol);
return this;
}
@Override
public HttpClientOptions removeEnabledSecureTransportProtocol(String protocol) {
return (HttpClientOptions) super.removeEnabledSecureTransportProtocol(protocol);
}
@Override
public HttpClientOptions setTcpFastOpen(boolean tcpFastOpen) {
return (HttpClientOptions) super.setTcpFastOpen(tcpFastOpen);
}
@Override
public HttpClientOptions setTcpCork(boolean tcpCork) {
return (HttpClientOptions) super.setTcpCork(tcpCork);
}
@Override
public HttpClientOptions setTcpQuickAck(boolean tcpQuickAck) {
return (HttpClientOptions) super.setTcpQuickAck(tcpQuickAck);
}
@Override
public HttpClientOptions setTcpUserTimeout(int tcpUserTimeout) {
return (HttpClientOptions) super.setTcpUserTimeout(tcpUserTimeout);
}
@Override
public HttpClientOptions addCrlPath(String crlPath) throws NullPointerException {
return (HttpClientOptions) super.addCrlPath(crlPath);
}
@Override
public HttpClientOptions addCrlValue(Buffer crlValue) throws NullPointerException {
return (HttpClientOptions) super.addCrlValue(crlValue);
}
@Override
public HttpClientOptions setConnectTimeout(int connectTimeout) {
super.setConnectTimeout(connectTimeout);
return this;
}
@Override
public HttpClientOptions setTrustAll(boolean trustAll) {
super.setTrustAll(trustAll);
return this;
}
@Override
public HttpClientOptions setEnabledSecureTransportProtocols(Set<String> enabledSecureTransportProtocols) {
super.setEnabledSecureTransportProtocols(enabledSecureTransportProtocols);
return this;
}
@Override
public HttpClientOptions setSslHandshakeTimeout(long sslHandshakeTimeout) {
super.setSslHandshakeTimeout(sslHandshakeTimeout);
return this;
}
@Override
public HttpClientOptions setSslHandshakeTimeoutUnit(TimeUnit sslHandshakeTimeoutUnit) {
super.setSslHandshakeTimeoutUnit(sslHandshakeTimeoutUnit);
return this;
}
/**
* @return the maximum number of concurrent streams for an HTTP/2 connection, {@code -1} means
* the value sent by the server
*/
public int getHttp2MultiplexingLimit() {
return http2MultiplexingLimit;
}
/**
* Set a client limit of the number concurrent streams for each HTTP/2 connection, this limits the number
* of streams the client can create for a connection. The effective number of streams for a
* connection is the min of this value and the server's initial settings.
* <p/>
* Setting the value to {@code -1} means to use the value sent by the server's initial settings.
* {@code -1} is the default value.
*
* @param limit the maximum concurrent for an HTTP/2 connection
* @return a reference to this, so the API can be used fluently
*/
public HttpClientOptions setHttp2MultiplexingLimit(int limit) {
if (limit == 0 || limit < -1) {
throw new IllegalArgumentException("maxPoolSize must be > 0 or -1 (disabled)");
}
this.http2MultiplexingLimit = limit;
return this;
}
/**
* @return the default HTTP/2 connection window size
*/
public int getHttp2ConnectionWindowSize() {
return http2ConnectionWindowSize;
}
/**
* Set the default HTTP/2 connection window size. It overrides the initial window
* size set by {@link Http2Settings#getInitialWindowSize}, so the connection window size
* is greater than for its streams, in order the data throughput.
* <p/>
* A value of {@code -1} reuses the initial window size setting.
*
* @param http2ConnectionWindowSize the window size applied to the connection
* @return a reference to this, so the API can be used fluently
*/
public HttpClientOptions setHttp2ConnectionWindowSize(int http2ConnectionWindowSize) {
this.http2ConnectionWindowSize = http2ConnectionWindowSize;
return this;
}
/**
* @return the keep alive timeout value in seconds for HTTP/2 connections
*/
public int getHttp2KeepAliveTimeout() {
return http2KeepAliveTimeout;
}
/**
* Set the keep alive timeout for HTTP/2 connections, in seconds.
* <p/>
* This value determines how long a connection remains unused in the pool before being evicted and closed.
* <p/>
* A timeout of {@code 0} means there is no timeout.
*
* @param keepAliveTimeout the timeout, in seconds
* @return a reference to this, so the API can be used fluently
*/
public HttpClientOptions setHttp2KeepAliveTimeout(int keepAliveTimeout) {
if (keepAliveTimeout < 0) {
throw new IllegalArgumentException("HTTP/2 keepAliveTimeout must be >= 0");
}
this.http2KeepAliveTimeout = keepAliveTimeout;
return this;
}
/**
* @return the HTTP/2 upgrade maximum length of the aggregated content in bytes
*/
public int getHttp2UpgradeMaxContentLength() {
return http2UpgradeMaxContentLength;
}
/**
* Set the HTTP/2 upgrade maximum length of the aggregated content in bytes.
* This is only taken into account when {@link HttpClientOptions#http2ClearTextUpgradeWithPreflightRequest} is set to {@code false} (which is the default).
* When {@link HttpClientOptions#http2ClearTextUpgradeWithPreflightRequest} is {@code true}, then the client makes a preflight OPTIONS request
* and the upgrade will not send a body, voiding the requirements.
*
* @param http2UpgradeMaxContentLength the length, in bytes
* @return a reference to this, so the API can be used fluently
*/
public HttpClientOptions setHttp2UpgradeMaxContentLength(int http2UpgradeMaxContentLength) {
this.http2UpgradeMaxContentLength = http2UpgradeMaxContentLength;
return this;
}
/**
* @return whether to use the HTTP/2 implementation based on multiplexed channel
*/
public boolean getHttp2MultiplexImplementation() {
return http2MultiplexImplementation;
}
/**
* Set which HTTP/2 implementation to use
*
* @param http2MultiplexImplementation whether to use the HTTP/2 multiplex implementation
* @return a reference to this, so the API can be used fluently
*/
public HttpClientOptions setHttp2MultiplexImplementation(boolean http2MultiplexImplementation) {
this.http2MultiplexImplementation = http2MultiplexImplementation;
return this;
}
/**
* Is keep alive enabled on the client?
*
* @return {@code true} if enabled
*/
public boolean isKeepAlive() {
return keepAlive;
}
/**
* Set whether keep alive is enabled on the client
*
* @param keepAlive {@code true} if enabled
* @return a reference to this, so the API can be used fluently
*/
public HttpClientOptions setKeepAlive(boolean keepAlive) {
this.keepAlive = keepAlive;
return this;
}
/**
* @return the keep alive timeout value in seconds for HTTP/1.x connections
*/
public int getKeepAliveTimeout() {
return keepAliveTimeout;
}
/**
* Set the keep alive timeout for HTTP/1.x, in seconds.
* <p/>
* This value determines how long a connection remains unused in the pool before being evicted and closed.
* <p/>
* A timeout of {@code 0} means there is no timeout.
*
* @param keepAliveTimeout the timeout, in seconds
* @return a reference to this, so the API can be used fluently
*/
public HttpClientOptions setKeepAliveTimeout(int keepAliveTimeout) {
if (keepAliveTimeout < 0) {
throw new IllegalArgumentException("keepAliveTimeout must be >= 0");
}
this.keepAliveTimeout = keepAliveTimeout;
return this;
}
/**
* Is pipe-lining enabled on the client
*
* @return {@code true} if pipe-lining is enabled
*/
public boolean isPipelining() {
return pipelining;
}
/**
* Set whether pipe-lining is enabled on the client
*
* @param pipelining {@code true} if enabled
* @return a reference to this, so the API can be used fluently
*/
public HttpClientOptions setPipelining(boolean pipelining) {
this.pipelining = pipelining;
return this;
}
/**
* @return the limit of pending requests a pipe-lined HTTP/1 connection can send
*/
public int getPipeliningLimit() {
return pipeliningLimit;
}
/**
* Set the limit of pending requests a pipe-lined HTTP/1 connection can send.
*
* @param limit the limit of pending requests
* @return a reference to this, so the API can be used fluently
*/
public HttpClientOptions setPipeliningLimit(int limit) {
if (limit < 1) {
throw new IllegalArgumentException("pipeliningLimit must be > 0");
}
this.pipeliningLimit = limit;
return this;
}
/**
* Is hostname verification (for SSL/TLS) enabled?
*
* @return {@code true} if enabled
*/
public boolean isVerifyHost() {
return verifyHost;
}
/**
* Set whether hostname verification is enabled
*
* @param verifyHost {@code true} if enabled
* @return a reference to this, so the API can be used fluently
*/
public HttpClientOptions setVerifyHost(boolean verifyHost) {
this.verifyHost = verifyHost;
return this;
}
/**
* @return {@code true} if the client should send requests with an {@code accepting-encoding} header set to a compression algorithm, {@code false} otherwise
*/
public boolean isDecompressionSupported() {
return decompressionSupported;
}
/**
* Whether the client should send requests with an {@code accepting-encoding} header set to a compression algorithm.
*
* @param decompressionSupported {@code true} if the client should send a request with an {@code accepting-encoding} header set to a compression algorithm, {@code false} otherwise
* @return a reference to this, so the API can be used fluently
*/
public HttpClientOptions setDecompressionSupported(boolean decompressionSupported) {
this.decompressionSupported = decompressionSupported;
return this;
}
/**
* Get the default host name to be used by this client in requests if none is provided when making the request.
*
* @return the default host name
*/
public String getDefaultHost() {
return defaultHost;
}
/**
* Set the default host name to be used by this client in requests if none is provided when making the request.
*
* @return a reference to this, so the API can be used fluently
*/
public HttpClientOptions setDefaultHost(String defaultHost) {
this.defaultHost = defaultHost;
return this;
}
/**
* Get the default port to be used by this client in requests if none is provided when making the request.
*
* @return the default port
*/
public int getDefaultPort() {
return defaultPort;
}
/**
* Set the default port to be used by this client in requests if none is provided when making the request.
*
* @return a reference to this, so the API can be used fluently
*/
public HttpClientOptions setDefaultPort(int defaultPort) {
this.defaultPort = defaultPort;
return this;
}
/**
* Get the protocol version.
*
* @return the protocol version
*/
public HttpVersion getProtocolVersion() {
return protocolVersion;
}
/**
* Set the protocol version.
*
* @param protocolVersion the protocol version
* @return a reference to this, so the API can be used fluently
*/
public HttpClientOptions setProtocolVersion(HttpVersion protocolVersion) {
if (protocolVersion == null) {
throw new IllegalArgumentException("protocolVersion must not be null");
}
this.protocolVersion = protocolVersion;
return this;
}
/**
* Set the maximum HTTP chunk size
* @param maxChunkSize the maximum chunk size
* @return a reference to this, so the API can be used fluently
*/
public HttpClientOptions setMaxChunkSize(int maxChunkSize) {
this.maxChunkSize = maxChunkSize;
return this;
}
/**
* Returns the maximum HTTP chunk size
* @return the maximum HTTP chunk size
*/
public int getMaxChunkSize() {
return maxChunkSize;
}
/**
* @return the maximum length of the initial line for HTTP/1.x (e.g. {@code "GET / HTTP/1.0"})
*/
public int getMaxInitialLineLength() {
return maxInitialLineLength;
}
/**
* Set the maximum length of the initial line for HTTP/1.x (e.g. {@code "HTTP/1.1 200 OK"})
*
* @param maxInitialLineLength the new maximum initial length
* @return a reference to this, so the API can be used fluently
*/
public HttpClientOptions setMaxInitialLineLength(int maxInitialLineLength) {
this.maxInitialLineLength = maxInitialLineLength;
return this;
}
/**
* @return Returns the maximum length of all headers for HTTP/1.x
*/
public int getMaxHeaderSize() {
return maxHeaderSize;
}
/**
* Set the maximum length of all headers for HTTP/1.x .
*
* @param maxHeaderSize the new maximum length
* @return a reference to this, so the API can be used fluently
*/
public HttpClientOptions setMaxHeaderSize(int maxHeaderSize) {
this.maxHeaderSize = maxHeaderSize;
return this;
}
/**
* @return the initial HTTP/2 connection settings
*/
public Http2Settings getInitialSettings() {
return initialSettings;
}
/**
* Set the HTTP/2 connection settings immediately sent by to the server when the client connects.
*
* @param settings the settings value
* @return a reference to this, so the API can be used fluently
*/
public HttpClientOptions setInitialSettings(Http2Settings settings) {
this.initialSettings = settings;
return this;
}
@Override
public HttpClientOptions setUseAlpn(boolean useAlpn) {
return (HttpClientOptions) super.setUseAlpn(useAlpn);
}
@Override
public HttpClientOptions setSslEngineOptions(SSLEngineOptions sslEngineOptions) {
return (HttpClientOptions) super.setSslEngineOptions(sslEngineOptions);
}
/**
* @return the list of protocol versions to provide during the Application-Layer Protocol Negotiation. When
* the list is empty, the client provides a best effort list according to {@link #setProtocolVersion}
*/
public List<HttpVersion> getAlpnVersions() {
List<String> applicationLayerProtocols = getOrCreateSSLOptions().getApplicationLayerProtocols();
return applicationLayerProtocols != null ? HttpUtils.toHttpAlpnVersions(applicationLayerProtocols ) : null;
}
/**
* Set the list of protocol versions to provide to the server during the Application-Layer Protocol Negotiation.
* When the list is empty, the client makes a best effort list according to {@link #setProtocolVersion}:
*
* <ul>
* <li>{@link HttpVersion#HTTP_2}: [ "h2", "http/1.1" ]</li>
* <li>otherwise: [{@link #getProtocolVersion()}]</li>
* </ul>
*
* @param alpnVersions the versions
* @return a reference to this, so the API can be used fluently
*/
public HttpClientOptions setAlpnVersions(List<HttpVersion> alpnVersions) {
ClientSSLOptions sslOptions = getOrCreateSSLOptions();
if (alpnVersions != null) {
sslOptions.setApplicationLayerProtocols(HttpUtils.fromHttpAlpnVersions(alpnVersions));
} else {
sslOptions.setApplicationLayerProtocols(null);
}
return this;
}
/**
* @return {@code true} when an <i>h2c</i> connection is established using an HTTP/1.1 upgrade request, {@code false} when directly
*/
public boolean isHttp2ClearTextUpgrade() {
return http2ClearTextUpgrade;
}
/**
* Set to {@code true} when an <i>h2c</i> connection is established using an HTTP/1.1 upgrade request, and {@code false}
* when an <i>h2c</i> connection is established directly (with prior knowledge).
*
* @param value the upgrade value
* @return a reference to this, so the API can be used fluently
*/
public HttpClientOptions setHttp2ClearTextUpgrade(boolean value) {
this.http2ClearTextUpgrade = value;
return this;
}
/**
* @return {@code true} when an <i>h2c</i> connection established using an HTTP/1.1 upgrade request should perform
* a preflight {@code OPTIONS} request to the origin server to establish the <i>h2c</i> connection
*/
public boolean isHttp2ClearTextUpgradeWithPreflightRequest() {
return http2ClearTextUpgradeWithPreflightRequest;
}
/**
* Set to {@code true} when an <i>h2c</i> connection established using an HTTP/1.1 upgrade request should perform
* a preflight {@code OPTIONS} request to the origin server to establish the <i>h2c</i> connection.
*
* @param value the upgrade value
* @return a reference to this, so the API can be used fluently
*/
public HttpClientOptions setHttp2ClearTextUpgradeWithPreflightRequest(boolean value) {
this.http2ClearTextUpgradeWithPreflightRequest = value;
return this;
}
/**
* @return the maximum number of redirection a request can follow
*/
public int getMaxRedirects() {
return maxRedirects;
}
/**
* Set to {@code maxRedirects} the maximum number of redirection a request can follow.
*
* @param maxRedirects the maximum number of redirection
* @return a reference to this, so the API can be used fluently
*/
public HttpClientOptions setMaxRedirects(int maxRedirects) {
this.maxRedirects = maxRedirects;
return this;
}
/**
* @return whether the client should always use SNI on TLS/SSL connections
*/
public boolean isForceSni() {
return forceSni;
}
/**
* By default, the server name is only sent for Fully Qualified Domain Name (FQDN), setting
* this property to {@code true} forces the server name to be always sent.
*
* @param forceSni {@code true} when the client should always use SNI on TLS/SSL connections
* @return a reference to this, so the API can be used fluently
*/
public HttpClientOptions setForceSni(boolean forceSni) {
this.forceSni = forceSni;
return this;
}
public HttpClientOptions setMetricsName(String metricsName) {
return (HttpClientOptions) super.setMetricsName(metricsName);
}
public HttpClientOptions setProxyOptions(ProxyOptions proxyOptions) {
return (HttpClientOptions) super.setProxyOptions(proxyOptions);
}
@Override
public HttpClientOptions setNonProxyHosts(List<String> nonProxyHosts) {
return (HttpClientOptions) super.setNonProxyHosts(nonProxyHosts);
}
@Override
public HttpClientOptions addNonProxyHost(String nonProxyHost) {
return (HttpClientOptions) super.addNonProxyHost(nonProxyHost);
}
@Override
public HttpClientOptions setLocalAddress(String localAddress) {
return (HttpClientOptions) super.setLocalAddress(localAddress);
}
@Override
public HttpClientOptions setLogActivity(boolean logEnabled) {
return (HttpClientOptions) super.setLogActivity(logEnabled);
}
@Override
public HttpClientOptions setActivityLogDataFormat(ByteBufFormat activityLogDataFormat) {
return (HttpClientOptions) super.setActivityLogDataFormat(activityLogDataFormat);
}
/**
* @return the initial buffer size for the HTTP decoder
*/
public int getDecoderInitialBufferSize() { return decoderInitialBufferSize; }
/**
* set to {@code initialBufferSizeHttpDecoder} the initial buffer of the HttpDecoder.
*
* @param decoderInitialBufferSize the initial buffer size
* @return a reference to this, so the API can be used fluently
*/
public HttpClientOptions setDecoderInitialBufferSize(int decoderInitialBufferSize) {
Arguments.require(decoderInitialBufferSize > 0, "initialBufferSizeHttpDecoder must be > 0");
this.decoderInitialBufferSize = decoderInitialBufferSize;
return this;
}
/**
* @return the tracing policy
*/
public TracingPolicy getTracingPolicy() {
return tracingPolicy;
}
/**
* Set the tracing policy for the client behavior when Vert.x has tracing enabled.
*
* @param tracingPolicy the tracing policy
* @return a reference to this, so the API can be used fluently
*/
public HttpClientOptions setTracingPolicy(TracingPolicy tracingPolicy) {
this.tracingPolicy = tracingPolicy;
return this;
}
/**
* @return whether the pool is shared
*/
public boolean isShared() {
return shared;
}
/**
* Set to {@code true} to share the client.
*
* <p> There can be multiple shared clients distinguished by {@link #getName()}, when no specific
* name is set, the {@link #DEFAULT_NAME} is used.
*
* @param shared {@code true} to use a shared client
* @return a reference to this, so the API can be used fluently
*/
public HttpClientOptions setShared(boolean shared) {
this.shared = shared;
return this;
}
/**
* @return the client name used for sharing
*/
public String getName() {
return name;
}
/**
* Set the client name, used when the client is shared, otherwise ignored.
* @param name the new name
* @return a reference to this, so the API can be used fluently
*/
public HttpClientOptions setName(String name) {
Objects.requireNonNull(name, "Client name cannot be null");
this.name = name;
return this;
}
}
| HttpClientOptions |
java | elastic__elasticsearch | libs/grok/src/test/java/org/elasticsearch/grok/MatcherWatchdogTests.java | {
"start": 1193,
"end": 4790
} | class ____ extends ESTestCase {
public void testInterrupt() throws Exception {
AtomicBoolean run = new AtomicBoolean(true); // to avoid a lingering thread when test has completed
MatcherWatchdog watchdog = MatcherWatchdog.newInstance(10, 100, System::currentTimeMillis, (delay, command) -> {
try {
Thread.sleep(delay);
} catch (InterruptedException e) {
throw new AssertionError(e);
}
Thread thread = new Thread(() -> {
if (run.get()) {
command.run();
}
});
thread.start();
});
Map<?, ?> registry = ((MatcherWatchdog.Default) watchdog).registry;
assertThat(registry.size(), is(0));
// need to call #register() method on a different thread, assertBusy() fails if current thread gets interrupted
AtomicBoolean interrupted = new AtomicBoolean(false);
Thread thread = new Thread(() -> {
Matcher matcher = mock(Matcher.class);
watchdog.register(matcher);
verify(matcher, timeout(9999).atLeastOnce()).interrupt();
interrupted.set(true);
while (run.get()) {
} // wait here so that the size of the registry can be asserted
watchdog.unregister(matcher);
});
thread.start();
assertBusy(() -> {
assertThat(interrupted.get(), is(true));
assertThat(registry.size(), is(1));
});
run.set(false);
assertBusy(() -> { assertThat(registry.size(), is(0)); });
}
public void testIdleIfNothingRegistered() throws Exception {
long interval = 1L;
ScheduledExecutorService threadPool = mock(ScheduledExecutorService.class);
MatcherWatchdog watchdog = MatcherWatchdog.newInstance(
interval,
Long.MAX_VALUE,
System::currentTimeMillis,
(delay, command) -> threadPool.schedule(command, delay, TimeUnit.MILLISECONDS)
);
// Periodic action is not scheduled because no thread is registered
verifyNoMoreInteractions(threadPool);
PlainActionFuture<Runnable> commandFuture = new PlainActionFuture<>();
// Periodic action is scheduled because a thread is registered
doAnswer(invocationOnMock -> {
commandFuture.onResponse(invocationOnMock.getArgument(0));
return null;
}).when(threadPool).schedule(any(Runnable.class), eq(interval), eq(TimeUnit.MILLISECONDS));
Matcher matcher = mock(Matcher.class);
watchdog.register(matcher);
// Registering the first thread should have caused the command to get scheduled again
Runnable command = safeGet(commandFuture);
Mockito.reset(threadPool);
watchdog.unregister(matcher);
command.run();
// Periodic action is not scheduled again because no thread is registered
verifyNoMoreInteractions(threadPool);
watchdog.register(matcher);
Thread otherThread = new Thread(() -> {
Matcher otherMatcher = mock(Matcher.class);
watchdog.register(otherMatcher);
});
try {
verify(threadPool).schedule(any(Runnable.class), eq(interval), eq(TimeUnit.MILLISECONDS));
// Registering a second thread does not cause the command to get scheduled twice
verifyNoMoreInteractions(threadPool);
otherThread.start();
} finally {
otherThread.join();
}
}
}
| MatcherWatchdogTests |
java | spring-projects__spring-boot | configuration-metadata/spring-boot-configuration-processor/src/test/java/org/springframework/boot/configurationprocessor/metadata/JsonMarshallerTests.java | {
"start": 1212,
"end": 14756
} | class ____ {
@Test
void marshallAndUnmarshal() throws Exception {
ConfigurationMetadata metadata = new ConfigurationMetadata();
metadata.add(ItemMetadata.newProperty("a", "b", StringBuffer.class.getName(), InputStream.class.getName(), null,
"desc", "x", new ItemDeprecation("Deprecation comment", "b.c.d", "1.2.3")));
metadata.add(ItemMetadata.newProperty("b.c.d", null, null, null, null, null, null, null));
metadata.add(ItemMetadata.newProperty("c", null, null, null, null, null, 123, null));
metadata.add(ItemMetadata.newProperty("d", null, null, null, null, null, true, null));
metadata.add(ItemMetadata.newProperty("e", null, null, null, null, null, new String[] { "y", "n" }, null));
metadata.add(ItemMetadata.newProperty("f", null, null, null, null, null, new Boolean[] { true, false }, null));
metadata.add(ItemMetadata.newGroup("d", null, null, null));
metadata.add(ItemMetadata.newGroup("e", null, null, "sourceMethod"));
metadata.add(ItemHint.newHint("a.b"));
metadata.add(ItemHint.newHint("c", new ItemHint.ValueHint(123, "hey"), new ItemHint.ValueHint(456, null)));
metadata.add(new ItemHint("d", null,
Arrays.asList(new ItemHint.ValueProvider("first", Collections.singletonMap("target", "foo")),
new ItemHint.ValueProvider("second", null))));
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
JsonMarshaller marshaller = new JsonMarshaller();
marshaller.write(metadata, outputStream);
ConfigurationMetadata read = marshaller.read(new ByteArrayInputStream(outputStream.toByteArray()));
assertThat(read).has(Metadata.withProperty("a.b", StringBuffer.class)
.fromSource(InputStream.class)
.withDescription("desc")
.withDefaultValue("x")
.withDeprecation("Deprecation comment", "b.c.d", "1.2.3"));
assertThat(read).has(Metadata.withProperty("b.c.d"));
assertThat(read).has(Metadata.withProperty("c").withDefaultValue(123));
assertThat(read).has(Metadata.withProperty("d").withDefaultValue(true));
assertThat(read).has(Metadata.withProperty("e").withDefaultValue(new String[] { "y", "n" }));
assertThat(read).has(Metadata.withProperty("f").withDefaultValue(new Object[] { true, false }));
assertThat(read).has(Metadata.withGroup("d"));
assertThat(read).has(Metadata.withGroup("e").fromSourceMethod("sourceMethod"));
assertThat(read).has(Metadata.withHint("a.b"));
assertThat(read).has(Metadata.withHint("c").withValue(0, 123, "hey").withValue(1, 456, null));
assertThat(read).has(Metadata.withHint("d").withProvider("first", "target", "foo").withProvider("second"));
}
@Test
void marshallOrderItems() throws IOException {
ConfigurationMetadata metadata = new ConfigurationMetadata();
metadata.add(ItemHint.newHint("fff"));
metadata.add(ItemHint.newHint("eee"));
metadata.add(ItemMetadata.newProperty("com.example.bravo", "bbb", null, null, null, null, null, null));
metadata.add(ItemMetadata.newProperty("com.example.bravo", "aaa", null, null, null, null, null, null));
metadata.add(ItemMetadata.newProperty("com.example.alpha", "ddd", null, null, null, null, null, null));
metadata.add(ItemMetadata.newProperty("com.example.alpha", "ccc", null, null, null, null, null, null));
metadata.add(ItemMetadata.newGroup("com.acme.bravo", "com.example.AnotherTestProperties", null, null));
metadata.add(ItemMetadata.newGroup("com.acme.alpha", "com.example.TestProperties", null, null));
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
JsonMarshaller marshaller = new JsonMarshaller();
marshaller.write(metadata, outputStream);
String json = outputStream.toString();
assertThat(json).containsSubsequence("\"groups\"", "\"com.acme.alpha\"", "\"com.acme.bravo\"", "\"properties\"",
"\"com.example.alpha.ccc\"", "\"com.example.alpha.ddd\"", "\"com.example.bravo.aaa\"",
"\"com.example.bravo.bbb\"", "\"hints\"", "\"eee\"", "\"fff\"");
}
@Test
void marshallPutDeprecatedItemsAtTheEnd() throws IOException {
ConfigurationMetadata metadata = new ConfigurationMetadata();
metadata.add(ItemMetadata.newProperty("com.example.bravo", "bbb", null, null, null, null, null, null));
metadata.add(ItemMetadata.newProperty("com.example.bravo", "aaa", null, null, null, null, null,
new ItemDeprecation(null, null, null, "warning")));
metadata.add(ItemMetadata.newProperty("com.example.alpha", "ddd", null, null, null, null, null, null));
metadata.add(ItemMetadata.newProperty("com.example.alpha", "ccc", null, null, null, null, null,
new ItemDeprecation(null, null, null, "warning")));
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
JsonMarshaller marshaller = new JsonMarshaller();
marshaller.write(metadata, outputStream);
String json = outputStream.toString();
assertThat(json).containsSubsequence("\"properties\"", "\"com.example.alpha.ddd\"", "\"com.example.bravo.bbb\"",
"\"com.example.alpha.ccc\"", "\"com.example.bravo.aaa\"");
}
@Test
void orderingForSameGroupNames() throws IOException {
ConfigurationMetadata metadata = new ConfigurationMetadata();
metadata.add(ItemMetadata.newGroup("com.acme.alpha", null, "com.example.Foo", null));
metadata.add(ItemMetadata.newGroup("com.acme.alpha", null, "com.example.Bar", null));
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
JsonMarshaller marshaller = new JsonMarshaller();
marshaller.write(metadata, outputStream);
String json = outputStream.toString();
assertThat(json).containsSubsequence("\"groups\"", "\"name\": \"com.acme.alpha\"",
"\"sourceType\": \"com.example.Bar\"", "\"name\": \"com.acme.alpha\"",
"\"sourceType\": \"com.example.Foo\"");
}
@Test
void orderingForSamePropertyNames() throws IOException {
ConfigurationMetadata metadata = new ConfigurationMetadata();
metadata.add(ItemMetadata.newProperty("com.example.bravo", "aaa", "java.lang.Boolean", "com.example.Foo", null,
null, null, null));
metadata.add(ItemMetadata.newProperty("com.example.bravo", "aaa", "java.lang.Integer", "com.example.Bar", null,
null, null, null));
metadata
.add(ItemMetadata.newProperty("com.example.alpha", "ddd", null, "com.example.Bar", null, null, null, null));
metadata
.add(ItemMetadata.newProperty("com.example.alpha", "ccc", null, "com.example.Foo", null, null, null, null));
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
JsonMarshaller marshaller = new JsonMarshaller();
marshaller.write(metadata, outputStream);
String json = outputStream.toString();
assertThat(json).containsSubsequence("\"groups\"", "\"properties\"", "\"com.example.alpha.ccc\"",
"com.example.Foo", "\"com.example.alpha.ddd\"", "com.example.Bar", "\"com.example.bravo.aaa\"",
"com.example.Bar", "\"com.example.bravo.aaa\"", "com.example.Foo");
}
@Test
void orderingForSameGroupWithNullSourceType() throws IOException {
ConfigurationMetadata metadata = new ConfigurationMetadata();
metadata.add(ItemMetadata.newGroup("com.acme.alpha", null, "com.example.Foo", null));
metadata.add(ItemMetadata.newGroup("com.acme.alpha", null, null, null));
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
JsonMarshaller marshaller = new JsonMarshaller();
marshaller.write(metadata, outputStream);
String json = outputStream.toString();
assertThat(json).containsSubsequence("\"groups\"", "\"name\": \"com.acme.alpha\"",
"\"name\": \"com.acme.alpha\"", "\"sourceType\": \"com.example.Foo\"");
}
@Test
void orderingForSamePropertyNamesWithNullSourceType() throws IOException {
ConfigurationMetadata metadata = new ConfigurationMetadata();
metadata.add(ItemMetadata.newProperty("com.example.bravo", "aaa", "java.lang.Boolean", null, null, null, null,
null));
metadata.add(ItemMetadata.newProperty("com.example.bravo", "aaa", "java.lang.Integer", "com.example.Bar", null,
null, null, null));
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
JsonMarshaller marshaller = new JsonMarshaller();
marshaller.write(metadata, outputStream);
String json = outputStream.toString();
assertThat(json).containsSubsequence("\"groups\"", "\"properties\"", "\"com.example.bravo.aaa\"",
"\"java.lang.Boolean\"", "\"com.example.bravo.aaa\"", "\"java.lang.Integer\"", "\"com.example.Bar");
}
@Test
void shouldReadIgnoredProperties() throws Exception {
String json = """
{
"ignored": {
"properties": [
{
"name": "prop1"
},
{
"name": "prop2"
}
]
}
}
""";
ConfigurationMetadata metadata = read(json);
assertThat(metadata.getIgnored()).containsExactly(ItemIgnore.forProperty("prop1"),
ItemIgnore.forProperty("prop2"));
}
@Test
void shouldCheckRootFields() {
String json = """
{
"groups": [], "properties": [], "hints": [], "ignored": {}, "dummy": []
}""";
assertThatException().isThrownBy(() -> read(json))
.withMessage(
"Expected only keys [groups, hints, ignored, properties], but found additional keys [dummy]. Path: .");
}
@Test
void shouldCheckGroupFields() {
String json = """
{
"groups": [
{
"name": "g",
"type": "java.lang.String",
"description": "Some description",
"sourceType": "java.lang.String",
"sourceMethod": "some()",
"dummy": "dummy"
}
], "properties": [], "hints": []
}""";
assertThatException().isThrownBy(() -> read(json))
.withMessage(
"Expected only keys [description, name, sourceMethod, sourceType, type], but found additional keys [dummy]. Path: .groups.[0]");
}
@Test
void shouldCheckPropertyFields() {
String json = """
{
"groups": [], "properties": [
{
"name": "name",
"type": "java.lang.String",
"description": "Some description",
"sourceType": "java.lang.String",
"defaultValue": "value",
"deprecation": {
"level": "warning",
"reason": "some reason",
"replacement": "name-new",
"since": "v17"
},
"deprecated": true,
"dummy": "dummy"
}
], "hints": []
}""";
assertThatException().isThrownBy(() -> read(json))
.withMessage(
"Expected only keys [defaultValue, deprecated, deprecation, description, name, sourceType, type], but found additional keys [dummy]. Path: .properties.[0]");
}
@Test
void shouldCheckPropertyDeprecationFields() {
String json = """
{
"groups": [], "properties": [
{
"name": "name",
"type": "java.lang.String",
"description": "Some description",
"sourceType": "java.lang.String",
"defaultValue": "value",
"deprecation": {
"level": "warning",
"reason": "some reason",
"replacement": "name-new",
"since": "v17",
"dummy": "dummy"
},
"deprecated": true
}
], "hints": []
}""";
assertThatException().isThrownBy(() -> read(json))
.withMessage(
"Expected only keys [level, reason, replacement, since], but found additional keys [dummy]. Path: .properties.[0].deprecation");
}
@Test
void shouldCheckHintFields() {
String json = """
{
"groups": [], "properties": [], "hints": [
{
"name": "name",
"values": [],
"providers": [],
"dummy": "dummy"
}
]
}""";
assertThatException().isThrownBy(() -> read(json))
.withMessage(
"Expected only keys [name, providers, values], but found additional keys [dummy]. Path: .hints.[0]");
}
@Test
void shouldCheckHintValueFields() {
String json = """
{
"groups": [], "properties": [], "hints": [
{
"name": "name",
"values": [
{
"value": "value",
"description": "some description",
"dummy": "dummy"
}
],
"providers": []
}
]
}""";
assertThatException().isThrownBy(() -> read(json))
.withMessage(
"Expected only keys [description, value], but found additional keys [dummy]. Path: .hints.[0].values.[0]");
}
@Test
void shouldCheckHintProviderFields() {
String json = """
{
"groups": [], "properties": [], "hints": [
{
"name": "name",
"values": [],
"providers": [
{
"name": "name",
"parameters": {
"target": "jakarta.servlet.http.HttpServlet"
},
"dummy": "dummy"
}
]
}
]
}""";
assertThatException().isThrownBy(() -> read(json))
.withMessage(
"Expected only keys [name, parameters], but found additional keys [dummy]. Path: .hints.[0].providers.[0]");
}
@Test
void shouldCheckIgnoredFields() {
String json = """
{
"ignored": {
"properties": [],
"dummy": {}
}
}
""";
assertThatException().isThrownBy(() -> read(json))
.withMessage("Expected only keys [properties], but found additional keys [dummy]. Path: .ignored");
}
@Test
void shouldCheckIgnoredPropertiesFields() {
String json = """
{
"ignored": {
"properties": [
{
"name": "prop1",
"dummy": true
}
]
}
}
""";
assertThatException().isThrownBy(() -> read(json))
.withMessage("Expected only keys [name], but found additional keys [dummy]. Path: .ignored.properties.[0]");
}
private ConfigurationMetadata read(String json) throws Exception {
JsonMarshaller marshaller = new JsonMarshaller();
return marshaller.read(new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)));
}
}
| JsonMarshallerTests |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/MethodParameterTests.java | {
"start": 1355,
"end": 11884
} | class ____ {
private Method method;
private MethodParameter stringParameter;
private MethodParameter longParameter;
private MethodParameter intReturnType;
private MethodParameter jspecifyNullableParameter;
private MethodParameter jspecifyNonNullParameter;
private MethodParameter springNullableParameter;
private MethodParameter springNonNullParameter;
@BeforeEach
void setup() throws NoSuchMethodException {
method = getClass().getMethod("method", String.class, long.class);
stringParameter = new MethodParameter(method, 0);
longParameter = new MethodParameter(method, 1);
intReturnType = new MethodParameter(method, -1);
Method jspecifyNullableMethod = getClass().getMethod("jspecifyNullableMethod", String.class, String.class);
jspecifyNullableParameter = new MethodParameter(jspecifyNullableMethod, 0);
jspecifyNonNullParameter = new MethodParameter(jspecifyNullableMethod, 1);
Method springNullableMethod = getClass().getMethod("springNullableMethod", String.class, String.class);
springNullableParameter = new MethodParameter(springNullableMethod, 0);
springNonNullParameter = new MethodParameter(springNullableMethod, 1);
}
@Test
void equals() throws NoSuchMethodException {
assertThat(stringParameter).isEqualTo(stringParameter);
assertThat(longParameter).isEqualTo(longParameter);
assertThat(intReturnType).isEqualTo(intReturnType);
assertThat(stringParameter).isNotEqualTo(longParameter);
assertThat(stringParameter).isNotEqualTo(intReturnType);
assertThat(longParameter).isNotEqualTo(stringParameter);
assertThat(longParameter).isNotEqualTo(intReturnType);
assertThat(intReturnType).isNotEqualTo(stringParameter);
assertThat(intReturnType).isNotEqualTo(longParameter);
Method method = getClass().getMethod("method", String.class, long.class);
MethodParameter methodParameter = new MethodParameter(method, 0);
assertThat(methodParameter).isEqualTo(stringParameter);
assertThat(stringParameter).isEqualTo(methodParameter);
assertThat(methodParameter).isNotEqualTo(longParameter);
assertThat(longParameter).isNotEqualTo(methodParameter);
}
@Test
void testHashCode() throws NoSuchMethodException {
assertThat(stringParameter.hashCode()).isEqualTo(stringParameter.hashCode());
assertThat(longParameter.hashCode()).isEqualTo(longParameter.hashCode());
assertThat(intReturnType.hashCode()).isEqualTo(intReturnType.hashCode());
Method method = getClass().getMethod("method", String.class, long.class);
MethodParameter methodParameter = new MethodParameter(method, 0);
assertThat(methodParameter.hashCode()).isEqualTo(stringParameter.hashCode());
assertThat(methodParameter.hashCode()).isNotEqualTo(longParameter.hashCode());
}
@Test
@SuppressWarnings("deprecation")
void testFactoryMethods() {
assertThat(MethodParameter.forMethodOrConstructor(method, 0)).isEqualTo(stringParameter);
assertThat(MethodParameter.forMethodOrConstructor(method, 1)).isEqualTo(longParameter);
assertThat(MethodParameter.forExecutable(method, 0)).isEqualTo(stringParameter);
assertThat(MethodParameter.forExecutable(method, 1)).isEqualTo(longParameter);
assertThat(MethodParameter.forParameter(method.getParameters()[0])).isEqualTo(stringParameter);
assertThat(MethodParameter.forParameter(method.getParameters()[1])).isEqualTo(longParameter);
}
@Test
void indexValidation() {
assertThatIllegalArgumentException().isThrownBy(() ->
new MethodParameter(method, 2));
}
@Test
void annotatedConstructorParameterInStaticNestedClass() throws Exception {
Constructor<?> constructor = NestedClass.class.getDeclaredConstructor(String.class);
MethodParameter methodParameter = MethodParameter.forExecutable(constructor, 0);
assertThat(methodParameter.getParameterType()).isEqualTo(String.class);
assertThat(methodParameter.getParameterAnnotation(Param.class)).as("Failed to find @Param annotation").isNotNull();
}
@Test // SPR-16652
void annotatedConstructorParameterInInnerClass() throws Exception {
Constructor<?> constructor = InnerClass.class.getConstructor(getClass(), String.class, Callable.class);
MethodParameter methodParameter = MethodParameter.forExecutable(constructor, 0);
assertThat(methodParameter.getParameterType()).isEqualTo(getClass());
assertThat(methodParameter.getParameterAnnotation(Param.class)).isNull();
methodParameter = MethodParameter.forExecutable(constructor, 1);
assertThat(methodParameter.getParameterType()).isEqualTo(String.class);
assertThat(methodParameter.getParameterAnnotation(Param.class)).as("Failed to find @Param annotation").isNotNull();
methodParameter = MethodParameter.forExecutable(constructor, 2);
assertThat(methodParameter.getParameterType()).isEqualTo(Callable.class);
assertThat(methodParameter.getParameterAnnotation(Param.class)).isNull();
}
@Test // SPR-16734
void genericConstructorParameterInInnerClass() throws Exception {
	// Generic type resolution on inner-class constructor parameters must also
	// account for the synthetic enclosing-instance argument at index 0.
	Constructor<?> ctor = InnerClass.class.getConstructor(getClass(), String.class, Callable.class);

	MethodParameter enclosingArg = MethodParameter.forExecutable(ctor, 0);
	assertThat(enclosingArg.getParameterType()).isEqualTo(getClass());
	assertThat(enclosingArg.getGenericParameterType()).isEqualTo(getClass());

	MethodParameter stringArg = MethodParameter.forExecutable(ctor, 1);
	assertThat(stringArg.getParameterType()).isEqualTo(String.class);
	assertThat(stringArg.getGenericParameterType()).isEqualTo(String.class);

	MethodParameter callableArg = MethodParameter.forExecutable(ctor, 2);
	assertThat(callableArg.getParameterType()).isEqualTo(Callable.class);
	assertThat(callableArg.getGenericParameterType())
			.isEqualTo(ResolvableType.forClassWithGenerics(Callable.class, Integer.class).getType());
}
@Test
@Deprecated
void multipleResolveParameterTypeCalls() throws Exception {
	// Deprecated mutable API: resolving against successive containing classes
	// updates the type reported by the very same MethodParameter instance.
	Method getMethod = ArrayList.class.getMethod("get", int.class);
	MethodParameter returnType = MethodParameter.forExecutable(getMethod, -1);
	assertThat(returnType.getParameterType()).isEqualTo(Object.class);
	GenericTypeResolver.resolveParameterType(returnType, StringList.class);
	assertThat(returnType.getParameterType()).isEqualTo(String.class);
	GenericTypeResolver.resolveParameterType(returnType, IntegerList.class);
	assertThat(returnType.getParameterType()).isEqualTo(Integer.class);
}
@Test
void equalsAndHashCodeConsidersContainingClass() throws Exception {
	// NOTE(review): despite its name, this test varies the NESTING level
	// (third instance is built via nested()); the containing-class variants
	// are exercised by equalsAndHashCodeConsidersNesting — the two test
	// names look swapped and may deserve renaming together.
	Method getMethod = ArrayList.class.getMethod("get", int.class);
	MethodParameter first = MethodParameter.forExecutable(getMethod, -1);
	MethodParameter second = MethodParameter.forExecutable(getMethod, -1);
	MethodParameter nested = MethodParameter.forExecutable(getMethod, -1).nested();
	assertThat(first).isEqualTo(second).isNotEqualTo(nested);
	assertThat(first.hashCode()).isEqualTo(second.hashCode());
}
@Test
void equalsAndHashCodeConsidersNesting() throws Exception {
	// NOTE(review): despite its name, this test varies the CONTAINING CLASS
	// (withContainingClass), not the nesting level — the name looks swapped
	// with equalsAndHashCodeConsidersContainingClass above.
	Method getMethod = ArrayList.class.getMethod("get", int.class);
	MethodParameter stringBound = MethodParameter.forExecutable(getMethod, -1)
			.withContainingClass(StringList.class);
	MethodParameter stringBoundAgain = MethodParameter.forExecutable(getMethod, -1)
			.withContainingClass(StringList.class);
	MethodParameter integerBound = MethodParameter.forExecutable(getMethod, -1)
			.withContainingClass(IntegerList.class);
	MethodParameter unbound = MethodParameter.forExecutable(getMethod, -1);
	assertThat(stringBound).isEqualTo(stringBoundAgain).isNotEqualTo(integerBound).isNotEqualTo(unbound);
	assertThat(stringBound.hashCode()).isEqualTo(stringBoundAgain.hashCode());
}
@Test
void withContainingClassReturnsNewInstance() throws Exception {
	// withContainingClass must copy rather than mutate: the original keeps
	// reporting the unresolved type while each copy resolves independently.
	Method getMethod = ArrayList.class.getMethod("get", int.class);
	MethodParameter original = MethodParameter.forExecutable(getMethod, -1);
	MethodParameter asStringList = original.withContainingClass(StringList.class);
	MethodParameter asIntegerList = original.withContainingClass(IntegerList.class);
	assertThat(original).isNotSameAs(asStringList).isNotSameAs(asIntegerList);
	assertThat(original.getParameterType()).isEqualTo(Object.class);
	assertThat(asStringList.getParameterType()).isEqualTo(String.class);
	assertThat(asIntegerList.getParameterType()).isEqualTo(Integer.class);
}
@Test
void withTypeIndexReturnsNewInstance() throws Exception {
	// withTypeIndex must copy rather than mutate the receiver.
	Method getMethod = ArrayList.class.getMethod("get", int.class);
	MethodParameter original = MethodParameter.forExecutable(getMethod, -1);
	MethodParameter indexTwo = original.withTypeIndex(2);
	MethodParameter indexThree = original.withTypeIndex(3);
	assertThat(original).isNotSameAs(indexTwo).isNotSameAs(indexThree);
	assertThat(original.getTypeIndexForCurrentLevel()).isNull();
	assertThat(indexTwo.getTypeIndexForCurrentLevel()).isEqualTo(2);
	assertThat(indexThree.getTypeIndexForCurrentLevel()).isEqualTo(3);
}
@Test
@SuppressWarnings("deprecation")
void mutatingNestingLevelShouldNotChangeNewInstance() throws Exception {
	// Deprecated mutators applied to the original must not leak into a copy
	// produced by withTypeIndex, and vice versa.
	Method getMethod = ArrayList.class.getMethod("get", int.class);
	MethodParameter original = MethodParameter.forExecutable(getMethod, -1);
	MethodParameter copy = original.withTypeIndex(2);
	assertThat(copy.getTypeIndexForCurrentLevel()).isEqualTo(2);
	original.setTypeIndexForCurrentLevel(1);
	copy.decreaseNestingLevel();
	assertThat(copy.getTypeIndexForCurrentLevel()).isNull();
}
@Test
void nestedWithTypeIndexReturnsNewInstance() throws Exception {
	// nested(int) must copy rather than mutate the receiver.
	Method getMethod = ArrayList.class.getMethod("get", int.class);
	MethodParameter original = MethodParameter.forExecutable(getMethod, -1);
	MethodParameter nestedTwo = original.nested(2);
	MethodParameter nestedThree = original.nested(3);
	assertThat(original).isNotSameAs(nestedTwo).isNotSameAs(nestedThree);
	assertThat(original.getTypeIndexForCurrentLevel()).isNull();
	assertThat(nestedTwo.getTypeIndexForCurrentLevel()).isEqualTo(2);
	assertThat(nestedThree.getTypeIndexForCurrentLevel()).isEqualTo(3);
}
@Test
void jspecifyNullableParameter() {
	// A JSpecify @Nullable parameter is reported as optional.
	boolean optional = jspecifyNullableParameter.isOptional();
	assertThat(optional).isTrue();
}
@Test
void jspecifyNonNullParameter() {
	// An unannotated (non-null) parameter next to a JSpecify @Nullable one
	// is not reported as optional.
	boolean optional = jspecifyNonNullParameter.isOptional();
	assertThat(optional).isFalse();
}
@Test
void springNullableParameter() {
	// A (deprecated) Spring @Nullable parameter is reported as optional.
	boolean optional = springNullableParameter.isOptional();
	assertThat(optional).isTrue();
}
@Test
void springNonNullParameter() {
	// An unannotated parameter next to a Spring @Nullable one is not optional.
	boolean optional = springNonNullParameter.isOptional();
	assertThat(optional).isFalse();
}
// Fixture: its two parameters back the stringParameter (index 0) and
// longParameter (index 1) fields used throughout these tests.
public int method(String p1, long p2) {
	return 42;
}
// Fixture: JSpecify-annotated method backing the jspecify*Parameter fields —
// parameter 0 is @Nullable, parameter 1 is not.
public @org.jspecify.annotations.Nullable String jspecifyNullableMethod(@org.jspecify.annotations.Nullable String nullableParameter, String nonNullParameter) {
	return nullableParameter;
}
// Fixture: method using the deprecated Spring @Nullable annotation, backing
// the spring*Parameter fields — parameter 0 is @Nullable, parameter 1 is not.
@SuppressWarnings("deprecation")
@org.springframework.lang.Nullable
public String springNullableMethod(@org.springframework.lang.Nullable String nullableParameter, String nonNullParameter) {
	return nullableParameter;
}
@SuppressWarnings("unused")
private static | MethodParameterTests |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ext/javatime/deser/MonthDayDeserTest.java | {
"start": 696,
"end": 1004
} | class ____ extends DateTimeTestBase
{
private final ObjectMapper MAPPER = newMapper();
private final ObjectReader READER = MAPPER.readerFor(MonthDay.class);
private final TypeReference<Map<String, MonthDay>> MAP_TYPE_REF = new TypeReference<Map<String, MonthDay>>() { };
static | MonthDayDeserTest |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.