language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryStringErrorTests.java
|
{
"start": 1227,
"end": 3807
}
|
class ____ extends ErrorsForCasesWithoutExamplesTestCase {
@Override
protected List<TestCaseSupplier> cases() {
return paramsToSuppliers(QueryStringTests.parameters());
}
@Override
protected Stream<List<DataType>> testCandidates(List<TestCaseSupplier> cases, Set<List<DataType>> valid) {
// Don't test null, as it is not allowed but the expected message is not a type error - so we check it separately in VerifierTests
return super.testCandidates(cases, valid).filter(sig -> false == sig.contains(DataType.NULL));
}
@Override
protected Expression build(Source source, List<Expression> args) {
return new QueryString(source, args.getFirst(), args.size() > 1 ? args.get(1) : null, EsqlTestUtils.TEST_CFG);
}
@Override
protected Matcher<String> expectedTypeErrorMatcher(List<Set<DataType>> validPerPosition, List<DataType> signature) {
return equalTo(errorMessageStringForMatch(validPerPosition, signature, (l, p) -> "keyword, text"));
}
private static String errorMessageStringForMatch(
List<Set<DataType>> validPerPosition,
List<DataType> signature,
AbstractFunctionTestCase.PositionalErrorMessageSupplier positionalErrorMessageSupplier
) {
boolean invalid = false;
for (int i = 0; i < signature.size() && invalid == false; i++) {
// Need to check for nulls and bad parameters in order
if (signature.get(i) == DataType.NULL) {
return TypeResolutions.ParamOrdinal.fromIndex(i).name().toLowerCase(Locale.ROOT)
+ " argument of ["
+ sourceForSignature(signature)
+ "] cannot be null, received []";
}
if (validPerPosition.get(i).contains(signature.get(i)) == false) {
// Map expressions have different error messages
if (i == 1) {
return format(null, "second argument of [{}] must be a map expression, received []", sourceForSignature(signature));
}
break;
}
}
try {
return typeErrorMessage(true, validPerPosition, signature, positionalErrorMessageSupplier);
} catch (IllegalStateException e) {
// This means all the positional args were okay, so the expected error is for nulls or from the combination
return EsqlBinaryComparison.formatIncompatibleTypesMessage(signature.get(0), signature.get(1), sourceForSignature(signature));
}
}
}
|
QueryStringErrorTests
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/main/java/org/hibernate/envers/internal/entities/mapper/PersistentCollectionChangeData.java
|
{
"start": 427,
"end": 1721
}
|
class ____ {
private final String entityName;
private final Map<String, Object> data;
private final Object changedElement;
public PersistentCollectionChangeData(String entityName, Map<String, Object> data, Object changedElement) {
this.entityName = entityName;
this.data = data;
this.changedElement = changedElement;
}
/**
* @return Name of the (middle) entity that holds the collection data.
*/
public String getEntityName() {
return entityName;
}
public Map<String, Object> getData() {
return data;
}
/**
* @return The affected element, which was changed (added, removed, modified) in the collection.
*/
public Object getChangedElement() {
if ( changedElement instanceof Pair ) {
return ( (Pair) changedElement ).getSecond();
}
if ( changedElement instanceof Map.Entry ) {
return ( (Map.Entry) changedElement ).getValue();
}
return changedElement;
}
/**
* @return Index of the affected element, or {@code null} if the collection isn't indexed.
*/
public Object getChangedElementIndex() {
if ( changedElement instanceof Pair ) {
return ( (Pair) changedElement ).getFirst();
}
if ( changedElement instanceof Map.Entry ) {
return ( (Map.Entry) changedElement ).getKey();
}
return null;
}
}
|
PersistentCollectionChangeData
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/NonApiTypeTest.java
|
{
"start": 6034,
"end": 6728
}
|
class ____ {
// BUG: Diagnostic contains: ImmutableIntArray
public int[] testInts() {
return null;
}
// BUG: Diagnostic contains: ImmutableDoubleArray
public void testDoubles1(double[] values) {}
// BUG: Diagnostic contains: ImmutableDoubleArray
public void testDoubles2(Double[] values) {}
}
""")
.doTest();
}
@Test
public void protoTime() {
helper
.addSourceLines(
"Test.java",
"""
import com.google.protobuf.Duration;
import com.google.protobuf.Timestamp;
public
|
Test
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/config/LocationAwareReliabilityStrategy.java
|
{
"start": 1433,
"end": 1973
}
|
class ____ of the caller.
* @param location The location of the caller or null.
* @param marker A Marker or null if none is present.
* @param level The event Level.
* @param data The Message.
* @param t A Throwable or null.
* @since 3.0
*/
void log(
Supplier<LoggerConfig> reconfigured,
String loggerName,
String fqcn,
StackTraceElement location,
Marker marker,
Level level,
Message data,
Throwable t);
}
|
name
|
java
|
quarkusio__quarkus
|
extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/SecurityContextConfig.java
|
{
"start": 3639,
"end": 4133
}
|
enum ____ {
/**
* It indicates that volume's ownership and permissions will be changed only when permission and ownership of root
* directory does not match with expected permissions on the volume.
*/
OnRootMismatch,
/**
* It indicates that volume's ownership and permissions should always be changed whenever volume is mounted inside a
* Pod. This the default behavior.
*/
Always;
}
}
|
PodFSGroupChangePolicy
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/support/EntityManagerBeanDefinitionRegistrarPostProcessorIntegrationTests.java
|
{
"start": 1707,
"end": 2144
}
|
class ____ {
@Autowired EntityManagerInjectionTarget target;
@Test // DATAJPA-445
void injectsEntityManagerIntoConstructors() {
assertThat(target).isNotNull();
assertThat(target.firstEm).isNotNull();
assertThat(target.primaryEm).isNotNull();
}
@Configuration
@Import(EntityManagerInjectionTarget.class)
@ImportResource("classpath:infrastructure.xml")
static
|
EntityManagerBeanDefinitionRegistrarPostProcessorIntegrationTests
|
java
|
apache__camel
|
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/QuickfixComponentBuilderFactory.java
|
{
"start": 1416,
"end": 1924
}
|
interface ____ {
/**
* QuickFix (camel-quickfix)
* Open a Financial Interchange (FIX) session using an embedded QuickFix/J
* engine.
*
* Category: messaging
* Since: 2.1
* Maven coordinates: org.apache.camel:camel-quickfix
*
* @return the dsl builder
*/
static QuickfixComponentBuilder quickfix() {
return new QuickfixComponentBuilderImpl();
}
/**
* Builder for the QuickFix component.
*/
|
QuickfixComponentBuilderFactory
|
java
|
apache__flink
|
flink-kubernetes/src/main/java/org/apache/flink/kubernetes/configuration/KubernetesLeaderElectionConfiguration.java
|
{
"start": 1070,
"end": 2436
}
|
class ____ {
private final String clusterId;
private final String configMapName;
private final String lockIdentity;
private final Duration leaseDuration;
private final Duration renewDeadline;
private final Duration retryPeriod;
public KubernetesLeaderElectionConfiguration(
String configMapName, String lockIdentity, Configuration config) {
this.clusterId = config.get(KubernetesConfigOptions.CLUSTER_ID);
this.configMapName = configMapName;
this.lockIdentity = lockIdentity;
this.leaseDuration =
config.get(KubernetesHighAvailabilityOptions.KUBERNETES_LEASE_DURATION);
this.renewDeadline =
config.get(KubernetesHighAvailabilityOptions.KUBERNETES_RENEW_DEADLINE);
this.retryPeriod = config.get(KubernetesHighAvailabilityOptions.KUBERNETES_RETRY_PERIOD);
}
public String getClusterId() {
return clusterId;
}
public String getConfigMapName() {
return configMapName;
}
public String getLockIdentity() {
return lockIdentity;
}
public Duration getLeaseDuration() {
return leaseDuration;
}
public Duration getRenewDeadline() {
return renewDeadline;
}
public Duration getRetryPeriod() {
return retryPeriod;
}
}
|
KubernetesLeaderElectionConfiguration
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/InfluxDb2EndpointBuilderFactory.java
|
{
"start": 1571,
"end": 7701
}
|
interface ____
extends
EndpointProducerBuilder {
default AdvancedInfluxDb2EndpointBuilder advanced() {
return (AdvancedInfluxDb2EndpointBuilder) this;
}
/**
* Define if we want to auto create the bucket if it's not present.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: producer
*
* @param autoCreateBucket the value to set
* @return the dsl builder
*/
default InfluxDb2EndpointBuilder autoCreateBucket(boolean autoCreateBucket) {
doSetProperty("autoCreateBucket", autoCreateBucket);
return this;
}
/**
* Define if we want to auto create the bucket if it's not present.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: producer
*
* @param autoCreateBucket the value to set
* @return the dsl builder
*/
default InfluxDb2EndpointBuilder autoCreateBucket(String autoCreateBucket) {
doSetProperty("autoCreateBucket", autoCreateBucket);
return this;
}
/**
* Define if we want to auto create the organization if it's not
* present.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: producer
*
* @param autoCreateOrg the value to set
* @return the dsl builder
*/
default InfluxDb2EndpointBuilder autoCreateOrg(boolean autoCreateOrg) {
doSetProperty("autoCreateOrg", autoCreateOrg);
return this;
}
/**
* Define if we want to auto create the organization if it's not
* present.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: producer
*
* @param autoCreateOrg the value to set
* @return the dsl builder
*/
default InfluxDb2EndpointBuilder autoCreateOrg(String autoCreateOrg) {
doSetProperty("autoCreateOrg", autoCreateOrg);
return this;
}
/**
* The name of the bucket where the time series will be stored.
*
* The option is a: <code>java.lang.String</code> type.
*
* Required: true
* Group: producer
*
* @param bucket the value to set
* @return the dsl builder
*/
default InfluxDb2EndpointBuilder bucket(String bucket) {
doSetProperty("bucket", bucket);
return this;
}
/**
* Define if this operation is an insert of ping.
*
* The option is a:
* <code>org.apache.camel.component.influxdb2.enums.Operation</code>
* type.
*
* Default: INSERT
* Group: producer
*
* @param operation the value to set
* @return the dsl builder
*/
default InfluxDb2EndpointBuilder operation(org.apache.camel.component.influxdb2.enums.Operation operation) {
doSetProperty("operation", operation);
return this;
}
/**
* Define if this operation is an insert of ping.
*
* The option will be converted to a
* <code>org.apache.camel.component.influxdb2.enums.Operation</code>
* type.
*
* Default: INSERT
* Group: producer
*
* @param operation the value to set
* @return the dsl builder
*/
default InfluxDb2EndpointBuilder operation(String operation) {
doSetProperty("operation", operation);
return this;
}
/**
* The name of the organization where the time series will be stored.
*
* The option is a: <code>java.lang.String</code> type.
*
* Required: true
* Group: producer
*
* @param org the value to set
* @return the dsl builder
*/
default InfluxDb2EndpointBuilder org(String org) {
doSetProperty("org", org);
return this;
}
/**
* Define the retention policy to the data created by the endpoint.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: default
* Group: producer
*
* @param retentionPolicy the value to set
* @return the dsl builder
*/
default InfluxDb2EndpointBuilder retentionPolicy(String retentionPolicy) {
doSetProperty("retentionPolicy", retentionPolicy);
return this;
}
/**
* The format or precision of time series timestamps.
*
* The option is a:
* <code>com.influxdb.client.domain.WritePrecision</code> type.
*
* Default: ms
* Group: producer
*
* @param writePrecision the value to set
* @return the dsl builder
*/
default InfluxDb2EndpointBuilder writePrecision(com.influxdb.client.domain.WritePrecision writePrecision) {
doSetProperty("writePrecision", writePrecision);
return this;
}
/**
* The format or precision of time series timestamps.
*
* The option will be converted to a
* <code>com.influxdb.client.domain.WritePrecision</code> type.
*
* Default: ms
* Group: producer
*
* @param writePrecision the value to set
* @return the dsl builder
*/
default InfluxDb2EndpointBuilder writePrecision(String writePrecision) {
doSetProperty("writePrecision", writePrecision);
return this;
}
}
/**
* Advanced builder for endpoint for the InfluxDB2 component.
*/
public
|
InfluxDb2EndpointBuilder
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/path/PathAssert_content_Test.java
|
{
"start": 1140,
"end": 2127
}
|
class ____ extends PathAssertBaseTest implements NavigationMethodWithComparatorBaseTest<PathAssert> {
@Override
protected PathAssert invoke_api_method() {
assertions.content();
return assertions;
}
@Override
protected void verify_internal_effects() {
verify(paths).assertIsReadable(getInfo(assertions), getActual(assertions));
}
@Override
protected PathAssert create_assertions() {
return new PathAssert(resourcePath("actual_file.txt"));
}
@Test
public void should_return_StringAssert_on_path_content() {
// GIVEN
Path path = resourcePath("actual_file.txt");
// WHEN
AbstractStringAssert<?> stringAssert = assertThat(path).content();
// THEN
stringAssert.isEqualTo("actual%n".formatted());
}
@Override
public PathAssert getAssertion() {
return assertions;
}
@Override
public AbstractAssert<?, ?> invoke_navigation_method(PathAssert assertion) {
return assertion.content();
}
}
|
PathAssert_content_Test
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/api/AbstractObjectAssert.java
|
{
"start": 15932,
"end": 37235
}
|
class ____ {
*
* private String name;
* public int age;
*
* public String getName() {
* return this.name;
* }
* }
*
* TolkienCharacter frodo = new TolkienCharacter("Frodo", 33);
*
* // assertion succeeds:
* assertThat(frodo).hasOnlyFields("name", "age");
*
* // assertions fail:
* assertThat(frodo).hasOnlyFields("name");
* assertThat(frodo).hasOnlyFields("not_exists");
* assertThat(frodo).hasOnlyFields(null);</code></pre>
*
* @param expectedFieldNames the expected field names actual should have
* @return {@code this} assertion object.
* @throws AssertionError if the actual object is {@code null}.
* @throws IllegalArgumentException if expectedFieldNames is {@code null}.
* @throws AssertionError if the actual object does not have the expected fields (without extra ones)
* @since 3.19.0
*/
public SELF hasOnlyFields(String... expectedFieldNames) {
objects.assertHasOnlyFields(info, actual, expectedFieldNames);
return myself;
}
/**
* Extracts the values of given fields/properties from the object under test into a list, this new list becoming
* the object under test.
* <p>
* If you extract "id", "name" and "email" fields/properties then the list will contain the id, name and email values
* of the object under test, you can then perform list assertions on the extracted values.
* <p>
* If the object under test is a {@link Map} with {@link String} keys, extracting will extract values matching the given fields/properties.
* <p>
* Nested fields/properties are supported, specifying "address.street.number" is equivalent to:
* <pre><code class='java'> // "address.street.number" corresponding to pojo properties
* actual.getAddress().getStreet().getNumber();</code></pre>
* or if address is a {@link Map}:
* <pre><code class='java'> // "address" is a Map property (that is getAddress() returns a Map)
* actual.getAddress().get("street").getNumber();</code></pre>
* <p>
* Private fields can be extracted unless you call {@link Assertions#setAllowExtractingPrivateFields(boolean) Assertions.setAllowExtractingPrivateFields(false)}.
* <p>
* Example:
* <pre><code class='java'> // Create frodo, setting its name, age and Race (Race having a name property)
* TolkienCharacter frodo = new TolkienCharacter("Frodo", 33, HOBBIT);
*
* // let's verify Frodo's name, age and race name:
* assertThat(frodo).extracting("name", "age", "race.name")
* .containsExactly("Frodo", 33, "Hobbit");</code></pre>
* <p>
* A property with the given name is looked for first, if it doesn't exist then a field with the given name is looked
* for, if the field is not accessible (i.e. does not exist) an {@link IntrospectionError} is thrown.
* <p>
* Note that the order of extracted values is consistent with the order of the given property/field.
*
* @param propertiesOrFields the properties/fields to extract from the initial object under test
* @return a new assertion object whose object under test is the list containing the extracted properties/fields values
* @throws IntrospectionError if one of the given name does not match a field or property
*/
@CheckReturnValue
public AbstractListAssert<?, List<?>, Object, ObjectAssert<Object>> extracting(String... propertiesOrFields) {
Tuple values = byName(propertiesOrFields).apply(actual);
String extractedPropertiesOrFieldsDescription = extractedDescriptionOf(propertiesOrFields);
String description = mostRelevantDescription(info.description(), extractedPropertiesOrFieldsDescription);
return newListAssertInstance(values.toList()).withAssertionState(myself).as(description);
}
/**
* Extracts the value of given field/property from the object under test, the extracted value becoming the new object under test.
* <p>
* If the object under test is a {@link Map}, the {@code propertyOrField} parameter is used as a key to the map.
* <p>
* Nested fields/properties are supported, specifying "address.street.number" is equivalent to:
* <pre><code class='java'> // "address.street.number" corresponding to pojo properties
* actual.getAddress().getStreet().getNumber();</code></pre>
* or if address is a {@link Map}:
* <pre><code class='java'> // "address" is a Map property (that is getAddress() returns a Map)
* actual.getAddress().get("street").getNumber();</code></pre>
* <p>
* Private field can be extracted unless you call {@link Assertions#setAllowExtractingPrivateFields(boolean) Assertions.setAllowExtractingPrivateFields(false)}.
* <p>
* Note that since the value is extracted as an Object, only Object assertions can be chained after extracting.
* <p>
* Example:
* <pre><code class='java'> // Create frodo, setting its name, age and Race (Race having a name property)
* TolkienCharacter frodo = new TolkienCharacter("Frodo", 33, HOBBIT);
*
* // let's extract and verify Frodo's name:
* assertThat(frodo).extracting("name")
* .isEqualTo("Frodo");
* // or its race name:
* assertThat(frodo).extracting("race.name")
* .isEqualTo("Hobbit");
*
* // The extracted value being a String, we would like to use String assertions but we can't due to Java generics limitations.
* // The following assertion does NOT compile:
* assertThat(frodo).extracting("name")
* .startsWith("Fro");
*
* // To get String assertions, use {@link #extracting(String, InstanceOfAssertFactory)}:
* assertThat(frodo).extracting("name", as(InstanceOfAssertFactories.STRING))
* .startsWith("Fro");</code></pre>
* <p>
* A property with the given name is looked for first, if it doesn't exist then a field with the given name is looked
* for, if the field is not accessible (i.e. does not exist) an {@link IntrospectionError} is thrown.
*
* @param propertyOrField the property/field to extract from the initial object under test
* @return a new {@link ObjectAssert} instance whose object under test is the extracted property/field value
* @throws IntrospectionError if one of the given name does not match a field or property
* @see #extracting(String, InstanceOfAssertFactory)
* @since 3.13.0
*/
@CheckReturnValue
public AbstractObjectAssert<?, ?> extracting(String propertyOrField) {
AssertFactory<Object, AbstractObjectAssert<?, Object>> assertFactory = this::newObjectAssert;
return super.extracting(propertyOrField, assertFactory);
}
/**
* Extracts the value of given field/property from the object under test, the extracted value becoming the new object under test.
* <p>
* If the object under test is a {@link Map}, the {@code propertyOrField} parameter is used as a key to the map.
* <p>
* Nested field/property is supported, specifying "address.street.number" is equivalent to get the value
* corresponding to actual.getAddress().getStreet().getNumber()
* <p>
* Private field can be extracted unless you call {@link Assertions#setAllowExtractingPrivateFields(boolean) Assertions.setAllowExtractingPrivateFields(false)}.
* <p>
* The {@code assertFactory} parameter allows to specify an {@link InstanceOfAssertFactory}, which is used to get the
* assertions narrowed to the factory type.
* <p>
* Wrapping the given {@link InstanceOfAssertFactory} with {@link Assertions#as(InstanceOfAssertFactory)} makes the
* assertion more readable.
* <p>
* Example:
* <pre><code class='java'> // Create frodo, setting its name, age and Race (Race having a name property)
* TolkienCharacter frodo = new TolkienCharacter("Frodo", 33, HOBBIT);
*
* // let's extract and verify Frodo's name:
* assertThat(frodo).extracting("name", as(InstanceOfAssertFactories.STRING))
* .startsWith("Fro");
*
* // The following assertion will fail as Frodo's name is not an Integer:
* assertThat(frodo).extracting("name", as(InstanceOfAssertFactories.INTEGER))
* .isZero();</code></pre>
* <p>
* A property with the given name is looked for first, if it doesn't exist then a field with the given name is looked
* for, if the field is not accessible (i.e. does not exist) an {@link IntrospectionError} is thrown.
*
* @param <ASSERT> the type of the resulting {@code Assert}
* @param propertyOrField the property/field to extract from the initial object under test
* @param assertFactory the factory which verifies the type and creates the new {@code Assert}
* @return a new narrowed {@link Assert} instance whose object under test is the extracted property/field value
* @throws NullPointerException if the given factory is {@code null}
* @throws IntrospectionError if one of the given name does not match a field or property
* @since 3.14.0
*/
@CheckReturnValue
public <ASSERT extends AbstractAssert<?, ?>> ASSERT extracting(String propertyOrField,
InstanceOfAssertFactory<?, ASSERT> assertFactory) {
AssertFactory<Object, AbstractObjectAssert<?, Object>> assertFactory1 = this::newObjectAssert;
return super.extracting(propertyOrField, assertFactory1).asInstanceOf(assertFactory);
}
/**
* Uses the given {@link Function}s to extract the values from the object under test into a list, this new list becoming
* the object under test.
* <p>
* If the given {@link Function}s extract the id, name and email values then the list will contain the id, name and email values
* of the object under test, you can then perform list assertions on the extracted values.
* <p>
* Example:
* <pre><code class='java'> // Create frodo, setting its name, age and Race (Race having a name property)
* TolkienCharacter frodo = new TolkienCharacter("Frodo", 33, HOBBIT);
*
* // let's verify Frodo's name, age and race name:
* assertThat(frodo).extracting(TolkienCharacter::getName,
* character -> character.age, // public field
* character -> character.getRace().getName())
* .containsExactly("Frodo", 33, "Hobbit");</code></pre>
* <p>
* Note that the order of extracted values is consistent with the order of given extractor functions.
*
* @param extractors the extractor functions to extract values from the Object under test.
* @return a new assertion object whose object under test is the list containing the extracted values
*/
@CheckReturnValue
@SafeVarargs
public final AbstractListAssert<?, List<?>, Object, ObjectAssert<Object>> extracting(Function<? super ACTUAL, ?>... extractors) {
return extractingForProxy(extractors);
}
// This method is protected in order to be proxied for SoftAssertions / Assumptions.
// The public method for it (the one not ending with "ForProxy") is marked as final and annotated with @SafeVarargs
// in order to avoid compiler warning in user code
protected AbstractListAssert<?, List<?>, Object, ObjectAssert<Object>> extractingForProxy(Function<? super ACTUAL, ?>[] extractors) {
requireNonNull(extractors, shouldNotBeNull("extractors")::create);
List<Object> values = Stream.of(extractors)
.map(extractor -> extractor.apply(actual))
.collect(toList());
return newListAssertInstance(values).withAssertionState(myself);
}
/**
* Uses the given {@link Function} to extract a value from the object under test, the extracted value becoming the new object under test.
* <p>
* Note that since the value is extracted as an Object, only Object assertions can be chained after extracting.
* <p>
* Example:
* <pre><code class='java'> // Create frodo, setting its name, age and Race
* TolkienCharacter frodo = new TolkienCharacter("Frodo", 33, HOBBIT);
*
* // let's extract and verify Frodo's name:
* assertThat(frodo).extracting(TolkienCharacter::getName)
* .isEqualTo("Frodo");
*
* // The extracted value being a String, we would like to use String assertions but we can't due to Java generics limitations.
* // The following assertion does NOT compile:
* assertThat(frodo).extracting(TolkienCharacter::getName)
* .startsWith("Fro");
*
* // To get String assertions, use {@link #extracting(Function, InstanceOfAssertFactory)}:
* assertThat(frodo).extracting(TolkienCharacter::getName, as(InstanceOfAssertFactories.STRING))
* .startsWith("Fro");</code></pre>
*
* @param <T> the expected extracted value type.
* @param extractor the extractor function used to extract the value from the object under test.
* @return a new {@link ObjectAssert} instance whose object under test is the extracted value
* @see #extracting(Function, InstanceOfAssertFactory)
* @since 3.11.0
*/
@CheckReturnValue
public <T> AbstractObjectAssert<?, T> extracting(Function<? super ACTUAL, T> extractor) {
AssertFactory<T, AbstractObjectAssert<?, T>> assertFactory = this::newObjectAssert;
return super.extracting(extractor, assertFactory);
}
/**
* Uses the given {@link Function} to extract a value from the object under test, the extracted value becoming the new object under test.
* <p>
* Note that since the value is extracted as an Object, only Object assertions can be chained after extracting.
* <p>
* The {@code assertFactory} parameter allows to specify an {@link InstanceOfAssertFactory}, which is used to get the
* assertions narrowed to the factory type.
* <p>
* Wrapping the given {@link InstanceOfAssertFactory} with {@link Assertions#as(InstanceOfAssertFactory)} makes the
* assertion more readable.
* <p>
* Example:
* <pre><code class='java'> // Create frodo, setting its name, age and Race
* TolkienCharacter frodo = new TolkienCharacter("Frodo", 33, HOBBIT);
*
* // let's extract and verify Frodo's name:
* assertThat(frodo).extracting(TolkienCharacter::getName, as(InstanceOfAssertFactories.STRING))
* .startsWith("Fro");
*
* // The following assertion will fail as Frodo's name is not an Integer:
* assertThat(frodo).extracting(TolkienCharacter::getName, as(InstanceOfAssertFactories.INTEGER))
* .isZero();</code></pre>
*
* @param <T> the expected extracted value type
* @param <ASSERT> the type of the resulting {@code Assert}
* @param extractor the extractor function used to extract the value from the object under test
* @param assertFactory the factory which verifies the type and creates the new {@code Assert}
* @return a new narrowed {@link Assert} instance whose object under test is the extracted value
* @throws NullPointerException if the given factory is {@code null}
* @since 3.14.0
*/
@CheckReturnValue
public <T, ASSERT extends AbstractAssert<?, ?>> ASSERT extracting(Function<? super ACTUAL, T> extractor,
InstanceOfAssertFactory<?, ASSERT> assertFactory) {
AssertFactory<T, AbstractObjectAssert<?, T>> factory = this::newObjectAssert;
return super.extracting(extractor, factory).asInstanceOf(assertFactory);
}
/**
* Verifies that the object under test returns the given expected value from the given {@link Function},
* a typical usage is to pass a method reference to assert object's property.
* <p>
* Wrapping the given {@link Function} with {@link Assertions#from(Function)} makes the assertion more readable.
* <p>
* The assertion supports custom comparators, configurable with {@link #usingComparatorForType(Comparator, Class)}.
* <p>
* Example:
* <pre><code class="java"> import static org.assertj.core.api.Assertions.from;
* // from is not mandatory but it makes the assertions more readable
* assertThat(frodo).returns("Frodo", from(TolkienCharacter::getName))
* .returns("Frodo", TolkienCharacter::getName) // no from :(
* .returns(HOBBIT, from(TolkienCharacter::getRace));</code></pre>
*
* @param expected the value the object under test method's call should return.
* @param from {@link Function} used to acquire the value to test from the object under test. Must not be {@code null}
* @param <T> the expected value type the given {@code method} returns.
* @return {@code this} assertion object.
* @throws AssertionError if {@code actual} is {@code null}
* @throws NullPointerException if given {@code from} function is null
* @see #usingComparatorForType(Comparator, Class)
*/
public <T> SELF returns(T expected, Function<ACTUAL, T> from) {
requireNonNull(from, "The given getter method/Function must not be null");
isNotNull();
Objects objects = getComparatorBasedObjectAssertions(expected);
objects.assertEqual(info, from.apply(actual), expected);
return myself;
}
/**
* This is an overload of {@link #returns(Object, Function)} with a description that will show up in the error
* message if the assertion fails (like calling {@link #as(String, Object...) as(String)} before the assertion).
* <p>
* Example:
* <pre><code class="java"> import static org.assertj.core.api.Assertions.from;
* // from is not mandatory but it makes the assertions more readable
* assertThat(frodo).returns("Frodo", from(TolkienCharacter::getName), "name check");
* // the previous assertion is equivalent to:
* assertThat(frodo).as("name check").returns("Frodo", from(TolkienCharacter::getName));</code></pre>
*
* @param expected the value the object under test method's call should return.
* @param from {@link Function} used to acquire the value to test from the object under test. Must not be {@code null}
* @param <T> the expected value type the given {@code method} returns.
* @param description the description that you hope to show in return.
* @return {@code this} assertion object.
* @throws NullPointerException if given {@code from} function is null
*/
public <T> SELF returns(T expected, Function<ACTUAL, T> from, String description) {
as(description);
return returns(expected, from);
}
/**
 * Verifies that the object under test does not return the given expected value from the given {@link Function},
 * a typical usage is to pass a method reference to assert object's property.
 * <p>
 * Wrapping the given {@link Function} with {@link Assertions#from(Function)} makes the assertion more readable.
 * <p>
 * The assertion supports custom comparators, configurable with {@link #usingComparatorForType(Comparator, Class)}.
 * <p>
 * Example:
 * <pre><code class="java"> // from is not mandatory but it makes the assertions more readable
 * assertThat(frodo).doesNotReturn("Bilbo", from(TolkienCharacter::getName))
 *                  .doesNotReturn("Bilbo", TolkienCharacter::getName) // no from :(
 *                  .doesNotReturn(null, from(TolkienCharacter::getRace));</code></pre>
 *
 * @param expected the value the object under test method's call should not return.
 * @param from {@link Function} used to acquire the value to test from the object under test. Must not be {@code null}
 * @param <T> the expected value type the given {@code method} returns.
 * @return {@code this} assertion object.
 * @throws AssertionError if {@code actual} is {@code null}
 * @throws NullPointerException if given {@code from} function is null
 * @see #usingComparatorForType(Comparator, Class)
 * @since 3.22.0
 */
public <T> SELF doesNotReturn(T expected, Function<ACTUAL, T> from) {
    // Fail fast on a null getter, then ensure the object under test itself is non-null.
    requireNonNull(from, "The given getter method/Function must not be null");
    isNotNull();
    // Pick up a type-registered comparator (usingComparatorForType) for the expected value, if any.
    Objects objects = getComparatorBasedObjectAssertions(expected);
    objects.assertNotEqual(info, from.apply(actual), expected);
    return myself;
}
/**
 * Returns the {@code Objects} helper to use for comparing the given value: when a comparator
 * has been registered for the value's runtime type, a comparison strategy backed by that
 * comparator is used; otherwise the default {@code objects} instance is returned.
 */
private Objects getComparatorBasedObjectAssertions(Object value) {
    if (value != null) {
        TypeComparators registeredComparators = getComparatorsByType();
        Class<?> valueType = value.getClass();
        if (registeredComparators.hasComparatorForType(valueType)) {
            Comparator<?> typeComparator = registeredComparators.getComparatorForType(valueType);
            return new Objects(new ComparatorBasedComparisonStrategy(typeComparator));
        }
    }
    return objects;
}
/**
* Enable using a recursive field by field comparison strategy when calling the chained {@link RecursiveComparisonAssert#isEqualTo(Object) isEqualTo} assertion.
* <p>
* The detailed documentation is available here: <a href="https://assertj.github.io/doc/#assertj-core-recursive-comparison">https://assertj.github.io/doc/#assertj-core-recursive-comparison</a>.
* <p>
* Example:
* <pre><code class='java'> public
|
TolkienCharacter
|
java
|
spring-projects__spring-boot
|
core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/AutoConfigurationExcludeFilterTests.java
|
{
"start": 2574,
"end": 2788
}
|
// Test-only filter variant: reports exactly the FILTERED class as the set of
// known auto-configurations, so the exclusion logic can be exercised in isolation.
class ____ extends AutoConfigurationExcludeFilter {
    @Override
    protected List<String> getAutoConfigurations() {
        // Single-element list containing only the class under test.
        return Collections.singletonList(FILTERED.getName());
    }
}
}
|
TestAutoConfigurationExcludeFilter
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestRouterMissingFolderMulti.java
|
{
"start": 2710,
"end": 7041
}
|
// Tests Router behavior when a mount point spans multiple subclusters and a
// folder is missing from one or all of them (listing / content-summary paths).
class ____ {
    private static final Logger LOG =
        LoggerFactory.getLogger(TestRouterMissingFolderMulti.class);
    /** Number of files to create for testing. */
    private static final int NUM_FILES = 10;
    /** Namenodes for the test per name service id (subcluster). */
    private Map<String, MockNamenode> namenodes = new HashMap<>();
    /** Routers for the test. */
    private Router router;
    @BeforeEach
    public void setup() throws Exception {
        LOG.info("Start the Namenodes");
        Configuration nnConf = new HdfsConfiguration();
        nnConf.setInt(DFSConfigKeys.DFS_NAMENODE_HANDLER_COUNT_KEY, 10);
        // Two mock subclusters, each with an active NN backed by a mock FS.
        for (final String nsId : asList("ns0", "ns1")) {
            MockNamenode nn = new MockNamenode(nsId, nnConf);
            nn.transitionToActive();
            nn.addFileSystemMock();
            namenodes.put(nsId, nn);
        }
        LOG.info("Start the Routers");
        Configuration routerConf = new RouterConfigBuilder()
            .stateStore()
            .admin()
            .rpc()
            .build();
        // Bind RPC/HTTP/admin to ephemeral ports so tests don't collide.
        routerConf.set(RBFConfigKeys.DFS_ROUTER_RPC_ADDRESS_KEY, "0.0.0.0:0");
        routerConf.set(RBFConfigKeys.DFS_ROUTER_HTTP_ADDRESS_KEY, "0.0.0.0:0");
        routerConf.set(RBFConfigKeys.DFS_ROUTER_ADMIN_ADDRESS_KEY, "0.0.0.0:0");
        Configuration stateStoreConf = getStateStoreConfiguration();
        stateStoreConf.setClass(
            RBFConfigKeys.FEDERATION_NAMENODE_RESOLVER_CLIENT_CLASS,
            MembershipNamenodeResolver.class, ActiveNamenodeResolver.class);
        stateStoreConf.setClass(
            RBFConfigKeys.FEDERATION_FILE_RESOLVER_CLIENT_CLASS,
            MultipleDestinationMountTableResolver.class,
            FileSubclusterResolver.class);
        routerConf.addResource(stateStoreConf);
        // Disallow partial listings: operations should fail when a subcluster
        // is missing the folder, which is the behavior these tests assert.
        routerConf.setBoolean(RBFConfigKeys.DFS_ROUTER_ALLOW_PARTIAL_LIST, false);
        router = new Router();
        router.init(routerConf);
        router.start();
        LOG.info("Registering the subclusters in the Routers");
        registerSubclusters(router, namenodes.values());
    }
    @AfterEach
    public void cleanup() throws Exception {
        LOG.info("Stopping the cluster");
        for (final MockNamenode nn : namenodes.values()) {
            nn.stop();
        }
        namenodes.clear();
        if (router != null) {
            router.stop();
            router = null;
        }
    }
    // Folder exists in every subcluster: listing and content summary succeed.
    @Test
    public void testSuccess() throws Exception {
        FileSystem fs = getFileSystem(router);
        String mountPoint = "/test-success";
        createMountTableEntry(router, mountPoint,
            DestinationOrder.HASH_ALL, namenodes.keySet());
        Path folder = new Path(mountPoint, "folder-all");
        for (int i = 0; i < NUM_FILES; i++) {
            Path file = new Path(folder, "file-" + i + ".txt");
            FSDataOutputStream os = fs.create(file);
            os.close();
        }
        FileStatus[] files = fs.listStatus(folder);
        assertEquals(NUM_FILES, files.length);
        ContentSummary contentSummary = fs.getContentSummary(folder);
        assertEquals(NUM_FILES, contentSummary.getFileCount());
    }
    // Folder exists in no subcluster: both operations throw FileNotFoundException.
    @Test
    public void testFileNotFound() throws Exception {
        FileSystem fs = getFileSystem(router);
        String mountPoint = "/test-non-existing";
        createMountTableEntry(router,
            mountPoint, DestinationOrder.HASH_ALL, namenodes.keySet());
        Path path = new Path(mountPoint, "folder-all");
        LambdaTestUtils.intercept(FileNotFoundException.class,
            () -> fs.listStatus(path));
        LambdaTestUtils.intercept(FileNotFoundException.class,
            () -> fs.getContentSummary(path));
    }
    // Folder exists in only one subcluster (created directly on ns0's NN, bypassing
    // the Router): results from the subcluster that has it are still returned.
    @Test
    public void testOneMissing() throws Exception {
        FileSystem fs = getFileSystem(router);
        String mountPoint = "/test-one-missing";
        createMountTableEntry(router, mountPoint,
            DestinationOrder.HASH_ALL, namenodes.keySet());
        // Create the folders directly in only one of the Namenodes
        MockNamenode nn = namenodes.get("ns0");
        int nnRpcPort = nn.getRPCPort();
        FileSystem nnFs = getFileSystem(nnRpcPort);
        Path folder = new Path(mountPoint, "folder-all");
        for (int i = 0; i < NUM_FILES; i++) {
            Path file = new Path(folder, "file-" + i + ".txt");
            FSDataOutputStream os = nnFs.create(file);
            os.close();
        }
        FileStatus[] files = fs.listStatus(folder);
        assertEquals(NUM_FILES, files.length);
        ContentSummary summary = fs.getContentSummary(folder);
        assertEquals(NUM_FILES, summary.getFileAndDirectoryCount());
    }
}
|
TestRouterMissingFolderMulti
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestEstimateModelMemoryAction.java
|
{
"start": 887,
"end": 1629
}
|
/**
 * REST handler exposing the ML model-memory estimation endpoint: parses the request
 * body and forwards it to {@code EstimateModelMemoryAction}, rendering the response
 * as XContent.
 */
class ____ extends BaseRestHandler {

    @Override
    public List<Route> routes() {
        // Single POST endpoint under the anomaly detectors namespace.
        return List.of(new Route(POST, BASE_PATH + "anomaly_detectors/_estimate_model_memory"));
    }

    @Override
    public String getName() {
        return "ml_estimate_model_memory_action";
    }

    @Override
    protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
        var parser = restRequest.contentOrSourceParamParser();
        var request = EstimateModelMemoryAction.Request.parseRequest(parser);
        return channel -> client.execute(EstimateModelMemoryAction.INSTANCE, request, new RestToXContentListener<>(channel));
    }
}
|
RestEstimateModelMemoryAction
|
java
|
elastic__elasticsearch
|
libs/gpu-codec/src/main/java/org/elasticsearch/gpu/codec/MergedQuantizedVectorValues.java
|
{
"start": 7679,
"end": 8336
}
|
// DocIDMerger.Sub adapter over QuantizedByteVectorValues: exposes the values'
// doc iterator to the merger while remembering the current vector ordinal.
class ____ extends DocIDMerger.Sub {
    private final QuantizedByteVectorValues values;
    private final KnnVectorValues.DocIndexIterator iterator;
    QuantizedByteVectorValueSub(MergeState.DocMap docMap, QuantizedByteVectorValues values) {
        super(docMap);
        this.values = values;
        iterator = values.iterator();
        // A fresh iterator must be positioned before the first document.
        assert iterator.docID() == -1;
    }
    @Override
    public int nextDoc() throws IOException {
        return iterator.nextDoc();
    }
    // Ordinal of the current vector within the underlying values.
    public int index() {
        return iterator.index();
    }
}
private static
|
QuantizedByteVectorValueSub
|
java
|
spring-projects__spring-framework
|
spring-beans/src/main/java/org/springframework/beans/factory/support/RegisteredBean.java
|
{
"start": 1965,
"end": 6126
}
|
class ____ {
private final ConfigurableListableBeanFactory beanFactory;
private final Supplier<String> beanName;
private final boolean generatedBeanName;
private final Supplier<RootBeanDefinition> mergedBeanDefinition;
private final @Nullable RegisteredBean parent;
private RegisteredBean(ConfigurableListableBeanFactory beanFactory, Supplier<String> beanName,
boolean generatedBeanName, Supplier<RootBeanDefinition> mergedBeanDefinition,
@Nullable RegisteredBean parent) {
this.beanFactory = beanFactory;
this.beanName = beanName;
this.generatedBeanName = generatedBeanName;
this.mergedBeanDefinition = mergedBeanDefinition;
this.parent = parent;
}
/**
* Create a new {@link RegisteredBean} instance for a regular bean.
* @param beanFactory the source bean factory
* @param beanName the bean name
* @return a new {@link RegisteredBean} instance
*/
public static RegisteredBean of(ConfigurableListableBeanFactory beanFactory, String beanName) {
Assert.notNull(beanFactory, "'beanFactory' must not be null");
Assert.hasLength(beanName, "'beanName' must not be empty");
return new RegisteredBean(beanFactory, () -> beanName, false,
() -> (RootBeanDefinition) beanFactory.getMergedBeanDefinition(beanName),
null);
}
/**
* Create a new {@link RegisteredBean} instance for a regular bean.
* @param beanFactory the source bean factory
* @param beanName the bean name
* @param mbd the pre-determined merged bean definition
* @return a new {@link RegisteredBean} instance
* @since 6.0.7
*/
static RegisteredBean of(ConfigurableListableBeanFactory beanFactory, String beanName, RootBeanDefinition mbd) {
return new RegisteredBean(beanFactory, () -> beanName, false, () -> mbd, null);
}
/**
* Create a new {@link RegisteredBean} instance for an inner-bean.
* @param parent the parent of the inner-bean
* @param innerBean a {@link BeanDefinitionHolder} for the inner bean
* @return a new {@link RegisteredBean} instance
*/
public static RegisteredBean ofInnerBean(RegisteredBean parent, BeanDefinitionHolder innerBean) {
Assert.notNull(innerBean, "'innerBean' must not be null");
return ofInnerBean(parent, innerBean.getBeanName(), innerBean.getBeanDefinition());
}
/**
* Create a new {@link RegisteredBean} instance for an inner-bean.
* @param parent the parent of the inner-bean
* @param innerBeanDefinition the inner-bean definition
* @return a new {@link RegisteredBean} instance
*/
public static RegisteredBean ofInnerBean(RegisteredBean parent, BeanDefinition innerBeanDefinition) {
return ofInnerBean(parent, null, innerBeanDefinition);
}
/**
* Create a new {@link RegisteredBean} instance for an inner-bean.
* @param parent the parent of the inner-bean
* @param innerBeanName the name of the inner bean or {@code null} to
* generate a name
* @param innerBeanDefinition the inner-bean definition
* @return a new {@link RegisteredBean} instance
*/
public static RegisteredBean ofInnerBean(RegisteredBean parent,
@Nullable String innerBeanName, BeanDefinition innerBeanDefinition) {
Assert.notNull(parent, "'parent' must not be null");
Assert.notNull(innerBeanDefinition, "'innerBeanDefinition' must not be null");
InnerBeanResolver resolver = new InnerBeanResolver(parent, innerBeanName, innerBeanDefinition);
Supplier<String> beanName = (StringUtils.hasLength(innerBeanName) ?
() -> innerBeanName : resolver::resolveBeanName);
return new RegisteredBean(parent.getBeanFactory(), beanName,
innerBeanName == null, resolver::resolveMergedBeanDefinition, parent);
}
/**
* Return the name of the bean.
* @return the beanName the bean name
*/
public String getBeanName() {
return this.beanName.get();
}
/**
* Return if the bean name is generated.
* @return {@code true} if the name was generated
*/
public boolean isGeneratedBeanName() {
return this.generatedBeanName;
}
/**
* Return the bean factory containing the bean.
* @return the bean factory
*/
public ConfigurableListableBeanFactory getBeanFactory() {
return this.beanFactory;
}
/**
* Return the user-defined
|
RegisteredBean
|
java
|
quarkusio__quarkus
|
independent-projects/qute/core/src/main/java/io/quarkus/qute/FragmentSectionHelper.java
|
{
"start": 431,
"end": 2114
}
|
class ____ implements SectionHelper {
private static final String ID = "id";
// the generated id of the template that declares this fragment section
private final String generatedTemplateId;
private final String identifier;
private final Expression rendered;
FragmentSectionHelper(String identifier, Expression rendered, String generatedTemplateId) {
this.identifier = identifier;
this.rendered = rendered;
this.generatedTemplateId = generatedTemplateId;
}
public String getIdentifier() {
return identifier;
}
@Override
public CompletionStage<ResultNode> resolve(SectionResolutionContext context) {
if (isAlwaysExecuted(context)) {
return context.execute();
}
if (rendered.isLiteral()) {
return Booleans.isFalsy(rendered.getLiteral()) ? ResultNode.NOOP : context.execute();
} else {
return context.resolutionContext().evaluate(rendered).thenCompose(r -> {
return Booleans.isFalsy(r) ? ResultNode.NOOP : context.execute();
});
}
}
private boolean isAlwaysExecuted(SectionResolutionContext context) {
if (rendered == null
// executed from an include section
|| context.getParameters().containsKey(Fragment.ATTRIBUTE)) {
return true;
}
Object attribute = context.resolutionContext().getAttribute(Fragment.ATTRIBUTE);
// the attribute is set if executed separately via Template.Fragment
return attribute != null && attribute.equals(generatedTemplateId + identifier);
}
public static
|
FragmentSectionHelper
|
java
|
apache__flink
|
flink-datastream/src/test/java/org/apache/flink/datastream/impl/TestingExecutionEnvironmentFactory.java
|
{
"start": 1088,
"end": 1691
}
|
/**
 * Test factory that delegates {@link ExecutionEnvironment} creation to a caller-supplied
 * function, allowing tests to inject arbitrary environment-building behavior.
 */
class ____ implements ExecutionEnvironmentFactory {

    /** Delegate invoked to build an environment from the given configuration. */
    private final Function<Configuration, ExecutionEnvironment> environmentFactory;

    public TestingExecutionEnvironmentFactory(
            Function<Configuration, ExecutionEnvironment> createExecutionEnvironmentFunction) {
        this.environmentFactory = createExecutionEnvironmentFunction;
    }

    @Override
    public ExecutionEnvironment createExecutionEnvironment(Configuration configuration) {
        // Simply forward to the injected function.
        return environmentFactory.apply(configuration);
    }
}
|
TestingExecutionEnvironmentFactory
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/action/explain/ExplainResponse.java
|
{
"start": 1212,
"end": 6229
}
|
/**
 * Response for an explain request: whether the document exists, an optional Lucene
 * {@code Explanation} of the score, and an optional fetched {@code GetResult}.
 * Both {@code explanation} and {@code getResult} are nullable, as the constructors,
 * {@code writeTo} and {@code toXContent} all show.
 */
class ____ extends ActionResponse implements ToXContentObject {
    static final ParseField _INDEX = new ParseField("_index");
    static final ParseField _ID = new ParseField("_id");
    private static final ParseField MATCHED = new ParseField("matched");
    static final ParseField EXPLANATION = new ParseField("explanation");
    static final ParseField VALUE = new ParseField("value");
    static final ParseField DESCRIPTION = new ParseField("description");
    static final ParseField DETAILS = new ParseField("details");
    static final ParseField GET = new ParseField("get");
    private final String index;
    private final String id;
    private final boolean exists;
    // Nullable: absent when no explanation was produced.
    private Explanation explanation;
    // Nullable: absent when the source document was not fetched.
    private GetResult getResult;
    public ExplainResponse(String index, String id, boolean exists) {
        this.index = index;
        this.id = id;
        this.exists = exists;
    }
    public ExplainResponse(String index, String id, boolean exists, Explanation explanation) {
        this(index, id, exists);
        this.explanation = explanation;
    }
    public ExplainResponse(String index, String id, boolean exists, Explanation explanation, GetResult getResult) {
        this(index, id, exists, explanation);
        this.getResult = getResult;
    }
    public ExplainResponse(StreamInput in) throws IOException {
        index = in.readString();
        id = in.readString();
        exists = in.readBoolean();
        // Optional fields are preceded by a presence flag on the wire.
        if (in.readBoolean()) {
            explanation = readExplanation(in);
        }
        if (in.readBoolean()) {
            getResult = new GetResult(in);
        }
    }
    public String getIndex() {
        return index;
    }
    public String getId() {
        return id;
    }
    public Explanation getExplanation() {
        return explanation;
    }
    /** True only when an explanation is present and reports a match. */
    public boolean isMatch() {
        return explanation != null && explanation.isMatch();
    }
    public boolean hasExplanation() {
        return explanation != null;
    }
    public boolean isExists() {
        return exists;
    }
    public GetResult getGetResult() {
        return getResult;
    }
    public RestStatus status() {
        return exists ? RestStatus.OK : RestStatus.NOT_FOUND;
    }
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(index);
        out.writeString(id);
        out.writeBoolean(exists);
        if (explanation == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            writeExplanation(out, explanation);
        }
        if (getResult == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            getResult.writeTo(out);
        }
    }
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(_INDEX.getPreferredName(), index);
        builder.field(_ID.getPreferredName(), id);
        builder.field(MATCHED.getPreferredName(), isMatch());
        if (hasExplanation()) {
            builder.startObject(EXPLANATION.getPreferredName());
            buildExplanation(builder, explanation);
            builder.endObject();
        }
        if (getResult != null) {
            builder.startObject(GET.getPreferredName());
            getResult.toXContentEmbedded(builder, params);
            builder.endObject();
        }
        builder.endObject();
        return builder;
    }
    // Recursively renders an Explanation (value, description, nested details).
    private static void buildExplanation(XContentBuilder builder, Explanation explanation) throws IOException {
        builder.field(VALUE.getPreferredName(), explanation.getValue());
        builder.field(DESCRIPTION.getPreferredName(), explanation.getDescription());
        Explanation[] innerExps = explanation.getDetails();
        if (innerExps != null) {
            builder.startArray(DETAILS.getPreferredName());
            for (Explanation exp : innerExps) {
                builder.startObject();
                buildExplanation(builder, exp);
                builder.endObject();
            }
            builder.endArray();
        }
    }
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        ExplainResponse other = (ExplainResponse) obj;
        // NOTE(review): 'exists' is deliberately left out here to preserve the
        // historical equality contract — confirm whether it should participate.
        return index.equals(other.index)
            && id.equals(other.id)
            && Objects.equals(explanation, other.explanation)
            && getResultEquals(other);
    }
    // Null-safe comparison of the optional getResult: the previous implementation
    // dereferenced getResult unconditionally and threw NPE when it was absent.
    private boolean getResultEquals(ExplainResponse other) {
        if (getResult == null || other.getResult == null) {
            return getResult == other.getResult;
        }
        return getResult.isExists() == other.getResult.isExists()
            && Objects.equals(getResult.sourceAsMap(), other.getResult.sourceAsMap())
            && Objects.equals(getResult.getFields(), other.getResult.getFields());
    }
    @Override
    public int hashCode() {
        // Null-safe: hash without the getResult components when it is absent.
        if (getResult == null) {
            return Objects.hash(index, id, explanation);
        }
        return Objects.hash(index, id, explanation, getResult.isExists(), getResult.sourceAsMap(), getResult.getFields());
    }
}
|
ExplainResponse
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/method/annotation/MvcUriComponentsBuilder.java
|
{
"start": 32524,
"end": 32587
}
|
class ____ create URLs for method arguments.
*/
public static
|
to
|
java
|
apache__logging-log4j2
|
log4j-mongodb/src/main/java/org/apache/logging/log4j/mongodb/MongoDbProvider.java
|
{
"start": 1675,
"end": 1832
}
|
class ____ implements NoSqlProvider<MongoDb4Connection> {
/**
* Builds new {@link MongoDb4Provider} instance.
*/
public static
|
MongoDbProvider
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/engine/jdbc/env/internal/QualifiedObjectNameFormatterStandardImpl.java
|
{
"start": 820,
"end": 3178
}
|
class ____ implements QualifiedObjectNameFormatter {
private final Format format;
public QualifiedObjectNameFormatterStandardImpl(
NameQualifierSupport nameQualifierSupport,
String catalogSeparator,
boolean catalogAtEnd) {
this.format = buildFormat( nameQualifierSupport, catalogSeparator, catalogAtEnd );
}
private Format buildFormat(
NameQualifierSupport nameQualifierSupport,
String catalogSeparator,
boolean catalogAtEnd) {
return switch ( nameQualifierSupport ) {
case NONE -> NoQualifierSupportFormat.INSTANCE;
case CATALOG -> catalogAtEnd
? new NameCatalogFormat( catalogSeparator )
: new CatalogNameFormat( catalogSeparator );
case SCHEMA -> SchemaNameFormat.INSTANCE;
default -> catalogAtEnd
? new SchemaNameCatalogFormat( catalogSeparator )
: new CatalogSchemaNameFormat( catalogSeparator );
};
}
public QualifiedObjectNameFormatterStandardImpl(NameQualifierSupport nameQualifierSupport, String catalogSeparator) {
// most dbs simply do <catalog>.<schema>.<name>
this( nameQualifierSupport, catalogSeparator, false );
}
public QualifiedObjectNameFormatterStandardImpl(
NameQualifierSupport nameQualifierSupport,
DatabaseMetaData databaseMetaData) throws SQLException {
this(
nameQualifierSupport,
databaseMetaData.getCatalogSeparator(),
!databaseMetaData.isCatalogAtStart()
);
}
@Override
public String format(QualifiedTableName qualifiedTableName, Dialect dialect) {
return format.format(
qualifiedTableName.getCatalogName(),
qualifiedTableName.getSchemaName(),
qualifiedTableName.getTableName(),
dialect
);
}
private static String render(Identifier identifier, Dialect dialect) {
if ( identifier == null ) {
return null;
}
return identifier.render( dialect );
}
@Override
public String format(QualifiedSequenceName qualifiedSequenceName, Dialect dialect) {
return format.format(
qualifiedSequenceName.getCatalogName(),
qualifiedSequenceName.getSchemaName(),
qualifiedSequenceName.getSequenceName(),
dialect
);
}
@Override
public String format(QualifiedName qualifiedName, Dialect dialect) {
return format.format(
qualifiedName.getCatalogName(),
qualifiedName.getSchemaName(),
qualifiedName.getObjectName(),
dialect
);
}
private
|
QualifiedObjectNameFormatterStandardImpl
|
java
|
spring-projects__spring-boot
|
module/spring-boot-restclient/src/test/java/org/springframework/boot/restclient/autoconfigure/observation/RestTemplateObservationAutoConfigurationWithoutMetricsTests.java
|
{
"start": 2269,
"end": 3461
}
|
// Verifies that RestTemplate instrumentation records observations even when only an
// ObservationRegistry bean is present (no metrics auto-configuration in the context).
class ____ {
    private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
        .withBean(ObservationRegistry.class, TestObservationRegistry::create)
        .withConfiguration(AutoConfigurations.of(ObservationAutoConfiguration.class,
                RestTemplateAutoConfiguration.class, RestTemplateObservationAutoConfiguration.class));
    @Test
    void restTemplateCreatedWithBuilderIsInstrumented() {
        this.contextRunner.run((context) -> {
            RestTemplate restTemplate = buildRestTemplate(context);
            restTemplate.getForEntity("/projects/{project}", Void.class, "spring-boot");
            TestObservationRegistry registry = context.getBean(TestObservationRegistry.class);
            // The client observation must have been recorded under the standard name.
            assertThat(registry).hasObservationWithNameEqualToIgnoringCase("http.client.requests");
        });
    }
    // Builds a RestTemplate from the context's builder and binds it to a mock server
    // that answers the expected request with HTTP 200, so no real network call is made.
    private RestTemplate buildRestTemplate(AssertableApplicationContext context) {
        RestTemplate restTemplate = context.getBean(RestTemplateBuilder.class).build();
        MockRestServiceServer server = MockRestServiceServer.createServer(restTemplate);
        server.expect(requestTo("/projects/spring-boot")).andRespond(withStatus(HttpStatus.OK));
        return restTemplate;
    }
}
|
RestTemplateObservationAutoConfigurationWithoutMetricsTests
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/state/testutils/StateAssertions.java
|
{
"start": 1144,
"end": 2326
}
|
class ____ {
public static <K, N, S> void assertContainsExactly(
Iterator<StateEntry<K, N, S>> stateEntries, Map<N, Map<K, S>> expected) {
final List<StateEntryWithEquals<K, N, S>> expectedEntries =
expected.entrySet().stream()
.flatMap(
entry ->
entry.getValue().entrySet().stream()
.map(
ksEntry ->
new StateEntryWithEquals<>(
entry.getKey(),
ksEntry.getKey(),
ksEntry.getValue())))
.collect(Collectors.toList());
assertThat(stateEntries)
.toIterable()
.map(StateEntryWithEquals::new)
.containsExactlyInAnyOrderElementsOf(expectedEntries);
}
private static
|
StateAssertions
|
java
|
spring-projects__spring-framework
|
spring-context/src/jmh/java/org/springframework/context/annotation/AnnotationProcessorBenchmark.java
|
{
"start": 2849,
"end": 3100
}
|
// TestBean subclass whose setter carries @Resource so it participates in
// annotation-driven injection during the benchmark.
class ____ extends org.springframework.beans.testfixture.beans.TestBean {
    @Override
    @Resource
    @SuppressWarnings("deprecation")
    public void setSpouse(ITestBean spouse) {
        super.setSpouse(spouse);
    }
}
private static
|
ResourceAnnotatedTestBean
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java
|
{
"start": 1348,
"end": 8172
}
|
// Unit tests for NodeMetadata: serialization round-trips, equality, and the
// upgrade rules (legitimate, missing, too-new and too-old node versions).
class ____ extends ESTestCase {
    // (Index)VersionUtils.randomVersion() only returns known versions, which are necessarily no later than (Index)Version.CURRENT;
    // however we want to also consider our behaviour with all versions, so occasionally pick up a truly random version.
    private Version randomVersion() {
        return rarely() ? Version.fromId(randomNonNegativeInt()) : VersionUtils.randomVersion(random());
    }
    private BuildVersion randomBuildVersion() {
        return BuildVersion.fromVersionId(randomVersion().id());
    }
    private IndexVersion randomIndexVersion() {
        return rarely() ? IndexVersion.fromId(randomInt()) : IndexVersionUtils.randomVersion();
    }
    // Round-trips metadata through the on-disk FORMAT as the "copy" operation of the
    // equals/hashCode checker, also verifying the generation returned by writeAndCleanup.
    public void testEqualsHashcodeSerialization() {
        final Path tempDir = createTempDir();
        EqualsHashCodeTestUtils.checkEqualsAndHashCode(
            new NodeMetadata(randomAlphaOfLength(10), randomBuildVersion(), randomIndexVersion()),
            nodeMetadata -> {
                final long generation = NodeMetadata.FORMAT.writeAndCleanup(nodeMetadata, tempDir);
                final Tuple<NodeMetadata, Long> nodeMetadataLongTuple = NodeMetadata.FORMAT.loadLatestStateWithGeneration(
                    logger,
                    xContentRegistry(),
                    tempDir
                );
                assertThat(nodeMetadataLongTuple.v2(), equalTo(generation));
                return nodeMetadataLongTuple.v1();
            },
            // Mutation: change exactly one of nodeId / nodeVersion / oldestIndexVersion.
            nodeMetadata -> switch (randomInt(3)) {
                case 0 -> new NodeMetadata(
                    randomAlphaOfLength(21 - nodeMetadata.nodeId().length()),
                    nodeMetadata.nodeVersion(),
                    nodeMetadata.oldestIndexVersion()
                );
                case 1 -> new NodeMetadata(
                    nodeMetadata.nodeId(),
                    randomValueOtherThan(nodeMetadata.nodeVersion(), this::randomBuildVersion),
                    nodeMetadata.oldestIndexVersion()
                );
                default -> new NodeMetadata(
                    nodeMetadata.nodeId(),
                    nodeMetadata.nodeVersion(),
                    randomValueOtherThan(nodeMetadata.oldestIndexVersion(), this::randomIndexVersion)
                );
            }
        );
    }
    // Loading a checked-in legacy state file without a version must fail with a clear error.
    public void testFailsToReadFormatWithoutVersion() throws IOException {
        final Path tempDir = createTempDir();
        final Path stateDir = Files.createDirectory(tempDir.resolve(MetadataStateFormat.STATE_DIR_NAME));
        final InputStream resource = this.getClass().getResourceAsStream("testReadsFormatWithoutVersion.binary");
        assertThat(resource, notNullValue());
        Files.copy(resource, stateDir.resolve(NodeMetadata.FORMAT.getStateFileName(between(0, Integer.MAX_VALUE))));
        ElasticsearchException ex = expectThrows(
            ElasticsearchException.class,
            () -> NodeMetadata.FORMAT.loadLatestState(logger, xContentRegistry(), tempDir)
        );
        Throwable rootCause = ex.getRootCause();
        assertThat(rootCause, instanceOf(IllegalStateException.class));
        assertThat("Node version is required in node metadata", equalTo(rootCause.getMessage()));
    }
    // A compatible (non-future, on-or-after minimum compatible) version upgrades cleanly
    // to the current build version, preserving the node id.
    public void testUpgradesLegitimateVersions() {
        final String nodeId = randomAlphaOfLength(10);
        final NodeMetadata nodeMetadata = new NodeMetadata(
            nodeId,
            randomValueOtherThanMany(v -> v.isFutureVersion() || v.onOrAfterMinimumCompatible() == false, this::randomBuildVersion),
            IndexVersion.current()
        ).upgradeToCurrentVersion();
        assertThat(nodeMetadata.nodeVersion(), equalTo(BuildVersion.current()));
        assertThat(nodeMetadata.nodeId(), equalTo(nodeId));
    }
    // Version id 0 (V_EMPTY) cannot be upgraded directly to the current version.
    public void testUpgradesMissingVersion() {
        final String nodeId = randomAlphaOfLength(10);
        final IllegalStateException illegalStateException = expectThrows(
            IllegalStateException.class,
            () -> new NodeMetadata(nodeId, BuildVersion.fromVersionId(0), IndexVersion.current()).upgradeToCurrentVersion()
        );
        assertThat(
            illegalStateException.getMessage(),
            startsWith(
                "cannot upgrade a node from version [" + Version.V_EMPTY + "] directly to version [" + Build.current().version() + "]"
            )
        );
    }
    // A node version newer than the current build is a downgrade and must be rejected.
    public void testDoesNotUpgradeFutureVersion() {
        final IllegalStateException illegalStateException = expectThrows(
            IllegalStateException.class,
            () -> new NodeMetadata(randomAlphaOfLength(10), tooNewBuildVersion(), IndexVersion.current()).upgradeToCurrentVersion()
        );
        assertThat(
            illegalStateException.getMessage(),
            allOf(startsWith("cannot downgrade a node from version ["), endsWith("] to version [" + Build.current().version() + "]"))
        );
    }
    // A node version older than the minimum compatible one requires an intermediate upgrade.
    public void testDoesNotUpgradeAncientVersion() {
        final IllegalStateException illegalStateException = expectThrows(
            IllegalStateException.class,
            () -> new NodeMetadata(randomAlphaOfLength(10), tooOldBuildVersion(), IndexVersion.current()).upgradeToCurrentVersion()
        );
        assertThat(
            illegalStateException.getMessage(),
            allOf(
                startsWith("cannot upgrade a node from version ["),
                endsWith(
                    "] directly to version ["
                        + Build.current().version()
                        + "], upgrade to version ["
                        + Build.current().minWireCompatVersion()
                        + "] first."
                )
            )
        );
    }
    // After an upgrade the previous node version must be remembered.
    public void testUpgradeMarksPreviousVersion() {
        final String nodeId = randomAlphaOfLength(10);
        final Version version = VersionUtils.randomVersionBetween(random(), Version.CURRENT.minimumCompatibilityVersion(), Version.V_9_0_0);
        final BuildVersion buildVersion = BuildVersion.fromVersionId(version.id());
        final NodeMetadata nodeMetadata = new NodeMetadata(nodeId, buildVersion, IndexVersion.current()).upgradeToCurrentVersion();
        assertThat(nodeMetadata.nodeVersion(), equalTo(BuildVersion.current()));
        assertThat(nodeMetadata.previousNodeVersion(), equalTo(buildVersion));
    }
    // Helpers producing versions strictly outside the supported upgrade range.
    public static IndexVersion tooNewIndexVersion() {
        return IndexVersion.fromId(between(IndexVersion.current().id() + 1, 99999999));
    }
    public static BuildVersion tooNewBuildVersion() {
        return BuildVersion.fromVersionId(between(Version.CURRENT.id() + 1, 99999999));
    }
    public static BuildVersion tooOldBuildVersion() {
        return BuildVersion.fromVersionId(between(1, Version.CURRENT.minimumCompatibilityVersion().id - 1));
    }
}
|
NodeMetadataTests
|
java
|
netty__netty
|
testsuite/src/main/java/io/netty/testsuite/transport/socket/SocketAddressesTest.java
|
{
"start": 1252,
"end": 4102
}
|
// Verifies that local/remote addresses become non-null and well-formed after connect,
// both with and without an explicitly supplied local bind address.
class ____ extends AbstractSocketTest {
    @Test
    @Timeout(value = 30000, unit = TimeUnit.MILLISECONDS)
    public void testAddresses(TestInfo testInfo) throws Throwable {
        run(testInfo, new Runner<ServerBootstrap, Bootstrap>() {
            @Override
            public void run(ServerBootstrap serverBootstrap, Bootstrap bootstrap) throws Throwable {
                testAddresses(serverBootstrap, bootstrap, true);
            }
        });
    }
    @Test
    @Timeout(value = 30000, unit = TimeUnit.MILLISECONDS)
    public void testAddressesConnectWithoutLocalAddress(TestInfo testInfo) throws Throwable {
        run(testInfo, new Runner<ServerBootstrap, Bootstrap>() {
            @Override
            public void run(ServerBootstrap serverBootstrap, Bootstrap bootstrap) throws Throwable {
                testAddresses(serverBootstrap, bootstrap, false);
            }
        });
    }
    // Transport-specific check of address validity (e.g. port/host constraints).
    protected abstract void assertAddress(SocketAddress address);
    private void testAddresses(ServerBootstrap sb, Bootstrap cb, boolean withLocalAddress) throws Throwable {
        Channel serverChannel = null;
        Channel clientChannel = null;
        try {
            // Promises capture the addresses observed on the server-side child channel.
            final Promise<SocketAddress> localAddressPromise = ImmediateEventExecutor.INSTANCE.newPromise();
            final Promise<SocketAddress> remoteAddressPromise = ImmediateEventExecutor.INSTANCE.newPromise();
            serverChannel = sb.childHandler(new ChannelInboundHandlerAdapter() {
                @Override
                public void channelActive(ChannelHandlerContext ctx) {
                    localAddressPromise.setSuccess(ctx.channel().localAddress());
                    remoteAddressPromise.setSuccess(ctx.channel().remoteAddress());
                }
            }).bind().syncUninterruptibly().channel();
            clientChannel = cb.handler(new ChannelInboundHandlerAdapter()).register().syncUninterruptibly().channel();
            // Before connect, a registered-but-unconnected channel has no addresses.
            assertNull(clientChannel.localAddress());
            assertNull(clientChannel.remoteAddress());
            if (withLocalAddress) {
                clientChannel.connect(serverChannel.localAddress(), newSocketAddress()).syncUninterruptibly().channel();
            } else {
                clientChannel.connect(serverChannel.localAddress()).syncUninterruptibly().channel();
            }
            // After connect, both sides must report valid local and remote addresses.
            assertAddress(clientChannel.localAddress());
            assertAddress(clientChannel.remoteAddress());
            assertAddress(localAddressPromise.get());
            assertAddress(remoteAddressPromise.get());
        } finally {
            if (clientChannel != null) {
                clientChannel.close().syncUninterruptibly();
            }
            if (serverChannel != null) {
                serverChannel.close().syncUninterruptibly();
            }
        }
    }
}
|
SocketAddressesTest
|
java
|
quarkusio__quarkus
|
extensions/smallrye-health/runtime/src/main/java/io/quarkus/smallrye/health/runtime/SmallRyeHealthHandlerBase.java
|
{
"start": 1064,
"end": 4899
}
|
class ____ implements Handler<RoutingContext> {
static volatile boolean problemDetails = false;
protected abstract Uni<SmallRyeHealth> getHealth(SmallRyeHealthReporter reporter, RoutingContext routingContext);
private static final Map<String, ?> JSON_CONFIG = Collections.singletonMap(JsonGenerator.PRETTY_PRINTING, true);
private static final JsonProvider JSON_PROVIDER = JsonProvider.provider();
private static final JsonWriterFactory JSON_WRITER_FACTORY = JSON_PROVIDER.createWriterFactory(JSON_CONFIG);
@Override
public void handle(RoutingContext ctx) {
ManagedContext requestContext = Arc.container().requestContext();
if (requestContext.isActive()) {
doHandle(ctx, null);
} else {
requestContext.activate();
try {
doHandle(ctx, requestContext);
} catch (Exception e) {
requestContext.terminate();
throw e;
}
}
}
private void doHandle(RoutingContext ctx, ManagedContext requestContext) {
QuarkusHttpUser user = (QuarkusHttpUser) ctx.user();
if (user != null) {
Arc.container().instance(CurrentIdentityAssociation.class).get().setIdentity(user.getSecurityIdentity());
}
SmallRyeHealthReporter reporter = Arc.container().instance(SmallRyeHealthReporter.class).get();
Context context = Vertx.currentContext();
Uni<SmallRyeHealth> healthUni = getHealth(reporter, ctx);
if (context != null) {
healthUni = healthUni.emitOn(MutinyHelper.executor(context));
}
healthUni.subscribe().with(health -> {
if (requestContext != null) {
requestContext.terminate();
}
HttpServerResponse resp = ctx.response();
Buffer buffer = Buffer.buffer(256); // this size seems to cover the basic health checks
if (health.isDown()) {
resp.setStatusCode(503);
if (problemDetails) {
resp.headers().set(HttpHeaders.CONTENT_TYPE, "application/problem+json");
try (BufferOutputStream outputStream = new BufferOutputStream(buffer)) {
JsonObjectBuilder objectBuilder = JSON_PROVIDER.createObjectBuilder();
objectBuilder
.add("type", "about:blank")
.add("status", 503)
.add("title", "Health Check Failed: " + ctx.normalizedPath())
.add("detail", ctx.request().uri() + ", invoked at " + Instant.now())
.add("instance", ctx.request().absoluteURI())
.add("health", health.getPayload());
JsonWriter writer = JSON_WRITER_FACTORY.createWriter(outputStream);
writer.writeObject(objectBuilder.build());
resp.end(buffer);
return;
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
}
resp.headers()
.set(HttpHeaders.CONTENT_TYPE, "application/json; charset=UTF-8")
.set(HttpHeaders.CACHE_CONTROL, "no-store");
try (BufferOutputStream outputStream = new BufferOutputStream(buffer)) {
reporter.reportHealth(outputStream, health);
resp.end(buffer);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}, failure -> {
if (requestContext != null) {
requestContext.terminate();
}
});
}
}
|
SmallRyeHealthHandlerBase
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-spring-boot/src/test/java/org/assertj/tests/core/api/recursive/comparison/Issue_3551_Test.java
|
{
"start": 2055,
"end": 2164
}
|
class ____ {
@SuppressWarnings("unused")
@Id
private String name;
}
@Repository
|
PersonEntity
|
java
|
apache__logging-log4j2
|
log4j-core-test/src/test/java/org/apache/logging/log4j/core/pattern/MessageAnsiConverterTest.java
|
{
"start": 1508,
"end": 2550
}
|
class ____ {
private static final String EXPECTED =
"\u001B[31;1mWarning!\u001B[m Pants on \u001B[31mfire!\u001B[m" + Strings.LINE_SEPARATOR;
private Logger logger;
private ListAppender app;
@BeforeEach
void setUp(final LoggerContext context, @Named("List") final ListAppender app) {
this.logger = context.getLogger("LoggerTest");
this.app = app.clear();
}
@Test
void testReplacement() {
// See https://www.javadoc.io/doc/org.jline/jline/latest/org/jline/jansi/AnsiRenderer.html
logger.error("@|red,bold Warning!|@ Pants on @|red fire!|@");
final List<String> msgs = app.getMessages();
assertNotNull(msgs);
assertEquals(1, msgs.size(), "Incorrect number of messages. Should be 1 is " + msgs.size());
assertTrue(
msgs.get(0).endsWith(EXPECTED),
"Replacement failed - expected ending " + EXPECTED + ", actual " + msgs.get(0));
// System.out.println(msgs.get(0));
}
}
|
MessageAnsiConverterTest
|
java
|
micronaut-projects__micronaut-core
|
http/src/main/java/io/micronaut/http/annotation/Get.java
|
{
"start": 1273,
"end": 3472
}
|
interface ____ {
/**
* @return The URI of the GET route
*/
@AliasFor(annotation = HttpMethodMapping.class, member = "value")
@AliasFor(annotation = UriMapping.class, member = "value")
String value() default UriMapping.DEFAULT_URI;
/**
* @return The URI of the GET route
*/
@AliasFor(annotation = HttpMethodMapping.class, member = "value")
@AliasFor(annotation = UriMapping.class, member = "value")
String uri() default UriMapping.DEFAULT_URI;
/**
* Only to be used in the context of a server.
*
* @return The URIs of the GET route
*/
@AliasFor(annotation = HttpMethodMapping.class, member = "uris")
@AliasFor(annotation = UriMapping.class, member = "uris")
String[] uris() default {UriMapping.DEFAULT_URI};
/**
* @return The default produces, otherwise override from controller
*/
@AliasFor(annotation = Produces.class, member = "value")
String[] produces() default {};
/**
* The default consumes. Ignored for server request which never a consume a value for a GET request.
*
* @return The default consumes, otherwise override from controller
*/
@AliasFor(annotation = Consumes.class, member = "value")
String[] consumes() default {};
/**
* Shortcut that allows setting both the {@link #consumes()} and {@link #produces()} settings to the same media type.
*
* @return The media type this method processes
*/
@AliasFor(annotation = Produces.class, member = "value")
@AliasFor(annotation = Consumes.class, member = "value")
String[] processes() default {};
/**
* Shortcut that allows setting both the {@link Consumes} and {@link Produces} single settings.
*
* @return Whether a single or multiple items are produced/consumed
*/
@AliasFor(annotation = Produces.class, member = "single")
@AliasFor(annotation = Consumes.class, member = "single")
@AliasFor(annotation = SingleResult.class, member = "value")
boolean single() default false;
/**
* @return True if a HEAD route should also be registered for the same method
*/
boolean headRoute() default true;
}
|
Get
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/collect/FluentIterableTest.java
|
{
"start": 5638,
"end": 5675
}
|
class ____ implements X, Y {}
static
|
A
|
java
|
elastic__elasticsearch
|
test/framework/src/main/java/org/elasticsearch/cluster/coordination/CoordinationStateTestCluster.java
|
{
"start": 4206,
"end": 14987
}
|
class ____ {
private final ElectionStrategy electionStrategy;
DiscoveryNode localNode;
CoordinationState.PersistedState persistedState;
CoordinationState state;
ClusterNode(DiscoveryNode localNode, ElectionStrategy electionStrategy) {
this.localNode = localNode;
persistedState = new InMemoryPersistedState(
0L,
clusterState(
0L,
0L,
localNode,
CoordinationMetadata.VotingConfiguration.EMPTY_CONFIG,
CoordinationMetadata.VotingConfiguration.EMPTY_CONFIG,
0L
)
);
this.electionStrategy = electionStrategy;
state = new CoordinationState(localNode, persistedState, electionStrategy);
}
void reboot() {
if (localNode.isMasterNode() == false && rarely()) {
// master-ineligible nodes can't be trusted to persist the cluster state properly, but will not lose the fact that they
// were bootstrapped
final CoordinationMetadata.VotingConfiguration votingConfiguration = persistedState.getLastAcceptedState()
.getLastAcceptedConfiguration()
.isEmpty()
? CoordinationMetadata.VotingConfiguration.EMPTY_CONFIG
: CoordinationMetadata.VotingConfiguration.MUST_JOIN_ELECTED_MASTER;
persistedState = new InMemoryPersistedState(
0L,
clusterState(0L, 0L, localNode, votingConfiguration, votingConfiguration, 0L)
);
}
final Set<DiscoveryNodeRole> roles = new HashSet<>(localNode.getRoles());
if (randomBoolean()) {
if (roles.contains(DiscoveryNodeRole.MASTER_ROLE)) {
roles.remove(DiscoveryNodeRole.MASTER_ROLE);
} else {
roles.add(DiscoveryNodeRole.MASTER_ROLE);
}
}
localNode = DiscoveryNodeUtils.builder(localNode.getId())
.name(localNode.getName())
.ephemeralId(UUIDs.randomBase64UUID(random()))
.address(localNode.getHostName(), localNode.getHostAddress(), localNode.getAddress())
.attributes(localNode.getAttributes())
.roles(roles)
.version(localNode.getVersionInformation())
.build();
state = new CoordinationState(localNode, persistedState, electionStrategy);
}
void setInitialState(CoordinationMetadata.VotingConfiguration initialConfig, long initialValue) {
final ClusterState.Builder builder = ClusterState.builder(state.getLastAcceptedState());
builder.metadata(
Metadata.builder()
.coordinationMetadata(
CoordinationMetadata.builder()
.lastAcceptedConfiguration(initialConfig)
.lastCommittedConfiguration(initialConfig)
.build()
)
);
state.setInitialState(setValue(builder.build(), initialValue));
}
}
final ElectionStrategy electionStrategy;
final List<Message> messages;
final List<ClusterNode> clusterNodes;
final CoordinationMetadata.VotingConfiguration initialConfiguration;
final long initialValue;
public CoordinationStateTestCluster(List<DiscoveryNode> nodes, ElectionStrategy electionStrategy) {
this.electionStrategy = electionStrategy;
messages = new ArrayList<>();
clusterNodes = nodes.stream().map(node -> new ClusterNode(node, electionStrategy)).toList();
initialConfiguration = randomVotingConfig();
initialValue = randomLong();
}
record Message(DiscoveryNode sourceNode, DiscoveryNode targetNode, Object payload) {}
void reply(Message m, Object payload) {
messages.add(new Message(m.targetNode, m.sourceNode, payload));
}
void broadcast(DiscoveryNode sourceNode, Object payload) {
clusterNodes.stream().map(cn -> new Message(sourceNode, cn.localNode, payload)).forEach(messages::add);
}
Optional<ClusterNode> getNode(DiscoveryNode node) {
return clusterNodes.stream().filter(cn -> cn.localNode.equals(node)).findFirst();
}
CoordinationMetadata.VotingConfiguration randomVotingConfig() {
return new CoordinationMetadata.VotingConfiguration(
randomSubsetOf(randomIntBetween(1, clusterNodes.size()), clusterNodes).stream().map(cn -> cn.localNode.getId()).collect(toSet())
);
}
void applyMessage(Message message) {
final Optional<ClusterNode> maybeNode = getNode(message.targetNode);
if (maybeNode.isPresent() == false) {
throw new CoordinationStateRejectedException("node not available");
} else {
final Object payload = message.payload;
if (payload instanceof StartJoinRequest) {
reply(message, maybeNode.get().state.handleStartJoin((StartJoinRequest) payload));
} else if (payload instanceof Join) {
maybeNode.get().state.handleJoin((Join) payload);
} else if (payload instanceof PublishRequest) {
reply(message, maybeNode.get().state.handlePublishRequest((PublishRequest) payload));
} else if (payload instanceof PublishResponse) {
maybeNode.get().state.handlePublishResponse(message.sourceNode, (PublishResponse) payload)
.ifPresent(ac -> broadcast(message.targetNode, ac));
} else if (payload instanceof ApplyCommitRequest) {
maybeNode.get().state.handleCommit((ApplyCommitRequest) payload);
} else {
throw new AssertionError("unknown message type");
}
}
}
public void runRandomly() {
final int iterations = 10000;
final long maxTerm = 4;
long nextTerm = 1;
for (int i = 0; i < iterations; i++) {
try {
if (rarely() && nextTerm < maxTerm) {
final long term = rarely() ? randomLongBetween(0, maxTerm + 1) : nextTerm++;
final StartJoinRequest startJoinRequest = new StartJoinRequest(randomFrom(clusterNodes).localNode, term);
broadcast(startJoinRequest.getMasterCandidateNode(), startJoinRequest);
} else if (rarely()) {
randomFrom(clusterNodes).setInitialState(initialConfiguration, initialValue);
} else if (rarely() && rarely()) {
randomFrom(clusterNodes).reboot();
} else if (rarely()) {
final List<ClusterNode> masterNodes = clusterNodes.stream().filter(cn -> cn.state.electionWon()).toList();
if (masterNodes.isEmpty() == false) {
final ClusterNode clusterNode = randomFrom(masterNodes);
final long term = rarely() ? randomLongBetween(0, maxTerm + 1) : clusterNode.state.getCurrentTerm();
final long version = rarely() ? randomIntBetween(0, 5) : clusterNode.state.getLastPublishedVersion() + 1;
final CoordinationMetadata.VotingConfiguration acceptedConfig = rarely()
? randomVotingConfig()
: clusterNode.state.getLastAcceptedConfiguration();
final PublishRequest publishRequest = clusterNode.state.handleClientValue(
clusterState(
term,
version,
clusterNode.localNode,
clusterNode.state.getLastCommittedConfiguration(),
acceptedConfig,
randomLong()
)
);
broadcast(clusterNode.localNode, publishRequest);
}
} else if (messages.isEmpty() == false) {
applyMessage(randomFrom(messages));
}
// check node invariants after each iteration
clusterNodes.forEach(cn -> cn.state.invariant());
} catch (CoordinationStateRejectedException e) {
// ignore
}
}
// check system invariants. It's sufficient to do this at the end as these invariants are monotonic.
invariant();
}
void invariant() {
// one master per term
messages.stream()
.filter(m -> m.payload instanceof PublishRequest)
.collect(Collectors.groupingBy(m -> ((PublishRequest) m.payload).getAcceptedState().term()))
.forEach((term, publishMessages) -> {
Set<DiscoveryNode> mastersForTerm = publishMessages.stream().collect(Collectors.groupingBy(m -> m.sourceNode)).keySet();
assertThat("Multiple masters " + mastersForTerm + " for term " + term, mastersForTerm, hasSize(1));
});
// unique cluster state per (term, version) pair
messages.stream()
.filter(m -> m.payload instanceof PublishRequest)
.map(m -> ((PublishRequest) m.payload).getAcceptedState())
.collect(Collectors.groupingBy(ClusterState::term))
.forEach((term, clusterStates) -> {
clusterStates.stream().collect(Collectors.groupingBy(ClusterState::version)).forEach((version, clusterStates1) -> {
Set<String> clusterStateUUIDsForTermAndVersion = clusterStates1.stream()
.collect(Collectors.groupingBy(ClusterState::stateUUID))
.keySet();
assertThat(
"Multiple cluster states " + clusterStates1 + " for term " + term + " and version " + version,
clusterStateUUIDsForTermAndVersion,
hasSize(1)
);
Set<Long> clusterStateValuesForTermAndVersion = clusterStates1.stream()
.collect(Collectors.groupingBy(CoordinationStateTestCluster::value))
.keySet();
assertThat(
"Multiple cluster states " + clusterStates1 + " for term " + term + " and version " + version,
clusterStateValuesForTermAndVersion,
hasSize(1)
);
});
});
}
}
|
ClusterNode
|
java
|
google__dagger
|
javatests/dagger/hilt/android/MultiTestRoot1Test.java
|
{
"start": 3417,
"end": 3660
}
|
interface ____ {
@Provides
@MultiTestRootExternalModules.External
static Long provideString() {
return REPLACE_EXTERNAL_LONG_VALUE;
}
}
@Module
@InstallIn(ActivityComponent.class)
public
|
ReplaceExternalActivityModule
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/impl/prefetch/TestValidate.java
|
{
"start": 2570,
"end": 11612
}
|
class ____ extends AbstractHadoopTestBase {
@Test
public void testCheckNotNull() throws Exception {
String nonNullArg = "nonNullArg";
String nullArg = null;
// Should not throw.
Validate.checkNotNull(nonNullArg, "nonNullArg");
// Verify it throws.
intercept(IllegalArgumentException.class, "'nullArg' must not be null",
() -> Validate.checkNotNull(nullArg, "nullArg"));
}
@Test
public void testCheckPositiveInteger() throws Exception {
int positiveArg = 1;
int zero = 0;
int negativeArg = -1;
// Should not throw.
checkPositiveInteger(positiveArg, "positiveArg");
// Verify it throws.
intercept(IllegalArgumentException.class,
"'negativeArg' must be a positive integer",
() -> checkPositiveInteger(negativeArg, "negativeArg"));
intercept(IllegalArgumentException.class,
"'zero' must be a positive integer",
() -> checkPositiveInteger(zero, "zero"));
}
@Test
public void testCheckNotNegative() throws Exception {
int positiveArg = 1;
int zero = 0;
int negativeArg = -1;
// Should not throw.
Validate.checkNotNegative(zero, "zeroArg");
Validate.checkNotNegative(positiveArg, "positiveArg");
// Verify it throws.
intercept(IllegalArgumentException.class,
"'negativeArg' must not be negative",
() -> Validate.checkNotNegative(negativeArg, "negativeArg"));
}
@Test
public void testCheckRequired() throws Exception {
// Should not throw.
Validate.checkRequired(true, "arg");
// Verify it throws.
intercept(IllegalArgumentException.class, "'arg' is required",
() -> Validate.checkRequired(false, "arg"));
}
@Test
public void testCheckValid() throws Exception {
// Should not throw.
Validate.checkValid(true, "arg");
// Verify it throws.
ExceptionAsserts.assertThrows(
IllegalArgumentException.class,
"'arg' is invalid",
() -> Validate.checkValid(false, "arg"));
}
@Test
public void testCheckValidWithValues() throws Exception {
String validValues = "foo, bar";
// Should not throw.
Validate.checkValid(true, "arg", validValues);
// Verify it throws.
intercept(IllegalArgumentException.class,
"'arg' is invalid. Valid values are: foo, bar",
() -> Validate.checkValid(false, "arg", validValues));
}
@Test
public void testCheckNotNullAndNotEmpty() throws Exception {
// Should not throw.
Validate.checkNotNullAndNotEmpty(NON_EMPTY_ARRAY, "array");
Validate.checkNotNullAndNotEmpty(NON_EMPTY_BYTE_ARRAY, "array");
Validate.checkNotNullAndNotEmpty(NON_EMPTY_SHORT_ARRAY, "array");
Validate.checkNotNullAndNotEmpty(NON_EMPTY_INT_ARRAY, "array");
Validate.checkNotNullAndNotEmpty(NON_EMPTY_LONG_ARRAY, "array");
// Verify it throws.
intercept(IllegalArgumentException.class, "'string' must not be empty",
() -> Validate.checkNotNullAndNotEmpty("", "string"));
intercept(IllegalArgumentException.class, "'array' must not be null", () ->
Validate.checkNotNullAndNotEmpty(SampleDataForTests.NULL_ARRAY,
"array"));
intercept(IllegalArgumentException.class,
"'array' must have at least one element", () ->
Validate.checkNotNullAndNotEmpty(SampleDataForTests.EMPTY_ARRAY,
"array"));
ExceptionAsserts.assertThrows(
IllegalArgumentException.class,
"'array' must not be null",
() -> Validate.checkNotNullAndNotEmpty(NULL_BYTE_ARRAY, "array"));
ExceptionAsserts.assertThrows(
IllegalArgumentException.class,
"'array' must have at least one element",
() -> Validate.checkNotNullAndNotEmpty(EMPTY_BYTE_ARRAY, "array"));
ExceptionAsserts.assertThrows(
IllegalArgumentException.class,
"'array' must not be null",
() -> Validate.checkNotNullAndNotEmpty(NULL_SHORT_ARRAY, "array"));
ExceptionAsserts.assertThrows(
IllegalArgumentException.class,
"'array' must have at least one element",
() -> Validate.checkNotNullAndNotEmpty(EMPTY_SHORT_ARRAY, "array"));
ExceptionAsserts.assertThrows(
IllegalArgumentException.class,
"'array' must not be null",
() -> Validate.checkNotNullAndNotEmpty(NULL_INT_ARRAY, "array"));
ExceptionAsserts.assertThrows(
IllegalArgumentException.class,
"'array' must have at least one element",
() -> Validate.checkNotNullAndNotEmpty(EMPTY_INT_ARRAY, "array"));
ExceptionAsserts.assertThrows(
IllegalArgumentException.class,
"'array' must not be null",
() -> Validate.checkNotNullAndNotEmpty(NULL_LONG_ARRAY, "array"));
ExceptionAsserts.assertThrows(
IllegalArgumentException.class,
"'array' must have at least one element",
() -> Validate.checkNotNullAndNotEmpty(EMPTY_LONG_ARRAY, "array"));
}
@Test
public void testCheckListNotNullAndNotEmpty() throws Exception {
// Should not throw.
Validate.checkNotNullAndNotEmpty(VALID_LIST, "list");
// Verify it throws.
ExceptionAsserts.assertThrows(
IllegalArgumentException.class,
"'list' must not be null",
() -> Validate.checkNotNullAndNotEmpty(NULL_LIST, "list"));
ExceptionAsserts.assertThrows(
IllegalArgumentException.class,
"'list' must have at least one element",
() -> Validate.checkNotNullAndNotEmpty(EMPTY_LIST, "list"));
}
@Test
public void testCheckNotNullAndNumberOfElements() throws Exception {
// Should not throw.
Validate.checkNotNullAndNumberOfElements(Arrays.asList(1, 2, 3), 3, "arg");
// Verify it throws.
intercept(IllegalArgumentException.class, "'arg' must not be null",
() -> Validate.checkNotNullAndNumberOfElements(null, 3, "arg"));
// Verify it throws.
ExceptionAsserts.assertThrows(
IllegalArgumentException.class,
"Number of elements in 'arg' must be exactly 3, 2 given.",
() -> Validate.checkNotNullAndNumberOfElements(Arrays.asList(1, 2), 3,
"arg")
);
}
@Test
public void testCheckValuesEqual() throws Exception {
// Should not throw.
Validate.checkValuesEqual(1, "arg1", 1, "arg2");
// Verify it throws.
intercept(IllegalArgumentException.class,
"'arg1' (1) must equal 'arg2' (2)",
() -> Validate.checkValuesEqual(1, "arg1", 2, "arg2"));
}
@Test
public void testCheckIntegerMultiple() throws Exception {
// Should not throw.
Validate.checkIntegerMultiple(10, "arg1", 5, "arg2");
// Verify it throws.
intercept(IllegalArgumentException.class,
"'arg1' (10) must be an integer multiple of 'arg2' (3)",
() -> Validate.checkIntegerMultiple(10, "arg1", 3, "arg2"));
}
@Test
public void testCheckGreater() throws Exception {
// Should not throw.
Validate.checkGreater(10, "arg1", 5, "arg2");
// Verify it throws.
intercept(IllegalArgumentException.class,
"'arg1' (5) must be greater than 'arg2' (10)",
() -> Validate.checkGreater(5, "arg1", 10, "arg2"));
}
@Test
public void testCheckGreaterOrEqual() throws Exception {
// Should not throw.
Validate.checkGreaterOrEqual(10, "arg1", 5, "arg2");
// Verify it throws.
intercept(IllegalArgumentException.class,
"'arg1' (5) must be greater than or equal to 'arg2' (10)",
() -> Validate.checkGreaterOrEqual(5, "arg1", 10, "arg2"));
}
@Test
public void testCheckWithinRange() throws Exception {
// Should not throw.
Validate.checkWithinRange(10, "arg", 5, 15);
Validate.checkWithinRange(10.0, "arg", 5.0, 15.0);
// Verify it throws.
intercept(IllegalArgumentException.class,
"'arg' (5) must be within the range [10, 20]",
() -> Validate.checkWithinRange(5, "arg", 10, 20));
intercept(IllegalArgumentException.class,
"'arg' (5.0) must be within the range [10.0, 20.0]",
() -> Validate.checkWithinRange(5.0, "arg", 10.0, 20.0));
}
@Test
public void testCheckPathExists() throws Exception {
Path tempFile = Files.createTempFile("foo", "bar");
Path tempDir = tempFile.getParent();
Path notFound = Paths.get("<not-found>");
// Should not throw.
Validate.checkPathExists(tempFile, "tempFile");
Validate.checkPathExists(tempDir, "tempDir");
// Verify it throws.
intercept(IllegalArgumentException.class, "'nullArg' must not be null",
() -> Validate.checkPathExists(null, "nullArg"));
intercept(IllegalArgumentException.class,
"Path notFound (<not-found>) does not exist",
() -> Validate.checkPathExists(notFound, "notFound"));
intercept(IllegalArgumentException.class, "must point to a directory",
() -> Validate.checkPathExistsAsDir(tempFile, "tempFile"));
intercept(IllegalArgumentException.class, "must point to a file",
() -> Validate.checkPathExistsAsFile(tempDir, "tempDir"));
}
}
|
TestValidate
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/deser/bean/BeanDeserializerTest.java
|
{
"start": 7085,
"end": 8401
}
|
class ____ extends ValueDeserializerModifier
{
@Override
public ValueDeserializer<?> modifyDeserializer(DeserializationConfig config,
BeanDescription.Supplier beanDescRef, ValueDeserializer<?> deserializer) {
if (beanDescRef.getBeanClass() == Issue1912Bean.class) {
return new Issue1912CustomBeanDeserializer((BeanDeserializer) deserializer);
}
return super.modifyDeserializer(config, beanDescRef, deserializer);
}
@Override
public BeanDeserializerBuilder updateBuilder(DeserializationConfig config,
BeanDescription.Supplier beanDescRef, BeanDeserializerBuilder builder) {
if (beanDescRef.getBeanClass() == Issue1912Bean.class) {
Iterator<SettableBeanProperty> props = builder.getProperties();
while (props.hasNext()) {
SettableBeanProperty prop = props.next();
SettableBeanProperty propWithCustomDeserializer = prop.withValueDeserializer(new Issue1912CustomPropertyDeserializer());
builder.addOrReplaceProperty(propWithCustomDeserializer, true);
}
}
return builder;
}
}
public
|
Issue1912UseAddOrReplacePropertyDeserializerModifier
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/DoubleScanTests.java
|
{
"start": 733,
"end": 889
}
|
class ____ extends SimpleScanTests {
@Override
protected String[] getConfigLocations() {
return new String[] {"doubleScanTests.xml"};
}
}
|
DoubleScanTests
|
java
|
hibernate__hibernate-orm
|
hibernate-jcache/src/test/java/org/hibernate/orm/test/jcache/JCacheTransactionalCacheConcurrencyStrategyTest.java
|
{
"start": 1875,
"end": 2702
}
|
class ____ {
@Test
public void testTransactional(SessionFactoryScope factoryScope) {
final SQLStatementInspector sqlCollector = factoryScope.getCollectingStatementInspector();
factoryScope.inTransaction( (session) -> {
Parent parent = new Parent( 1, "first" );
for ( int i = 0; i < 2; i++ ) {
final Child child = new Child( i, "child #" + i, parent );
parent.addChild( child );
}
session.persist( parent );
} );
factoryScope.inTransaction( (session) -> {
sqlCollector.clear();
Parent parent = session.find( Parent.class, 1 );
assertThat( sqlCollector.getSqlQueries() ).isEmpty();
assertThat( parent.getChildren() ).hasSize( 2 );
} );
}
@Entity(name = "Parent")
@Cache(usage = CacheConcurrencyStrategy.TRANSACTIONAL)
public static
|
JCacheTransactionalCacheConcurrencyStrategyTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialExtentGeoPointDocValuesGroupingAggregatorFunction.java
|
{
"start": 1428,
"end": 17118
}
|
class ____ implements GroupingAggregatorFunction {
private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
new IntermediateStateDesc("top", ElementType.INT),
new IntermediateStateDesc("bottom", ElementType.INT),
new IntermediateStateDesc("negLeft", ElementType.INT),
new IntermediateStateDesc("negRight", ElementType.INT),
new IntermediateStateDesc("posLeft", ElementType.INT),
new IntermediateStateDesc("posRight", ElementType.INT) );
private final SpatialExtentGroupingStateWrappedLongitudeState state;
private final List<Integer> channels;
private final DriverContext driverContext;
public SpatialExtentGeoPointDocValuesGroupingAggregatorFunction(List<Integer> channels,
SpatialExtentGroupingStateWrappedLongitudeState state, DriverContext driverContext) {
this.channels = channels;
this.state = state;
this.driverContext = driverContext;
}
public static SpatialExtentGeoPointDocValuesGroupingAggregatorFunction create(
List<Integer> channels, DriverContext driverContext) {
return new SpatialExtentGeoPointDocValuesGroupingAggregatorFunction(channels, SpatialExtentGeoPointDocValuesAggregator.initGrouping(), driverContext);
}
public static List<IntermediateStateDesc> intermediateStateDesc() {
return INTERMEDIATE_STATE_DESC;
}
@Override
public int intermediateBlockCount() {
return INTERMEDIATE_STATE_DESC.size();
}
@Override
public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
Page page) {
LongBlock encodedBlock = page.getBlock(channels.get(0));
LongVector encodedVector = encodedBlock.asVector();
if (encodedVector == null) {
maybeEnableGroupIdTracking(seenGroupIds, encodedBlock);
return new GroupingAggregatorFunction.AddInput() {
@Override
public void add(int positionOffset, IntArrayBlock groupIds) {
addRawInput(positionOffset, groupIds, encodedBlock);
}
@Override
public void add(int positionOffset, IntBigArrayBlock groupIds) {
addRawInput(positionOffset, groupIds, encodedBlock);
}
@Override
public void add(int positionOffset, IntVector groupIds) {
addRawInput(positionOffset, groupIds, encodedBlock);
}
@Override
public void close() {
}
};
}
return new GroupingAggregatorFunction.AddInput() {
@Override
public void add(int positionOffset, IntArrayBlock groupIds) {
addRawInput(positionOffset, groupIds, encodedVector);
}
@Override
public void add(int positionOffset, IntBigArrayBlock groupIds) {
addRawInput(positionOffset, groupIds, encodedVector);
}
@Override
public void add(int positionOffset, IntVector groupIds) {
addRawInput(positionOffset, groupIds, encodedVector);
}
@Override
public void close() {
}
};
}
private void addRawInput(int positionOffset, IntArrayBlock groups, LongBlock encodedBlock) {
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
if (groups.isNull(groupPosition)) {
continue;
}
int valuesPosition = groupPosition + positionOffset;
if (encodedBlock.isNull(valuesPosition)) {
continue;
}
int groupStart = groups.getFirstValueIndex(groupPosition);
int groupEnd = groupStart + groups.getValueCount(groupPosition);
for (int g = groupStart; g < groupEnd; g++) {
int groupId = groups.getInt(g);
int encodedStart = encodedBlock.getFirstValueIndex(valuesPosition);
int encodedEnd = encodedStart + encodedBlock.getValueCount(valuesPosition);
for (int encodedOffset = encodedStart; encodedOffset < encodedEnd; encodedOffset++) {
long encodedValue = encodedBlock.getLong(encodedOffset);
SpatialExtentGeoPointDocValuesAggregator.combine(state, groupId, encodedValue);
}
}
}
}
private void addRawInput(int positionOffset, IntArrayBlock groups, LongVector encodedVector) {
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
if (groups.isNull(groupPosition)) {
continue;
}
int valuesPosition = groupPosition + positionOffset;
int groupStart = groups.getFirstValueIndex(groupPosition);
int groupEnd = groupStart + groups.getValueCount(groupPosition);
for (int g = groupStart; g < groupEnd; g++) {
int groupId = groups.getInt(g);
long encodedValue = encodedVector.getLong(valuesPosition);
SpatialExtentGeoPointDocValuesAggregator.combine(state, groupId, encodedValue);
}
}
}
@Override
public void addIntermediateInput(int positionOffset, IntArrayBlock groups, Page page) {
state.enableGroupIdTracking(new SeenGroupIds.Empty());
assert channels.size() == intermediateBlockCount();
Block topUncast = page.getBlock(channels.get(0));
if (topUncast.areAllValuesNull()) {
return;
}
IntVector top = ((IntBlock) topUncast).asVector();
Block bottomUncast = page.getBlock(channels.get(1));
if (bottomUncast.areAllValuesNull()) {
return;
}
IntVector bottom = ((IntBlock) bottomUncast).asVector();
Block negLeftUncast = page.getBlock(channels.get(2));
if (negLeftUncast.areAllValuesNull()) {
return;
}
IntVector negLeft = ((IntBlock) negLeftUncast).asVector();
Block negRightUncast = page.getBlock(channels.get(3));
if (negRightUncast.areAllValuesNull()) {
return;
}
IntVector negRight = ((IntBlock) negRightUncast).asVector();
Block posLeftUncast = page.getBlock(channels.get(4));
if (posLeftUncast.areAllValuesNull()) {
return;
}
IntVector posLeft = ((IntBlock) posLeftUncast).asVector();
Block posRightUncast = page.getBlock(channels.get(5));
if (posRightUncast.areAllValuesNull()) {
return;
}
IntVector posRight = ((IntBlock) posRightUncast).asVector();
assert top.getPositionCount() == bottom.getPositionCount() && top.getPositionCount() == negLeft.getPositionCount() && top.getPositionCount() == negRight.getPositionCount() && top.getPositionCount() == posLeft.getPositionCount() && top.getPositionCount() == posRight.getPositionCount();
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
if (groups.isNull(groupPosition)) {
continue;
}
int groupStart = groups.getFirstValueIndex(groupPosition);
int groupEnd = groupStart + groups.getValueCount(groupPosition);
for (int g = groupStart; g < groupEnd; g++) {
int groupId = groups.getInt(g);
int valuesPosition = groupPosition + positionOffset;
SpatialExtentGeoPointDocValuesAggregator.combineIntermediate(state, groupId, top.getInt(valuesPosition), bottom.getInt(valuesPosition), negLeft.getInt(valuesPosition), negRight.getInt(valuesPosition), posLeft.getInt(valuesPosition), posRight.getInt(valuesPosition));
}
}
}
private void addRawInput(int positionOffset, IntBigArrayBlock groups, LongBlock encodedBlock) {
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
if (groups.isNull(groupPosition)) {
continue;
}
int valuesPosition = groupPosition + positionOffset;
if (encodedBlock.isNull(valuesPosition)) {
continue;
}
int groupStart = groups.getFirstValueIndex(groupPosition);
int groupEnd = groupStart + groups.getValueCount(groupPosition);
for (int g = groupStart; g < groupEnd; g++) {
int groupId = groups.getInt(g);
int encodedStart = encodedBlock.getFirstValueIndex(valuesPosition);
int encodedEnd = encodedStart + encodedBlock.getValueCount(valuesPosition);
for (int encodedOffset = encodedStart; encodedOffset < encodedEnd; encodedOffset++) {
long encodedValue = encodedBlock.getLong(encodedOffset);
SpatialExtentGeoPointDocValuesAggregator.combine(state, groupId, encodedValue);
}
}
}
}
private void addRawInput(int positionOffset, IntBigArrayBlock groups, LongVector encodedVector) {
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
if (groups.isNull(groupPosition)) {
continue;
}
int valuesPosition = groupPosition + positionOffset;
int groupStart = groups.getFirstValueIndex(groupPosition);
int groupEnd = groupStart + groups.getValueCount(groupPosition);
for (int g = groupStart; g < groupEnd; g++) {
int groupId = groups.getInt(g);
long encodedValue = encodedVector.getLong(valuesPosition);
SpatialExtentGeoPointDocValuesAggregator.combine(state, groupId, encodedValue);
}
}
}
  /**
   * Combines partial (intermediate) spatial-extent results into this aggregator's state.
   * The page carries six int columns — top, bottom, negLeft, negRight, posLeft, posRight —
   * read from the configured channels. If any column is entirely null there is nothing
   * to combine, so the method returns early.
   */
  @Override
  public void addIntermediateInput(int positionOffset, IntBigArrayBlock groups, Page page) {
    // Intermediate input may reference groups we have not seen locally; enable tracking.
    state.enableGroupIdTracking(new SeenGroupIds.Empty());
    assert channels.size() == intermediateBlockCount();
    Block topUncast = page.getBlock(channels.get(0));
    if (topUncast.areAllValuesNull()) {
      return;
    }
    IntVector top = ((IntBlock) topUncast).asVector();
    Block bottomUncast = page.getBlock(channels.get(1));
    if (bottomUncast.areAllValuesNull()) {
      return;
    }
    IntVector bottom = ((IntBlock) bottomUncast).asVector();
    Block negLeftUncast = page.getBlock(channels.get(2));
    if (negLeftUncast.areAllValuesNull()) {
      return;
    }
    IntVector negLeft = ((IntBlock) negLeftUncast).asVector();
    Block negRightUncast = page.getBlock(channels.get(3));
    if (negRightUncast.areAllValuesNull()) {
      return;
    }
    IntVector negRight = ((IntBlock) negRightUncast).asVector();
    Block posLeftUncast = page.getBlock(channels.get(4));
    if (posLeftUncast.areAllValuesNull()) {
      return;
    }
    IntVector posLeft = ((IntBlock) posLeftUncast).asVector();
    Block posRightUncast = page.getBlock(channels.get(5));
    if (posRightUncast.areAllValuesNull()) {
      return;
    }
    IntVector posRight = ((IntBlock) posRightUncast).asVector();
    // All six intermediate columns must be position-aligned.
    assert top.getPositionCount() == bottom.getPositionCount() && top.getPositionCount() == negLeft.getPositionCount() && top.getPositionCount() == negRight.getPositionCount() && top.getPositionCount() == posLeft.getPositionCount() && top.getPositionCount() == posRight.getPositionCount();
    for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
      if (groups.isNull(groupPosition)) {
        continue;
      }
      int groupStart = groups.getFirstValueIndex(groupPosition);
      int groupEnd = groupStart + groups.getValueCount(groupPosition);
      // A position may map to multiple groups; combine the row into each of them.
      for (int g = groupStart; g < groupEnd; g++) {
        int groupId = groups.getInt(g);
        int valuesPosition = groupPosition + positionOffset;
        SpatialExtentGeoPointDocValuesAggregator.combineIntermediate(state, groupId, top.getInt(valuesPosition), bottom.getInt(valuesPosition), negLeft.getInt(valuesPosition), negRight.getInt(valuesPosition), posLeft.getInt(valuesPosition), posRight.getInt(valuesPosition));
      }
    }
  }
private void addRawInput(int positionOffset, IntVector groups, LongBlock encodedBlock) {
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
int valuesPosition = groupPosition + positionOffset;
if (encodedBlock.isNull(valuesPosition)) {
continue;
}
int groupId = groups.getInt(groupPosition);
int encodedStart = encodedBlock.getFirstValueIndex(valuesPosition);
int encodedEnd = encodedStart + encodedBlock.getValueCount(valuesPosition);
for (int encodedOffset = encodedStart; encodedOffset < encodedEnd; encodedOffset++) {
long encodedValue = encodedBlock.getLong(encodedOffset);
SpatialExtentGeoPointDocValuesAggregator.combine(state, groupId, encodedValue);
}
}
}
private void addRawInput(int positionOffset, IntVector groups, LongVector encodedVector) {
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
int valuesPosition = groupPosition + positionOffset;
int groupId = groups.getInt(groupPosition);
long encodedValue = encodedVector.getLong(valuesPosition);
SpatialExtentGeoPointDocValuesAggregator.combine(state, groupId, encodedValue);
}
}
  /**
   * Combines partial (intermediate) spatial-extent results when the group assignment
   * is a dense vector (one non-null group per position). The page carries six int
   * columns — top, bottom, negLeft, negRight, posLeft, posRight — read from the
   * configured channels; if any column is entirely null there is nothing to combine.
   */
  @Override
  public void addIntermediateInput(int positionOffset, IntVector groups, Page page) {
    // Intermediate input may reference groups we have not seen locally; enable tracking.
    state.enableGroupIdTracking(new SeenGroupIds.Empty());
    assert channels.size() == intermediateBlockCount();
    Block topUncast = page.getBlock(channels.get(0));
    if (topUncast.areAllValuesNull()) {
      return;
    }
    IntVector top = ((IntBlock) topUncast).asVector();
    Block bottomUncast = page.getBlock(channels.get(1));
    if (bottomUncast.areAllValuesNull()) {
      return;
    }
    IntVector bottom = ((IntBlock) bottomUncast).asVector();
    Block negLeftUncast = page.getBlock(channels.get(2));
    if (negLeftUncast.areAllValuesNull()) {
      return;
    }
    IntVector negLeft = ((IntBlock) negLeftUncast).asVector();
    Block negRightUncast = page.getBlock(channels.get(3));
    if (negRightUncast.areAllValuesNull()) {
      return;
    }
    IntVector negRight = ((IntBlock) negRightUncast).asVector();
    Block posLeftUncast = page.getBlock(channels.get(4));
    if (posLeftUncast.areAllValuesNull()) {
      return;
    }
    IntVector posLeft = ((IntBlock) posLeftUncast).asVector();
    Block posRightUncast = page.getBlock(channels.get(5));
    if (posRightUncast.areAllValuesNull()) {
      return;
    }
    IntVector posRight = ((IntBlock) posRightUncast).asVector();
    // All six intermediate columns must be position-aligned.
    assert top.getPositionCount() == bottom.getPositionCount() && top.getPositionCount() == negLeft.getPositionCount() && top.getPositionCount() == negRight.getPositionCount() && top.getPositionCount() == posLeft.getPositionCount() && top.getPositionCount() == posRight.getPositionCount();
    for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
      int groupId = groups.getInt(groupPosition);
      int valuesPosition = groupPosition + positionOffset;
      SpatialExtentGeoPointDocValuesAggregator.combineIntermediate(state, groupId, top.getInt(valuesPosition), bottom.getInt(valuesPosition), negLeft.getInt(valuesPosition), negRight.getInt(valuesPosition), posLeft.getInt(valuesPosition), posRight.getInt(valuesPosition));
    }
  }
private void maybeEnableGroupIdTracking(SeenGroupIds seenGroupIds, LongBlock encodedBlock) {
if (encodedBlock.mayHaveNulls()) {
state.enableGroupIdTracking(seenGroupIds);
}
}
  /**
   * Notifies the state that the selection may include groups this aggregator never
   * observed, so it must track which group ids were actually seen.
   */
  @Override
  public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) {
    state.enableGroupIdTracking(seenGroupIds);
  }
  /**
   * Writes this aggregator's intermediate representation for the selected groups
   * into {@code blocks} starting at {@code offset}; delegates to the state.
   */
  @Override
  public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) {
    state.toIntermediate(blocks, offset, selected, driverContext);
  }
  /**
   * Produces the final spatial-extent result for the selected groups and stores the
   * resulting block at {@code blocks[offset]}.
   */
  @Override
  public void evaluateFinal(Block[] blocks, int offset, IntVector selected,
      GroupingAggregatorEvaluationContext ctx) {
    blocks[offset] = SpatialExtentGeoPointDocValuesAggregator.evaluateFinal(state, selected, ctx);
  }
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getClass().getSimpleName()).append("[");
sb.append("channels=").append(channels);
sb.append("]");
return sb.toString();
}
  /**
   * Releases the native/array resources held by the aggregation state.
   */
  @Override
  public void close() {
    state.close();
  }
}
|
SpatialExtentGeoPointDocValuesGroupingAggregatorFunction
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
|
{
"start": 94663,
"end": 95467
}
|
class ____.
* @return property value as a <code>Class</code>,
* or <code>defaultValue</code>.
*/
public <U> Class<? extends U> getClass(String name,
Class<? extends U> defaultValue,
Class<U> xface) {
try {
Class<?> theClass = getClass(name, defaultValue);
if (theClass != null && !xface.isAssignableFrom(theClass))
throw new RuntimeException(theClass+" not "+xface.getName());
else if (theClass != null)
return theClass.asSubclass(xface);
else
return null;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/**
* Get the value of the <code>name</code> property as a <code>List</code>
* of objects implementing the
|
type
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest/runtime/src/main/java/io/quarkus/resteasy/reactive/server/runtime/ResteasyReactiveServerRuntimeConfig.java
|
{
"start": 359,
"end": 500
}
|
interface ____ {
/**
* Input part configuration.
*/
MultipartConfigGroup multipart();
|
ResteasyReactiveServerRuntimeConfig
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/parameters/setter/SecondSetterInterceptor.java
|
{
"start": 328,
"end": 651
}
|
class ____ {
@AroundInvoke
Object fooAroundInvoke(InvocationContext ctx) throws Exception {
assertEquals("first", ctx.getParameters()[0]);
ctx.setParameters(new String[] { "second" });
assertEquals("second", ctx.getParameters()[0]);
return ctx.proceed();
}
}
|
SecondSetterInterceptor
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/visitor/NamespaceVisitor.java
|
{
"start": 1470,
"end": 1542
}
|
interface ____ {
/** For visiting any {@link INode}. */
|
NamespaceVisitor
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/deser/creators/CreatorWithObjectIdTest.java
|
{
"start": 571,
"end": 1402
}
|
class ____ {
String id;
String name;
public A() { }
@ConstructorProperties({"id", "name"})
public A(String id, String name) {
this.id = id;
this.name = name;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
@Test
public void testObjectIdWithCreator() throws Exception
{
A a = new A("123", "A");
ObjectMapper om = new ObjectMapper();
String json = om.writeValueAsString(a);
A deser = om.readValue(json, A.class);
assertEquals(a.name, deser.name);
}
}
|
A
|
java
|
reactor__reactor-core
|
reactor-core/src/test/java/reactor/core/publisher/FluxMaterializeTest.java
|
{
"start": 1140,
"end": 7155
}
|
class ____
extends FluxOperatorTest<String, Signal<String>> {
@Override
@SuppressWarnings("unchecked")
protected List<Scenario<String, Signal<String>>> scenarios_operatorSuccess() {
return Arrays.asList(
scenario(Flux::materialize)
.receive(s -> assertThat(s).isEqualTo(Signal.next(item(0))),
s -> assertThat(s).isEqualTo(Signal.next(item(1))),
s -> assertThat(s).isEqualTo(Signal.next(item(2))),
s -> assertThat(s).isEqualTo(Signal.complete()))
.verifier(step -> step.expectNext(Signal.next(item(0)))
.expectNext(Signal.next(item(1)))
.consumeSubscriptionWith(s -> {
if(s instanceof FluxMaterialize.MaterializeSubscriber) {
FluxMaterialize.MaterializeSubscriber m =
(FluxMaterialize.MaterializeSubscriber) s;
m.peek();
m.poll();
m.size();
}
})
.expectNext(Signal.next(item(2)))
.thenRequest(1)
.expectNext(Signal.complete())
.consumeSubscriptionWith(s -> {
if(s instanceof FluxMaterialize.MaterializeSubscriber){
FluxMaterialize.MaterializeSubscriber m =
(FluxMaterialize.MaterializeSubscriber)s;
m.peek();
m.poll();
m.size();
assertThatExceptionOfType(UnsupportedOperationException.class)
.as("m.offer(null)")
.isThrownBy(() -> {
m.offer(null);
});
assertThatExceptionOfType(UnsupportedOperationException.class)
.as("m.iterator()")
.isThrownBy(() -> {
m.iterator();
});
}
})
.verifyComplete())
);
}
@Override
protected List<Scenario<String, Signal<String>>> scenarios_errorFromUpstreamFailure() {
return Arrays.asList(
scenario(Flux::materialize)
.verifier(step -> {
Hooks.onErrorDropped(c -> assertThat(c).hasMessage("dropped"));
Hooks.onNextDropped(c -> assertThat(c).isEqualTo("dropped"));
step.assertNext(s -> assertThat(s
.getThrowable()).hasMessage("test"))
.verifyComplete();
})
);
}
@Test
public void completeOnlyBackpressured() {
AssertSubscriber<Signal<Integer>> ts = AssertSubscriber.create(0L);
Flux.<Integer>empty().materialize()
.subscribe(ts);
ts.assertNoValues()
.assertNoError()
.assertNotComplete();
ts.request(1);
ts.assertValues(Signal.complete())
.assertNoError()
.assertComplete();
}
@Test
public void errorOnlyBackpressured() {
AssertSubscriber<Signal<Integer>> ts = AssertSubscriber.create(0L);
RuntimeException ex = new RuntimeException();
Flux.<Integer>error(ex).materialize()
.subscribe(ts);
ts.assertNoValues()
.assertNoError()
.assertNotComplete();
ts.request(1);
ts.assertValues(Signal.error(ex))
.assertNoError()
.assertComplete();
}
@Test
public void materialize() {
StepVerifier.create(Flux.just("Three", "Two", "One")
.materialize())
.expectNextMatches(s -> s.isOnNext() && "Three".equals(s.get()))
.expectNextMatches(s -> s.isOnNext() && "Two".equals(s.get()))
.expectNextMatches(s -> s.isOnNext() && "One".equals(s.get()))
.expectNextMatches(Signal::isOnComplete)
.verifyComplete();
}
@Test
public void materialize2() {
StepVerifier.create(Flux.just("Three", "Two")
.concatWith(Flux.error(new RuntimeException("test")))
.materialize())
.expectNextMatches(s -> s.isOnNext() && "Three".equals(s.get()))
.expectNextMatches(s -> s.isOnNext() && "Two".equals(s.get()))
.expectNextMatches(s -> s.isOnError() && s.getThrowable() != null
&& "test".equals(s.getThrowable().getMessage()))
.verifyComplete();
}
@Test
public void scanOperator(){
Flux<Integer> parent = Flux.just(1);
FluxMaterialize<Integer> test = new FluxMaterialize<>(parent);
assertThat(test.scan(Scannable.Attr.PARENT)).isSameAs(parent);
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
}
@Test
public void scanSubscriber() {
CoreSubscriber<Signal<String>> actual = new LambdaSubscriber<>(null, e -> {}, null, null);
FluxMaterialize.MaterializeSubscriber<String> test = new FluxMaterialize.MaterializeSubscriber<String>(actual);
Subscription parent = Operators.emptySubscription();
test.onSubscribe(parent);
assertThat(test.scan(Scannable.Attr.PARENT)).isSameAs(parent);
assertThat(test.scan(Scannable.Attr.ACTUAL)).isSameAs(actual);
test.requested = 35;
assertThat(test.scan(Scannable.Attr.REQUESTED_FROM_DOWNSTREAM)).isEqualTo(35);
assertThat(test.scan(Scannable.Attr.BUFFERED)).isEqualTo(0); // RS: TODO non-zero size
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
assertThat(test.scan(Scannable.Attr.TERMINATED)).isFalse();
test.terminalSignal = Signal.error(new IllegalStateException("boom"));
assertThat(test.scan(Scannable.Attr.ERROR)).hasMessage("boom");
assertThat(test.scan(Scannable.Attr.TERMINATED)).isTrue();
assertThat(test.scan(Scannable.Attr.CANCELLED)).isFalse();
test.cancel();
assertThat(test.scan(Scannable.Attr.CANCELLED)).isTrue();
}
}
|
FluxMaterializeTest
|
java
|
spring-projects__spring-framework
|
spring-web/src/main/java/org/springframework/web/util/pattern/PathElement.java
|
{
"start": 1047,
"end": 3281
}
|
class ____ {
// Score related
protected static final int WILDCARD_WEIGHT = 100;
protected static final int CAPTURE_VARIABLE_WEIGHT = 1;
protected static final MultiValueMap<String,String> NO_PARAMETERS = new LinkedMultiValueMap<>();
// Position in the pattern where this path element starts
protected final int pos;
// The separator used in this path pattern
protected final char separator;
// The next path element in the chain
protected @Nullable PathElement next;
// The previous path element in the chain
protected @Nullable PathElement prev;
/**
* Create a new path element.
* @param pos the position where this path element starts in the pattern data
* @param separator the separator in use in the path pattern
*/
PathElement(int pos, char separator) {
this.pos = pos;
this.separator = separator;
}
/**
* Attempt to match this path element.
* @param candidatePos the current position within the candidate path
* @param matchingContext encapsulates context for the match including the candidate
* @return {@code true} if it matches, otherwise {@code false}
*/
public abstract boolean matches(int candidatePos, MatchingContext matchingContext);
/**
* Return the length of the path element where captures are considered to be one character long.
* @return the normalized length
*/
public abstract int getNormalizedLength();
public abstract char[] getChars();
/**
* Return the number of variables captured by the path element.
*/
public int getCaptureCount() {
return 0;
}
/**
* Return the number of wildcard elements (*, ?) in the path element.
*/
public int getWildcardCount() {
return 0;
}
/**
* Return the score for this PathElement, combined score is used to compare parsed patterns.
*/
public int getScore() {
return 0;
}
/**
* Return whether this PathElement can be strictly {@link String#compareTo(String) compared}
* against another element for matching.
*/
public boolean isLiteral() {
return false;
}
/**
* Return if there are no more PathElements in the pattern.
* @return {@code true} if the there are no more elements
*/
protected final boolean isNoMorePattern() {
return this.next == null;
}
}
|
PathElement
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/core/type/AbstractMethodMetadataTests.java
|
{
"start": 7717,
"end": 7818
}
|
class ____ {
@Tag
public final String test() {
return "";
}
}
public static
|
WithFinalMethod
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/codec/CipherCodec.java
|
{
"start": 7146,
"end": 8096
}
|
interface ____ {
/**
* Creates a new {@link Cipher}.
*
* @return a new {@link Cipher}.
* @throws GeneralSecurityException
* @param keyDescriptor the key to use for the returned {@link Cipher}.
*/
Cipher get(KeyDescriptor keyDescriptor) throws GeneralSecurityException;
/**
* Returns the latest {@link KeyDescriptor} to use for encryption.
*
* @return the {@link KeyDescriptor} to use for encryption.
*/
default KeyDescriptor encryptionKey() {
return KeyDescriptor.unnamed();
}
}
/**
* Descriptor to determine which crypto key to use. Allows versioning and usage of named keys. Key names must not contain
* dollar {@code $} or plus {@code +} characters as these characters are used within the message format to encode key name
* and key version.
*/
public static
|
CipherSupplier
|
java
|
netty__netty
|
handler/src/main/java/io/netty/handler/timeout/WriteTimeoutHandler.java
|
{
"start": 1809,
"end": 2408
}
|
class ____ extends {@link ChannelDuplexHandler} {
* {@code @Override}
* public void exceptionCaught({@link ChannelHandlerContext} ctx, {@link Throwable} cause)
* throws {@link Exception} {
* if (cause instanceof {@link WriteTimeoutException}) {
* // do something
* } else {
* super.exceptionCaught(ctx, cause);
* }
* }
* }
*
* {@link ServerBootstrap} bootstrap = ...;
* ...
* bootstrap.childHandler(new MyChannelInitializer());
* ...
* </pre>
* @see ReadTimeoutHandler
* @see IdleStateHandler
*/
public
|
MyHandler
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/subselect/SubselectAndSingleAttributeIdClassTest.java
|
{
"start": 5040,
"end": 5381
}
|
class ____ {
@Id
@Column(name = "ID", nullable = false, precision = 9)
private Integer id;
@ManyToOne(fetch = FetchType.LAZY)
private MyChild child;
public MyGrandchild() {
}
public MyGrandchild(int id) {
this.id = id;
}
public void setChild(MyChild child) {
this.child = child;
}
}
public static
|
MyGrandchild
|
java
|
reactor__reactor-core
|
benchmarks/src/main/java/reactor/CheckpointBenchmark.java
|
{
"start": 1096,
"end": 2279
}
|
class ____ {
Flux<String> findAllUserByName(Flux<String> source) {
return source.map(s -> { throw new IllegalStateException("boom"); })
.map(s -> s + "-user");
}
@Benchmark()
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@Warmup(iterations = 5, time = 1)
@Measurement(iterations = 5, time = 1)
@Fork(1)
@BenchmarkMode({Mode.Throughput, Mode.SampleTime})
public void withFullCheckpoint() {
this.findAllUserByName(Flux.just("pedro", "simon", "stephane"))
.transform(f -> f.filter(s -> s.startsWith("s")))
.transform(f -> f.elapsed())
.checkpoint("checkpoint description", true)
.subscribe(System.out::println, t -> {
});
}
@Benchmark
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@Warmup(iterations = 5, time = 1)
@Measurement(iterations = 5, time = 1)
@Fork(1)
@BenchmarkMode({Mode.Throughput, Mode.SampleTime})
public void withLightCheckpoint() {
this.findAllUserByName(Flux.just("pedro", "simon", "stephane"))
.transform(f -> f.filter(s -> s.startsWith("s")))
.transform(f -> f.elapsed())
.checkpoint("light checkpoint identifier")
.subscribe(System.out::println, t -> {
});
}
}
|
CheckpointBenchmark
|
java
|
apache__dubbo
|
dubbo-common/src/main/java/org/apache/dubbo/common/utils/ReflectUtils.java
|
{
"start": 12285,
"end": 12404
}
|
class ____ desc.
* [int.class, boolean[].class, Object.class] => "I[ZLjava/lang/Object;"
*
* @param cs
|
array
|
java
|
apache__spark
|
common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/SimpleDownloadFile.java
|
{
"start": 2114,
"end": 2854
}
|
class ____ implements DownloadFileWritableChannel {
private final WritableByteChannel channel;
SimpleDownloadWritableChannel() throws FileNotFoundException {
channel = Channels.newChannel(new FileOutputStream(file));
}
@Override
public ManagedBuffer closeAndRead() throws IOException {
channel.close();
return new FileSegmentManagedBuffer(transportConf, file, 0, file.length());
}
@Override
public int write(ByteBuffer src) throws IOException {
return channel.write(src);
}
@Override
public boolean isOpen() {
return channel.isOpen();
}
@Override
public void close() throws IOException {
channel.close();
}
}
}
|
SimpleDownloadWritableChannel
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/superbuilder/ChainedAccessorsCar.java
|
{
"start": 807,
"end": 1251
}
|
class ____<C extends ChainedAccessorsCar,
B extends ChainedAccessorsCarBuilder<C, B>> extends ChainedAccessorsVehicleBuilder<C, B> {
private String manufacturer;
public B manufacturer(String manufacturer) {
this.manufacturer = manufacturer;
return self();
}
protected abstract B self();
public abstract C build();
}
private static final
|
ChainedAccessorsCarBuilder
|
java
|
apache__rocketmq
|
client/src/main/java/org/apache/rocketmq/client/impl/consumer/RebalanceService.java
|
{
"start": 1088,
"end": 2604
}
|
class ____ extends ServiceThread {
private static long waitInterval =
Long.parseLong(System.getProperty(
"rocketmq.client.rebalance.waitInterval", "20000"));
private static long minInterval =
Long.parseLong(System.getProperty(
"rocketmq.client.rebalance.minInterval", "1000"));
private final Logger log = LoggerFactory.getLogger(RebalanceService.class);
private final MQClientInstance mqClientFactory;
private long lastRebalanceTimestamp = System.currentTimeMillis();
public RebalanceService(MQClientInstance mqClientFactory) {
this.mqClientFactory = mqClientFactory;
}
@Override
public void run() {
log.info(this.getServiceName() + " service started");
long realWaitInterval = waitInterval;
while (!this.isStopped()) {
this.waitForRunning(realWaitInterval);
long interval = System.currentTimeMillis() - lastRebalanceTimestamp;
if (interval < minInterval) {
realWaitInterval = minInterval - interval;
} else {
boolean balanced = this.mqClientFactory.doRebalance();
realWaitInterval = balanced ? waitInterval : minInterval;
lastRebalanceTimestamp = System.currentTimeMillis();
}
}
log.info(this.getServiceName() + " service end");
}
@Override
public String getServiceName() {
return RebalanceService.class.getSimpleName();
}
}
|
RebalanceService
|
java
|
junit-team__junit5
|
platform-tests/src/test/java/org/junit/platform/suite/engine/SuiteLauncherDiscoveryRequestBuilderTests.java
|
{
"start": 6192,
"end": 6546
}
|
class ____ {
}
LauncherDiscoveryRequest request = builder.applySelectorsAndFiltersFromSuite(Suite.class).build();
List<ClassNameFilter> filters = request.getFiltersByType(ClassNameFilter.class);
assertTrue(exactlyOne(filters).apply(TestCase.class.getName()).excluded());
}
@Test
void excludeEngines() {
@ExcludeEngines("junit-jupiter")
|
Suite
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-resourceestimator/src/main/java/org/apache/hadoop/resourceestimator/common/exception/ResourceEstimatorException.java
|
{
"start": 955,
"end": 1186
}
|
class ____ extends Exception {
public ResourceEstimatorException(String message) {
super(message);
}
public ResourceEstimatorException(String message, Exception ex) {
super(message, ex);
}
}
|
ResourceEstimatorException
|
java
|
spring-projects__spring-boot
|
buildSrc/src/main/java/org/springframework/boot/build/classpath/CheckClasspathForUnnecessaryExclusions.java
|
{
"start": 1738,
"end": 5685
}
|
class ____ extends DefaultTask {
private static final Map<String, String> SPRING_BOOT_DEPENDENCIES_PROJECT = Collections.singletonMap("path",
":platform:spring-boot-dependencies");
private final Map<String, Set<String>> exclusionsByDependencyId = new TreeMap<>();
private final Map<String, Dependency> dependencyById = new HashMap<>();
private final Dependency platform;
private final DependencyHandler dependencies;
private final ConfigurationContainer configurations;
private Configuration classpath;
@Inject
public CheckClasspathForUnnecessaryExclusions(DependencyHandler dependencyHandler,
ConfigurationContainer configurations) {
this.dependencies = getProject().getDependencies();
this.configurations = getProject().getConfigurations();
this.platform = this.dependencies
.create(this.dependencies.platform(this.dependencies.project(SPRING_BOOT_DEPENDENCIES_PROJECT)));
getOutputs().upToDateWhen((task) -> true);
}
public void setClasspath(Configuration classpath) {
this.classpath = classpath;
this.exclusionsByDependencyId.clear();
this.dependencyById.clear();
classpath.getAllDependencies().all(this::processDependency);
}
@Classpath
public FileCollection getClasspath() {
return this.classpath;
}
private void processDependency(Dependency dependency) {
if (dependency instanceof ModuleDependency moduleDependency) {
processDependency(moduleDependency);
}
}
private void processDependency(ModuleDependency dependency) {
String dependencyId = getId(dependency);
TreeSet<String> exclusions = dependency.getExcludeRules()
.stream()
.map(this::getId)
.collect(Collectors.toCollection(TreeSet::new));
this.exclusionsByDependencyId.put(dependencyId, exclusions);
if (!exclusions.isEmpty()) {
this.dependencyById.put(dependencyId, this.dependencies.create(dependencyId));
}
}
@Input
Map<String, Set<String>> getExclusionsByDependencyId() {
return this.exclusionsByDependencyId;
}
@TaskAction
public void checkForUnnecessaryExclusions() {
Map<String, Set<String>> unnecessaryExclusions = new HashMap<>();
this.exclusionsByDependencyId.forEach((dependencyId, exclusions) -> {
if (!exclusions.isEmpty()) {
Dependency toCheck = this.dependencyById.get(dependencyId);
this.configurations.detachedConfiguration(toCheck, this.platform)
.getIncoming()
.getArtifacts()
.getArtifacts()
.stream()
.map(this::getId)
.forEach(exclusions::remove);
removeProfileExclusions(dependencyId, exclusions);
if (!exclusions.isEmpty()) {
unnecessaryExclusions.put(dependencyId, exclusions);
}
}
});
if (!unnecessaryExclusions.isEmpty()) {
throw new GradleException(getExceptionMessage(unnecessaryExclusions));
}
}
private void removeProfileExclusions(String dependencyId, Set<String> exclusions) {
if ("org.xmlunit:xmlunit-core".equals(dependencyId)) {
exclusions.remove("javax.xml.bind:jaxb-api");
}
}
private String getExceptionMessage(Map<String, Set<String>> unnecessaryExclusions) {
StringBuilder message = new StringBuilder("Unnecessary exclusions detected:");
for (Entry<String, Set<String>> entry : unnecessaryExclusions.entrySet()) {
message.append(String.format("%n %s", entry.getKey()));
for (String exclusion : entry.getValue()) {
message.append(String.format("%n %s", exclusion));
}
}
return message.toString();
}
private String getId(ResolvedArtifactResult artifact) {
return getId((ModuleComponentIdentifier) artifact.getId().getComponentIdentifier());
}
private String getId(ModuleDependency dependency) {
return dependency.getGroup() + ":" + dependency.getName();
}
private String getId(ExcludeRule rule) {
return rule.getGroup() + ":" + rule.getModule();
}
private String getId(ModuleComponentIdentifier identifier) {
return identifier.getGroup() + ":" + identifier.getModule();
}
}
|
CheckClasspathForUnnecessaryExclusions
|
java
|
apache__camel
|
components/camel-joor/src/test/java/org/apache/camel/language/joor/JoorBeanTest.java
|
{
"start": 1072,
"end": 2216
}
|
class ____ extends CamelTestSupport {
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.transform().method(JoorBeanTest.class, "priority")
.to("mock:result");
}
};
}
@Test
public void testBean() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived("User tony is a high roller", "Regular user",
"User scott is a high roller");
template.sendBodyAndHeader("direct:start", 123, "user", "tony");
template.sendBodyAndHeader("direct:start", 18, "user", "mickey");
template.sendBodyAndHeader("direct:start", 44, "user", "scott");
MockEndpoint.assertIsSatisfied(context);
}
public static String priority(@Joor("((int) body) / 2 > 10") boolean high, @Header("user") String user) {
if (high) {
return "User " + user + " is a high roller";
} else {
return "Regular user";
}
}
}
|
JoorBeanTest
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/filter/StatFilterExecuteFirstResultSetTest.java
|
{
"start": 2497,
"end": 2922
}
|
class ____ extends MockPreparedStatement {
public MyMockPreparedStatement(MockConnection conn, String sql) {
super(conn, sql);
}
public boolean execute() throws SQLException {
return true;
}
public ResultSet getResultSet() throws SQLException {
return getConnection().getDriver().executeQuery(this, getSql());
}
}
}
|
MyMockPreparedStatement
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java
|
{
"start": 5111,
"end": 5483
}
|
class ____ {
public static final Result EMPTY = new Result(Correction.EMPTY, Double.MIN_VALUE);
public final Correction[] corrections;
public final double cutoffScore;
private Result(Correction[] corrections, double cutoffScore) {
this.corrections = corrections;
this.cutoffScore = cutoffScore;
}
}
}
|
Result
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/deser/builder/BuilderSimpleTest.java
|
{
"start": 2201,
"end": 2326
}
|
class ____
{
final int value;
protected ValueImmutable(int v) { value = v; }
}
static
|
ValueImmutable
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/metrics/stats/SampledStat.java
|
{
"start": 5277,
"end": 6959
}
|
class ____ {
public double initialValue;
public long eventCount;
public long startTimeMs;
public long lastEventMs;
public double value;
public long timeWindowMs;
public Sample(double initialValue, long now) {
this.initialValue = initialValue;
this.eventCount = 0;
this.startTimeMs = now;
this.lastEventMs = now;
this.value = initialValue;
this.timeWindowMs = -1;
}
public Sample(double initialValue, long now, long timeWindowMs) {
this.initialValue = initialValue;
this.eventCount = 0;
this.startTimeMs = now;
this.lastEventMs = now;
this.value = initialValue;
this.timeWindowMs = timeWindowMs;
}
public void reset(long now) {
this.eventCount = 0;
this.startTimeMs = now;
this.lastEventMs = now;
this.value = initialValue;
}
public boolean isComplete(long timeMs, MetricConfig config) {
long windowMs = timeWindowMs > 0 ? timeWindowMs : config.timeWindowMs();
return timeMs - startTimeMs >= windowMs || eventCount >= config.eventWindow();
}
@Override
public String toString() {
return "Sample(" +
"value=" + value +
", eventCount=" + eventCount +
", startTimeMs=" + startTimeMs +
", lastEventMs=" + lastEventMs +
", initialValue=" + initialValue +
", timeWindowMs=" + timeWindowMs +
')';
}
}
}
|
Sample
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_537/Issue537Test.java
|
{
"start": 588,
"end": 870
}
|
class ____ {
@ProcessorTest
public void testThatReferencedMapperWillBeUsed() {
Target target = Issue537Mapper.INSTANCE.mapDto( new Source( "abc" ) );
assertThat( target ).isNotNull();
assertThat( target.getValue() ).isEqualTo( 3 );
}
}
|
Issue537Test
|
java
|
google__guice
|
core/test/com/google/inject/internal/util/LineNumbersTest.java
|
{
"start": 1937,
"end": 1987
}
|
class ____ {
@Inject
A(B b) {}
}
public
|
A
|
java
|
micronaut-projects__micronaut-core
|
http-server-netty/src/main/java/io/micronaut/http/server/netty/handler/accesslog/Http2AccessLogFrameListener.java
|
{
"start": 1222,
"end": 2440
}
|
class ____ extends Http2FrameListenerDecorator {
private final Http2AccessLogManager manager;
public Http2AccessLogFrameListener(Http2FrameListener listener, Http2AccessLogManager manager) {
super(listener);
this.manager = manager;
}
private void logHeaders(ChannelHandlerContext ctx, int streamId, Http2Headers headers) throws Http2Exception {
HttpRequest request = HttpConversionUtil.toHttpRequest(streamId, headers, false);
manager.logHeaders(ctx, streamId, request);
}
@Override
public void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers, int padding, boolean endStream) throws Http2Exception {
logHeaders(ctx, streamId, headers);
super.onHeadersRead(ctx, streamId, headers, padding, endStream);
}
@Override
public void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers, int streamDependency, short weight, boolean exclusive, int padding, boolean endStream) throws Http2Exception {
logHeaders(ctx, streamId, headers);
super.onHeadersRead(ctx, streamId, headers, streamDependency, weight, exclusive, padding, endStream);
}
}
|
Http2AccessLogFrameListener
|
java
|
spring-projects__spring-framework
|
spring-web/src/test/java/org/springframework/http/converter/cbor/JacksonCborHttpMessageConverterTests.java
|
{
"start": 3417,
"end": 4320
}
|
class ____ {
private String string;
private int number;
private float fraction;
private String[] array;
private boolean bool;
private byte[] bytes;
public byte[] getBytes() {
return bytes;
}
public void setBytes(byte[] bytes) {
this.bytes = bytes;
}
public boolean isBool() {
return bool;
}
public void setBool(boolean bool) {
this.bool = bool;
}
public String getString() {
return string;
}
public void setString(String string) {
this.string = string;
}
public int getNumber() {
return number;
}
public void setNumber(int number) {
this.number = number;
}
public float getFraction() {
return fraction;
}
public void setFraction(float fraction) {
this.fraction = fraction;
}
public String[] getArray() {
return array;
}
public void setArray(String[] array) {
this.array = array;
}
}
}
|
MyBean
|
java
|
quarkusio__quarkus
|
extensions/security-jpa-reactive/deployment/src/test/java/io/quarkus/security/jpa/reactive/MinimalUserEntity.java
|
{
"start": 493,
"end": 765
}
|
class ____ {
@Id
@GeneratedValue
public Long id;
@Column(name = "username")
@Username
public String name;
@Column(name = "password")
@Password(PasswordType.CLEAR)
public String pass;
@Roles
public String role;
}
|
MinimalUserEntity
|
java
|
hibernate__hibernate-orm
|
tooling/metamodel-generator/src/test/java/org/hibernate/processor/test/collectionbasictype/ItemRoleConverter.java
|
{
"start": 242,
"end": 693
}
|
class ____ implements AttributeConverter<Role, String> {
@Override
public String convertToDatabaseColumn(Role role) {
return role == null ? null : role.getType();
}
@Override
public Role convertToEntityAttribute(String s) {
if ( s != null ) {
if ( RoleInternal.TYPE.equals( s ) ) {
return new RoleInternal();
}
else if ( RoleExternal.TYPE.equals( s ) ) {
return new RoleExternal();
}
}
return null;
}
}
|
ItemRoleConverter
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/factories/TestFileFactory.java
|
{
"start": 6745,
"end": 7993
}
|
class ____ extends SimpleStreamFormat<RowData> {
@Override
public Reader<RowData> createReader(Configuration config, FSDataInputStream stream) {
BufferedReader reader =
new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8));
return new Reader<RowData>() {
@Override
public RowData read() throws IOException {
String line = reader.readLine();
if (line == null) {
return null;
}
return GenericRowData.of(StringData.fromString(line));
}
@Override
public void close() throws IOException {
reader.close();
}
};
}
@Override
public TypeInformation<RowData> getProducedType() {
// For ScanTableSource, the output type is determined by the planner,
// and the result of this method will not be used.
// The purpose of returning null is to verify that the planner can
// handle the output type correctly.
return null;
}
}
private static
|
FileFormat
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/test/java/org/apache/hadoop/yarn/api/resource/TestPlacementConstraints.java
|
{
"start": 2279,
"end": 2392
}
|
class ____ the various static methods in
* {@link org.apache.hadoop.yarn.api.resource.PlacementConstraints}.
*/
|
for
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/clients/admin/RecordsToDelete.java
|
{
"start": 955,
"end": 2077
}
|
class ____ {
private final long offset;
private RecordsToDelete(long offset) {
this.offset = offset;
}
/**
* Delete all the records before the given {@code offset}
*
* @param offset The offset before which all records will be deleted.
* Use {@code -1} to truncate to the high watermark.
*/
public static RecordsToDelete beforeOffset(long offset) {
return new RecordsToDelete(offset);
}
/**
* The offset before which all records will be deleted.
* Use {@code -1} to truncate to the high watermark.
*/
public long beforeOffset() {
return offset;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
RecordsToDelete that = (RecordsToDelete) o;
return this.offset == that.offset;
}
@Override
public int hashCode() {
return (int) offset;
}
@Override
public String toString() {
return "(beforeOffset = " + offset + ")";
}
}
|
RecordsToDelete
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/assertion/RecursiveAssertionDriver_CollectionPolicyTest.java
|
{
"start": 7546,
"end": 7616
}
|
class ____ {
Object[] array = new Object[2];
}
}
|
ClassWithArrayChild
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/comparison/legacy/RecursiveComparisonAssert_isEqualTo_comparingOnlyFields_Test.java
|
{
"start": 16998,
"end": 17785
}
|
class ____ {
String first;
String last;
public Name(String first, String last) {
this.first = first;
this.last = last;
}
}
// https://github.com/assertj/assertj/issues/3354
@Test
void checking_compared_fields_existence_should_skip_containers_in_field_location() {
// GIVEN
FriendlyPerson sherlock1 = new FriendlyPerson("Sherlock Holmes");
sherlock1.friends.add(new FriendlyPerson("Dr. John Watson"));
FriendlyPerson sherlock2 = new FriendlyPerson("Sherlock Holmes");
sherlock2.friends.add(new FriendlyPerson("Dr. John Watson"));
// WHEN/THEN
then(sherlock1).usingRecursiveComparison(recursiveComparisonConfiguration)
.comparingOnlyFields("friends.name")
.isEqualTo(sherlock2);
}
}
|
Name
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/security/inheritance/classpermitall/ClassPermitAllBaseResourceWithoutPathImplInterface_SecurityOnBase.java
|
{
"start": 1071,
"end": 2919
}
|
class ____
implements ClassPermitAllInterfaceWithPath_SecurityOnBase {
@Override
public String classPathOnInterface_ImplOnBase_ImplMethodWithPath_ClassPermitAll(JsonObject array) {
return CLASS_PATH_ON_INTERFACE + IMPL_ON_BASE + IMPL_METHOD_WITH_PATH + CLASS_PERMIT_ALL_PATH;
}
@PermitAll
@Override
public String classPathOnInterface_ImplOnBase_ImplMethodWithPath_ClassPermitAllMethodPermitAll(JsonObject array) {
return CLASS_PATH_ON_INTERFACE + IMPL_ON_BASE + IMPL_METHOD_WITH_PATH + CLASS_PERMIT_ALL_METHOD_PERMIT_ALL_PATH;
}
@Override
public String classPathOnInterface_ImplOnBase_InterfaceMethodWithPath_ClassPermitAll(JsonObject array) {
return CLASS_PATH_ON_INTERFACE + IMPL_ON_BASE + INTERFACE_METHOD_WITH_PATH + CLASS_PERMIT_ALL_PATH;
}
@PermitAll
@Override
public String classPathOnInterface_ImplOnBase_InterfaceMethodWithPath_ClassPermitAllMethodPermitAll(JsonObject array) {
return CLASS_PATH_ON_INTERFACE + IMPL_ON_BASE + INTERFACE_METHOD_WITH_PATH + CLASS_PERMIT_ALL_METHOD_PERMIT_ALL_PATH;
}
@Override
public ClassPermitAllSubResourceWithoutPath classPathOnInterface_SubDeclaredOnInterface_SubImplOnBase_ClassPermitAll() {
return new ClassPermitAllSubResourceWithoutPath(CLASS_PATH_ON_INTERFACE + SUB_DECLARED_ON_INTERFACE
+ SUB_IMPL_ON_BASE + CLASS_PERMIT_ALL_PATH);
}
@PermitAll
@Override
public ClassPermitAllSubResourceWithoutPath classPathOnInterface_SubDeclaredOnInterface_SubImplOnBase_ClassPermitAllMethodPermitAll() {
return new ClassPermitAllSubResourceWithoutPath(CLASS_PATH_ON_INTERFACE + SUB_DECLARED_ON_INTERFACE
+ SUB_IMPL_ON_BASE + CLASS_PERMIT_ALL_METHOD_PERMIT_ALL_PATH);
}
}
|
ClassPermitAllBaseResourceWithoutPathImplInterface_SecurityOnBase
|
java
|
quarkusio__quarkus
|
devtools/project-core-extension-codestarts/src/main/resources/codestarts/quarkus/extension-codestarts/azure-functions-codestart/java/src/test/java/org/acme/HttpResponseMessageMock.java
|
{
"start": 389,
"end": 1211
}
|
class ____ implements HttpResponseMessage {
private int httpStatusCode;
private HttpStatusType httpStatus;
private Object body;
private Map<String, String> headers;
public HttpResponseMessageMock(HttpStatusType status, Map<String, String> headers, Object body) {
this.httpStatus = status;
this.httpStatusCode = status.value();
this.headers = headers;
this.body = body;
}
@Override
public HttpStatusType getStatus() {
return this.httpStatus;
}
@Override
public int getStatusCode() {
return httpStatusCode;
}
@Override
public String getHeader(String key) {
return this.headers.get(key);
}
@Override
public Object getBody() {
return this.body;
}
public static
|
HttpResponseMessageMock
|
java
|
apache__flink
|
flink-tests/src/test/java/org/apache/flink/test/checkpointing/StreamCheckpointingITCase.java
|
{
"start": 11528,
"end": 12562
}
|
class ____ extends RichFilterFunction<String>
implements ListCheckpointed<Long> {
static long[] counts = new long[PARALLELISM];
private long count;
@Override
public boolean filter(String value) {
count++;
return value.length() < 100;
}
@Override
public void close() {
counts[getRuntimeContext().getTaskInfo().getIndexOfThisSubtask()] = count;
}
@Override
public List<Long> snapshotState(long checkpointId, long timestamp) throws Exception {
return Collections.singletonList(this.count);
}
@Override
public void restoreState(List<Long> state) throws Exception {
if (state.isEmpty() || state.size() > 1) {
throw new RuntimeException(
"Test failed due to unexpected recovered state size " + state.size());
}
this.count = state.get(0);
}
}
private static
|
StringRichFilterFunction
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/context/MergedContextConfiguration.java
|
{
"start": 10673,
"end": 13126
}
|
class ____ which the configuration was merged
* @param locations the merged context resource locations
* @param classes the merged annotated classes
* @param contextInitializerClasses the merged context initializer classes
* @param activeProfiles the merged active bean definition profiles
* @param propertySourceLocations the merged {@code PropertySource} locations
* @param propertySourceProperties the merged {@code PropertySource} properties
* @param contextCustomizers the context customizers
* @param contextLoader the resolved {@code ContextLoader}
* @param cacheAwareContextLoaderDelegate a cache-aware context loader
* delegate with which to retrieve the parent {@code ApplicationContext}
* @param parent the parent configuration or {@code null} if there is no parent
* @since 4.3
* @deprecated since 6.1 in favor of
* {@link #MergedContextConfiguration(Class, String[], Class[], Set, String[], List, String[], Set, ContextLoader, CacheAwareContextLoaderDelegate, MergedContextConfiguration)}
*/
@Deprecated(since = "6.1")
public MergedContextConfiguration(Class<?> testClass, String @Nullable [] locations, Class<?> @Nullable [] classes,
@Nullable Set<Class<? extends ApplicationContextInitializer<?>>> contextInitializerClasses,
String @Nullable [] activeProfiles, String @Nullable [] propertySourceLocations,
String @Nullable [] propertySourceProperties, @Nullable Set<ContextCustomizer> contextCustomizers,
ContextLoader contextLoader, @Nullable CacheAwareContextLoaderDelegate cacheAwareContextLoaderDelegate,
@Nullable MergedContextConfiguration parent) {
this(testClass, locations, classes, contextInitializerClasses, activeProfiles,
List.of(new PropertySourceDescriptor(processStrings(propertySourceLocations))),
propertySourceProperties, contextCustomizers, contextLoader, cacheAwareContextLoaderDelegate,
parent);
}
/**
* Create a new {@code MergedContextConfiguration} instance for the supplied
* parameters.
* <p>If a {@code null} value is supplied for {@code locations}, {@code classes},
* {@code activeProfiles}, or {@code propertySourceProperties} an empty array
* will be stored instead. If a {@code null} value is supplied for
* {@code contextInitializerClasses} or {@code contextCustomizers}, an empty
* set will be stored instead. Furthermore, active profiles will be sorted,
* and duplicate profiles will be removed.
* @param testClass the test
|
for
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/api/search/index/PhoneticMatcher.java
|
{
"start": 698,
"end": 985
}
|
enum ____ {
DM_EN("dm:en"),
DM_FR("dm:fr"),
DM_PT("dm:pt"),
DM_ES("dm:es");
private final String value;
PhoneticMatcher(String value) {
this.value = value;
}
@Override
public String toString() {
return value;
}
}
|
PhoneticMatcher
|
java
|
apache__camel
|
core/camel-main/src/main/java/org/apache/camel/main/DefaultConfigurationProperties.java
|
{
"start": 80187,
"end": 111327
}
|
class ____ (org.apache.camel.processor.LogProcessor) <br/>
* ${contextId} - the camel context id <br/>
* ${routeId} - the route id <br/>
* ${groupId} - the route group id <br/>
* ${nodeId} - the node id <br/>
* ${nodePrefixId} - the node prefix id <br/>
* ${source} - the source:line (source location must be enabled) <br/>
* ${source.name} - the source filename (source location must be enabled) <br/>
* ${source.line} - the source line number (source location must be enabled)
*
* For example to use the route and node id you can specify the name as: ${routeId}/${nodeId}
*/
public T withLogName(String logName) {
this.logName = logName;
return (T) this;
}
/**
* To configure the language to use for Log EIP. By default, the simple language is used. However, Camel also
* supports other languages such as groovy.
*/
public T withLogLanguage(String logLanguage) {
this.logLanguage = logLanguage;
return (T) this;
}
/**
* Camel comes with a default set of sensitive keywords which are automatically masked. This option allows to add
* additional custom keywords to be masked as well. Multiple keywords can be separated by comma.
*/
public T withAdditionalSensitiveKeywords(String additionalSensitiveKeywords) {
this.additionalSensitiveKeywords = additionalSensitiveKeywords;
return (T) this;
}
/**
* Sets whether the object should automatically start when Camel starts. Important: Currently only routes can be
* disabled, as CamelContext's are always started. Note: When setting auto startup false on CamelContext then that
* takes precedence and no routes is started. You would need to start CamelContext explicit using the
* org.apache.camel.CamelContext.start() method, to start the context, and then you would need to start the routes
* manually using CamelContext.getRouteController().startRoute(String).
*
* Default is true to always start up.
*/
public T withAutoStartup(boolean autoStartup) {
this.autoStartup = autoStartup;
return (T) this;
}
/**
* Used for exclusive filtering of routes to not automatically start with Camel starts.
*
* The pattern support matching by route id or endpoint urls.
*
* Multiple patterns can be specified separated by comma, as example, to exclude all the routes starting from kafka
* or jms use: kafka,jms.
*/
public T withAutoStartupExcludePattern(String autoStartupExcludePattern) {
this.autoStartupExcludePattern = autoStartupExcludePattern;
return (T) this;
}
/**
* Sets whether to allow access to the original message from Camel's error handler, or from
* org.apache.camel.spi.UnitOfWork.getOriginalInMessage(). Turning this off can optimize performance, as defensive
* copy of the original message is not needed.
*
* Default is false.
*/
public T withAllowUseOriginalMessage(boolean allowUseOriginalMessage) {
this.allowUseOriginalMessage = allowUseOriginalMessage;
return (T) this;
}
/**
* Whether to use case sensitive or insensitive headers.
*
* Important: When using case sensitive (this is set to false). Then the map is case sensitive which means headers
* such as content-type and Content-Type are two different keys which can be a problem for some protocols such as
* HTTP based, which rely on case insensitive headers. However case sensitive implementations can yield faster
* performance. Therefore use case sensitive implementation with care.
*
* Default is true.
*/
public T withCaseInsensitiveHeaders(boolean caseInsensitiveHeaders) {
this.caseInsensitiveHeaders = caseInsensitiveHeaders;
return (T) this;
}
/**
* Sets whether endpoint runtime statistics is enabled (gathers runtime usage of each incoming and outgoing
* endpoints).
*
* The default value is false.
*/
public T withEndpointRuntimeStatisticsEnabled(boolean endpointRuntimeStatisticsEnabled) {
this.endpointRuntimeStatisticsEnabled = endpointRuntimeStatisticsEnabled;
return (T) this;
}
/**
* Sets whether context load statistics is enabled (something like the unix load average).
*
* The default value is false.
*/
public T withLoadStatisticsEnabled(boolean loadStatisticsEnabled) {
this.loadStatisticsEnabled = loadStatisticsEnabled;
return (T) this;
}
/**
* Whether the producer should be started lazy (on the first message). By starting lazy you can use this to allow
* CamelContext and routes to startup in situations where a producer may otherwise fail during starting and cause
* the route to fail being started. By deferring this startup to be lazy then the startup failure can be handled
* during routing messages via Camel's routing error handlers. Beware that when the first message is processed then
* creating and starting the producer may take a little time and prolong the total processing time of the
* processing.
*
* The default value is false.
*/
public T withEndpointLazyStartProducer(boolean endpointLazyStartProducer) {
this.endpointLazyStartProducer = endpointLazyStartProducer;
return (T) this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler, which mean any exceptions occurred while the
* consumer is trying to pickup incoming messages, or the likes, will now be processed as a message and handled by
* the routing Error Handler.
* <p/>
* By default the consumer will use the org.apache.camel.spi.ExceptionHandler to deal with exceptions, that will be
* logged at WARN/ERROR level and ignored.
*
* The default value is false.
*/
public T withEndpointBridgeErrorHandler(boolean endpointBridgeErrorHandler) {
this.endpointBridgeErrorHandler = endpointBridgeErrorHandler;
return (T) this;
}
/**
* Whether to enable using data type on Camel messages.
*
* Data type are automatic turned on if one ore more routes has been explicit configured with input and output
* types. Otherwise data type is default off.
*/
public T withUseDataType(boolean useDataType) {
this.useDataType = useDataType;
return (T) this;
}
/**
* Set whether breadcrumb is enabled. The default value is false.
*/
public T withUseBreadcrumb(boolean useBreadcrumb) {
this.useBreadcrumb = useBreadcrumb;
return (T) this;
}
/**
* Can be used to turn off bean post processing.
*
* Be careful to turn this off, as this means that beans that use Camel annotations such as
* {@link org.apache.camel.EndpointInject}, {@link org.apache.camel.ProducerTemplate},
* {@link org.apache.camel.Produce}, {@link org.apache.camel.Consume} etc will not be injected and in use.
*
* Turning this off should only be done if you are sure you do not use any of these Camel features.
*
* Not all runtimes allow turning this off.
*
* The default value is true (enabled).
*/
public T withBeanPostProcessorEnabled(boolean beanPostProcessorEnabled) {
this.beanPostProcessorEnabled = beanPostProcessorEnabled;
return (T) this;
}
/**
* Sets the mbeans registration level.
*
* The default value is Default.
*/
public T withJmxManagementMBeansLevel(ManagementMBeansLevel jmxManagementMBeansLevel) {
this.jmxManagementMBeansLevel = jmxManagementMBeansLevel;
return (T) this;
}
/**
* Sets the JMX statistics level The level can be set to Extended to gather additional information
*
* The default value is Default.
*/
public T withJmxManagementStatisticsLevel(ManagementStatisticsLevel jmxManagementStatisticsLevel) {
this.jmxManagementStatisticsLevel = jmxManagementStatisticsLevel;
return (T) this;
}
/**
* The naming pattern for creating the CamelContext JMX management name.
*
* The default pattern is #name#
*/
public T withJmxManagementNamePattern(String jmxManagementNamePattern) {
this.jmxManagementNamePattern = jmxManagementNamePattern;
return (T) this;
}
/**
* Whether routes created by Kamelets should be registered for JMX management. Enabling this allows to have
* fine-grained monitoring and management of every route created via Kamelets.
*
* This is default disabled as a Kamelet is intended as a component (black-box) and its implementation details as
* Camel route makes the overall management and monitoring of Camel applications more verbose.
*
* During development of Kamelets then enabling this will make it possible for developers to do fine-grained
* performance inspection and identify potential bottlenecks in the Kamelet routes.
*
* However, for production usage then keeping this disabled is recommended.
*/
public T withJmxManagementRegisterRoutesCreateByKamelet(boolean jmxManagementRegisterRoutesCreateByKamelet) {
this.jmxManagementRegisterRoutesCreateByKamelet = jmxManagementRegisterRoutesCreateByKamelet;
return (T) this;
}
/**
* Whether routes created by route templates (not Kamelets) should be registered for JMX management. Enabling this
* allows to have fine-grained monitoring and management of every route created via route templates.
*
* This is default enabled (unlike Kamelets) as routes created via templates is regarded as standard routes, and
* should be available for management and monitoring.
*/
public T withJmxManagementRegisterRoutesCreateByTemplate(boolean jmxManagementRegisterRoutesCreateByTemplate) {
this.jmxManagementRegisterRoutesCreateByTemplate = jmxManagementRegisterRoutesCreateByTemplate;
return (T) this;
}
/**
* Whether to include timestamps for all emitted Camel Events. Enabling this allows to know fine-grained at what
* time each event was emitted, which can be used for reporting to report exactly the time of the events. This is by
* default false to avoid the overhead of including this information.
*/
public T withCamelEventsTimestampEnabled(boolean camelEventsTimestampEnabled) {
this.camelEventsTimestampEnabled = camelEventsTimestampEnabled;
return (T) this;
}
/**
* To turn on MDC logging
*/
public T withUseMdcLogging(boolean useMdcLogging) {
this.useMdcLogging = useMdcLogging;
return (T) this;
}
/**
* Sets the thread name pattern used for creating the full thread name.
*
* The default pattern is: Camel (#camelId#) thread ##counter# - #name#
*
* Where #camelId# is the name of the CamelContext. and #counter# is a unique incrementing counter. and #name# is
* the regular thread name.
*
* You can also use #longName# which is the long thread name which can includes endpoint parameters etc.
*/
public T withThreadNamePattern(String threadNamePattern) {
this.threadNamePattern = threadNamePattern;
return (T) this;
}
/**
* Used for filtering routes routes matching the given pattern, which follows the following rules:
*
* - Match by route id - Match by route input endpoint uri
*
* The matching is using exact match, by wildcard and regular expression as documented by
* {@link PatternHelper#matchPattern(String, String)}.
*
* For example to only include routes which starts with foo in their route id's, use: include=foo* And to
* exclude routes which starts from JMS endpoints, use: exclude=jms:*
*
* Multiple patterns can be separated by comma, for example to exclude both foo and bar routes, use:
* exclude=foo*,bar*
*
* Exclude takes precedence over include.
*/
public T withRouteFilterIncludePattern(String routeFilterIncludePattern) {
this.routeFilterIncludePattern = routeFilterIncludePattern;
return (T) this;
}
/**
* Used for filtering routes routes matching the given pattern, which follows the following rules:
*
* - Match by route id - Match by route input endpoint uri
*
* The matching is using exact match, by wildcard and regular expression as documented by
* {@link PatternHelper#matchPattern(String, String)}.
*
* For example to only include routes which starts with foo in their route id's, use: include=foo* And to
* exclude routes which starts from JMS endpoints, use: exclude=jms:*
*
* Multiple patterns can be separated by comma, for example to exclude both foo and bar routes, use:
* exclude=foo*,bar*
*
* Exclude takes precedence over include.
*/
public T withRouteFilterExcludePattern(String routeFilterExcludePattern) {
this.routeFilterExcludePattern = routeFilterExcludePattern;
return (T) this;
}
/**
* Sets whether bean introspection uses extended statistics. The default is false.
*/
public T withBeanIntrospectionExtendedStatistics(boolean beanIntrospectionExtendedStatistics) {
this.beanIntrospectionExtendedStatistics = beanIntrospectionExtendedStatistics;
return (T) this;
}
/**
* Sets the logging level used by bean introspection, logging activity of its usage. The default is TRACE.
*/
public T withBeanIntrospectionLoggingLevel(LoggingLevel beanIntrospectionLoggingLevel) {
this.beanIntrospectionLoggingLevel = beanIntrospectionLoggingLevel;
return (T) this;
}
/**
* Tracing pattern to match which node EIPs to trace. For example to match all To EIP nodes, use to*. The pattern
* matches by node and route id's Multiple patterns can be separated by comma.
*/
public T withTracingPattern(String tracingPattern) {
this.tracingPattern = tracingPattern;
return (T) this;
}
/**
* To use a custom tracing logging format.
*
* The default format (arrow, routeId, label) is: %-4.4s [%-12.12s] [%-33.33s]
*/
public T withTracingLoggingFormat(String format) {
this.tracingLoggingFormat = format;
return (T) this;
}
/**
* Sets the pattern used for determine which custom MDC keys to propagate during message routing when the routing
* engine continues routing asynchronously for the given message. Setting this pattern to * will propagate all
* custom keys. Or setting the pattern to foo*,bar* will propagate any keys starting with either foo or bar. Notice
* that a set of standard Camel MDC keys are always propagated which starts with camel. as key name.
*
* The match rules are applied in this order (case insensitive):
*
* 1. exact match, returns true 2. wildcard match (pattern ends with a * and the name starts with the pattern),
* returns true 3. regular expression match, returns true 4. otherwise returns false
*/
public T withMdcLoggingKeysPattern(String mdcLoggingKeysPattern) {
this.mdcLoggingKeysPattern = mdcLoggingKeysPattern;
return (T) this;
}
/**
* Whether the routes collector is enabled or not.
*
* When enabled Camel will auto-discover routes (RouteBuilder instances from the registry and also load additional
* XML routes from the file system.
*
* The routes collector is default enabled.
*/
public T withRoutesCollectorEnabled(boolean routesCollectorEnabled) {
this.routesCollectorEnabled = routesCollectorEnabled;
return (T) this;
}
/**
* Whether the routes collector should ignore any errors during loading and compiling routes.
*
* This is only intended for development or tooling.
*/
public T withRoutesCollectorIgnoreLoadingError(boolean routesCollectorIgnoreLoadingError) {
this.routesCollectorIgnoreLoadingError = routesCollectorIgnoreLoadingError;
return (T) this;
}
/**
* Work directory for compiler. Can be used to write compiled classes or other resources.
*/
public T withCompileWorkDir(String compileWorkDir) {
this.compileWorkDir = compileWorkDir;
return (T) this;
}
/**
* Used for inclusive filtering component scanning of RouteBuilder classes with @Component annotation. The exclusive
* filtering takes precedence over inclusive filtering. The pattern is using Ant-path style pattern.
*
* Multiple patterns can be specified separated by comma. For example to include all classes starting with Foo use:
* **/Foo* To include all routes form a specific package use: com/mycompany/foo/* To include all routes
* form a specific package and its sub-packages use double wildcards: com/mycompany/foo/** And to include
* all routes from two specific packages use: com/mycompany/foo/*,com/mycompany/stuff/*
*/
public T withJavaRoutesIncludePattern(String javaRoutesIncludePattern) {
this.javaRoutesIncludePattern = javaRoutesIncludePattern;
return (T) this;
}
/**
* Used for exclusive filtering component scanning of RouteBuilder classes with @Component annotation. The exclusive
* filtering takes precedence over inclusive filtering. The pattern is using Ant-path style pattern. Multiple
* patterns can be specified separated by comma.
*
* For example to exclude all classes starting with Bar use: **/Bar* To exclude all routes form a
* specific package use: com/mycompany/bar/* To exclude all routes form a specific package and its sub-packages
* use double wildcards: com/mycompany/bar/** And to exclude all routes from two specific packages use:
* com/mycompany/bar/*,com/mycompany/stuff/*
*/
public T withJavaRoutesExcludePattern(String javaRoutesExcludePattern) {
this.javaRoutesExcludePattern = javaRoutesExcludePattern;
return (T) this;
}
/**
* Used for inclusive filtering of routes from directories. The exclusive filtering takes precedence over inclusive
* filtering. The pattern is using Ant-path style pattern.
*
* Multiple patterns can be specified separated by comma, as example, to include all the routes from a directory
* whose name contains foo use: **/*foo*.
*/
public T withRoutesIncludePattern(String routesIncludePattern) {
this.routesIncludePattern = routesIncludePattern;
return (T) this;
}
/**
* Used for exclusive filtering of routes from directories. The exclusive filtering takes precedence over inclusive
* filtering. The pattern is using Ant-path style pattern.
*
* Multiple patterns can be specified separated by comma, as example, to exclude all the routes from a directory
* whose name contains foo use: **/*foo*.
*/
public T withRoutesExcludePattern(String routesExcludePattern) {
this.routesExcludePattern = routesExcludePattern;
return (T) this;
}
/**
* Used for enabling context reloading. If enabled then Camel allow external systems such as security vaults (AWS
* secrets manager, etc.) to trigger refreshing Camel by updating property placeholders and reload all existing
* routes to take changes into effect.
*/
public T withContextReloadEnabled(boolean contextReloadEnabled) {
this.contextReloadEnabled = contextReloadEnabled;
return (T) this;
}
/**
* Used for enabling automatic routes reloading. If enabled then Camel will watch for file changes in the given
* reload directory, and trigger reloading routes if files are changed.
*/
public T withRoutesReloadEnabled(boolean routesReloadEnabled) {
this.routesReloadEnabled = routesReloadEnabled;
return (T) this;
}
    /**
     * Directory to scan (incl subdirectories) for route changes. Camel cannot scan the classpath, so this must be
     * configured to a file directory. Development with Maven as build tool, you can configure the directory to be
     * src/main/resources to scan for Camel routes in XML or YAML files.
     *
     * @param  routesReloadDirectory the file directory to watch for route changes
     * @return                       this builder, for fluent configuration
     */
    public T withRoutesReloadDirectory(String routesReloadDirectory) {
        this.routesReloadDirectory = routesReloadDirectory;
        return (T) this;
    }
    /**
     * Whether the directory to scan should include sub directories.
     *
     * Depending on the number of sub directories, then this can cause the JVM to startup slower as Camel uses the JDK
     * file-watch service to scan for file changes.
     *
     * @param  routesReloadDirectoryRecursive whether to also watch sub directories
     * @return                                this builder, for fluent configuration
     */
    public T withRoutesReloadDirectoryRecursive(boolean routesReloadDirectoryRecursive) {
        this.routesReloadDirectoryRecursive = routesReloadDirectoryRecursive;
        return (T) this;
    }
    /**
     * Used for inclusive filtering of routes from directories.
     *
     * Typical used for specifying to accept routes in XML or YAML files. The default pattern is <tt>*.yaml,*.xml</tt>
     * Multiple patterns can be specified separated by comma.
     *
     * @param  routesReloadPattern file name pattern(s) to accept, comma separated
     * @return                     this builder, for fluent configuration
     */
    public T withRoutesReloadPattern(String routesReloadPattern) {
        this.routesReloadPattern = routesReloadPattern;
        return (T) this;
    }
    /**
     * When reloading routes should all existing routes be stopped and removed.
     *
     * By default, Camel will stop and remove all existing routes before reloading routes. This ensures that only the
     * reloaded routes will be active. If disabled then only routes with the same route id is updated, and any existing
     * routes are continued to run.
     *
     * @param  routesReloadRemoveAllRoutes whether to stop and remove all routes before reloading
     * @return                             this builder, for fluent configuration
     */
    public T withRoutesReloadRemoveAllRoutes(boolean routesReloadRemoveAllRoutes) {
        this.routesReloadRemoveAllRoutes = routesReloadRemoveAllRoutes;
        return (T) this;
    }
    /**
     * Whether to restart max duration when routes are reloaded. For example if max duration is 60 seconds, and a route
     * is reloaded after 25 seconds, then this will restart the count and wait 60 seconds again.
     *
     * @param  routesReloadRestartDuration whether to restart the max-duration countdown on reload
     * @return                             this builder, for fluent configuration
     */
    public T withRoutesReloadRestartDuration(boolean routesReloadRestartDuration) {
        this.routesReloadRestartDuration = routesReloadRestartDuration;
        return (T) this;
    }
    /**
     * Controls whether to pool (reuse) exchanges or create new fresh exchanges (default). Using pooled will reduce JVM
     * garbage collection overhead by avoiding to re-create Exchange instances per message each consumer receives.
     *
     * @param  exchangeFactory the exchange factory to use (e.g. default or pooled)
     * @return                 this builder, for fluent configuration
     */
    public T withExchangeFactory(String exchangeFactory) {
        this.exchangeFactory = exchangeFactory;
        return (T) this;
    }
    /**
     * The capacity the pool (for each consumer) uses for storing exchanges. The default capacity is 100.
     *
     * @param  exchangeFactoryCapacity the per-consumer pool capacity
     * @return                         this builder, for fluent configuration
     */
    public T withExchangeFactoryCapacity(int exchangeFactoryCapacity) {
        this.exchangeFactoryCapacity = exchangeFactoryCapacity;
        return (T) this;
    }
    /**
     * Configures whether statistics is enabled on exchange factory.
     *
     * @param  exchangeFactoryStatisticsEnabled whether to gather exchange factory statistics
     * @return                                  this builder, for fluent configuration
     */
    public T withExchangeFactoryStatisticsEnabled(boolean exchangeFactoryStatisticsEnabled) {
        this.exchangeFactoryStatisticsEnabled = exchangeFactoryStatisticsEnabled;
        return (T) this;
    }
    /**
     * If dumping is enabled then Camel will during startup dump all loaded routes (incl rests and route templates)
     * represented as XML/YAML DSL into the log. This is intended for trouble shooting or to assist during development.
     *
     * Sensitive information that may be configured in the route endpoints could potentially be included in the dump
     * output and is therefore not recommended being used for production usage.
     *
     * This requires to have camel-xml-io/camel-yaml-io on the classpath to be able to dump the routes as XML/YAML.
     *
     * @param  dumpRoutes the dump format to use (such as xml or yaml)
     * @return            this builder, for fluent configuration
     */
    public T withDumpRoutes(String dumpRoutes) {
        this.dumpRoutes = dumpRoutes;
        return (T) this;
    }
    /**
     * Controls what to include in output for route dumping.
     *
     * Possible values: all, routes, rests, routeConfigurations, routeTemplates, beans, dataFormats. Multiple values can
     * be separated by comma. Default is routes.
     *
     * @param  dumpRoutesInclude what to include in the dump output, comma separated
     * @return                   this builder, for fluent configuration
     */
    public T withDumpRoutesInclude(String dumpRoutesInclude) {
        this.dumpRoutesInclude = dumpRoutesInclude;
        return (T) this;
    }
    /**
     * Whether to log route dumps to Logger
     *
     * @param  dumpRoutesLog whether route dumps are written to the logger
     * @return               this builder, for fluent configuration
     */
    public T withDumpRoutesLog(boolean dumpRoutesLog) {
        this.dumpRoutesLog = dumpRoutesLog;
        return (T) this;
    }
    /**
     * Whether to resolve property placeholders in the dumped output. Default is true.
     *
     * @param  dumpRoutesResolvePlaceholders whether placeholders are resolved in the dump output
     * @return                               this builder, for fluent configuration
     */
    public T withDumpRoutesResolvePlaceholders(boolean dumpRoutesResolvePlaceholders) {
        this.dumpRoutesResolvePlaceholders = dumpRoutesResolvePlaceholders;
        return (T) this;
    }
    /**
     * When dumping routes to YAML format, then this option controls whether endpoint URIs should be expanded into a
     * key/value parameters.
     *
     * @param  dumpRoutesUriAsParameters whether endpoint URIs are expanded into key/value parameters
     * @return                           this builder, for fluent configuration
     */
    public T withDumpRoutesUriAsParameters(boolean dumpRoutesUriAsParameters) {
        this.dumpRoutesUriAsParameters = dumpRoutesUriAsParameters;
        return (T) this;
    }
    /**
     * Whether to include auto generated IDs in the dumped output. Default is false.
     *
     * @param  dumpRoutesGeneratedIds whether auto generated IDs are included in the dump
     * @return                        this builder, for fluent configuration
     */
    public T withDumpRoutesGeneratedIds(boolean dumpRoutesGeneratedIds) {
        this.dumpRoutesGeneratedIds = dumpRoutesGeneratedIds;
        return (T) this;
    }
    /**
     * Whether to save route dumps to an output file.
     *
     * If the output is a filename, then all content is saved to this file. If the output is a directory name, then one
     * or more files are saved to the directory, where the names are based on the original source file names, or auto
     * generated names.
     *
     * @param  dumpRoutesOutput file or directory name to save the dump output to
     * @return                  this builder, for fluent configuration
     */
    public T withDumpRoutesOutput(String dumpRoutesOutput) {
        this.dumpRoutesOutput = dumpRoutesOutput;
        return (T) this;
    }
    /**
     * Sets global options that can be referenced in the camel context
     * <p/>
     * <b>Important:</b> This has nothing to do with property placeholders, and is just a plain set of key/value pairs
     * which are used to configure global options on CamelContext, such as a maximum debug logging length etc.
     *
     * @param  globalOptions key/value pairs merged into any previously configured global options
     * @return               this builder, for fluent configuration
     */
    public T withGlobalOptions(Map<String, String> globalOptions) {
        // lazily create the backing map so repeated calls accumulate entries
        if (this.globalOptions == null) {
            this.globalOptions = new HashMap<>();
        }
        this.globalOptions.putAll(globalOptions);
        return (T) this;
    }
    /**
     * Sets global options that can be referenced in the camel context
     * <p/>
     * <b>Important:</b> This has nothing to do with property placeholders, and is just a plain set of key/value pairs
     * which are used to configure global options on CamelContext, such as a maximum debug logging length etc.
     *
     * @param  key   the option key
     * @param  value the option value
     * @return       this builder, for fluent configuration
     */
    public T withGlobalOption(String key, String value) {
        // lazily create the backing map so repeated calls accumulate entries
        if (this.globalOptions == null) {
            this.globalOptions = new HashMap<>();
        }
        this.globalOptions.put(key, value);
        return (T) this;
    }
    /**
     * To use startup recorder for capturing execution time during starting Camel. The recorder can be one of: false (or
     * off), logging, backlog, java-flight-recorder (or jfr).
     *
     * The default is false.
     *
     * @param  startupRecorder the recorder to use (false, off, logging, backlog, java-flight-recorder, jfr)
     * @return                 this builder, for fluent configuration
     */
    public T withStartupRecorder(String startupRecorder) {
        this.startupRecorder = startupRecorder;
        return (T) this;
    }
    /**
     * To filter our sub steps at a maximum depth.
     *
     * Use -1 for no maximum. Use 0 for no sub steps. Use 1 for max 1 sub step, and so forth.
     *
     * The default is -1.
     *
     * @param  startupRecorderMaxDepth maximum sub-step depth to record (-1 for unlimited)
     * @return                         this builder, for fluent configuration
     */
    public T withStartupRecorderMaxDepth(int startupRecorderMaxDepth) {
        this.startupRecorderMaxDepth = startupRecorderMaxDepth;
        return (T) this;
    }
    /**
     * To enable Java Flight Recorder to start a recording and automatic dump the recording to disk after startup is
     * complete.
     *
     * This requires that camel-jfr is on the classpath, and to enable this option.
     *
     * @param  startupRecorderRecording whether to start a JFR recording and dump it after startup
     * @return                          this builder, for fluent configuration
     */
    public T withStartupRecorderRecording(boolean startupRecorderRecording) {
        this.startupRecorderRecording = startupRecorderRecording;
        return (T) this;
    }
    /**
     * To use a specific Java Flight Recorder profile configuration, such as default or profile.
     *
     * The default is default.
     *
     * @param  startupRecorderProfile the JFR profile configuration name
     * @return                        this builder, for fluent configuration
     */
    public T withStartupRecorderProfile(String startupRecorderProfile) {
        this.startupRecorderProfile = startupRecorderProfile;
        return (T) this;
    }
    /**
     * How long time to run the startup recorder.
     *
     * Use 0 (default) to keep the recorder running until the JVM is exited. Use -1 to stop the recorder right after
     * Camel has been started (to only focus on potential Camel startup performance bottlenecks) Use a positive value to
     * keep recording for N seconds.
     *
     * When the recorder is stopped then the recording is auto saved to disk (note: save to disk can be disabled by
     * setting startupRecorderDir to false)
     *
     * @param  startupRecorderDuration recording duration in seconds (0 = until JVM exit, -1 = stop after startup)
     * @return                         this builder, for fluent configuration
     */
    public T withStartupRecorderDuration(long startupRecorderDuration) {
        this.startupRecorderDuration = startupRecorderDuration;
        return (T) this;
    }
    /**
     * Directory to store the recording. By default the current directory will be used.
     *
     * @param  startupRecorderDir directory where the recording is stored
     * @return                    this builder, for fluent configuration
     */
    public T withStartupRecorderDir(String startupRecorderDir) {
        this.startupRecorderDir = startupRecorderDir;
        return (T) this;
    }
    /**
     * Directories to scan for groovy source to be pre-compiled. For example: scripts/*.groovy will scan inside the
     * classpath folder scripts for all groovy source files.
     *
     * By default, sources are scanned from the classpath, but you can prefix with file: to use file system.
     *
     * The directories are using Ant-path style pattern, and multiple directories can be specified separated by comma.
     *
     * This requires having camel-groovy JAR on the classpath.
     *
     * @param  groovyScriptPattern Ant-path style pattern(s) for groovy sources, comma separated
     * @return                     this builder, for fluent configuration
     */
    public T withGroovyScriptPattern(String groovyScriptPattern) {
        this.groovyScriptPattern = groovyScriptPattern;
        return (T) this;
    }
    /**
     * Whether to preload existing compiled Groovy sources from the compileWorkDir option on startup. This can be
     * enabled to avoid compiling sources that already has been compiled during a build phase.
     *
     * @param  groovyPreloadCompiled whether to preload previously compiled Groovy sources
     * @return                       this builder, for fluent configuration
     */
    public T withGroovyPreloadCompiled(boolean groovyPreloadCompiled) {
        this.groovyPreloadCompiled = groovyPreloadCompiled;
        return (T) this;
    }
/**
* Whether to use cloud properties location setting. Default is none.
*/
public T withCloudPropertiesLocation(boolean dumpRoutesResolvePlaceholders) {
this.cloudPropertiesLocation = cloudPropertiesLocation;
return (T) this;
}
}
|
name
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/SubcomponentCreatorValidationTest.java
|
{
"start": 19126,
"end": 19455
}
|
class ____ {",
" @Provides String s() { return \"\"; }",
"}");
Source componentFile =
preprocessedJavaSource(
"test.ParentComponent",
"package test;",
"",
"import dagger.Component;",
"",
"@Component",
"
|
TestModule
|
java
|
elastic__elasticsearch
|
build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/EmbeddedProviderPlugin.java
|
{
"start": 1007,
"end": 1850
}
|
class ____ implements Plugin<Project> {
static final Attribute<Boolean> IMPL_ATTR = Attribute.of("is.impl", Boolean.class);
@Override
public void apply(Project project) {
project.getDependencies().registerTransform(UnzipTransform.class, transformSpec -> {
transformSpec.getFrom().attribute(ARTIFACT_TYPE_ATTRIBUTE, JAR_TYPE).attribute(IMPL_ATTR, true);
transformSpec.getTo().attribute(ARTIFACT_TYPE_ATTRIBUTE, DIRECTORY_TYPE).attribute(IMPL_ATTR, true);
transformSpec.parameters(parameters -> parameters.getIncludeArtifactName().set(true));
});
TaskProvider<Task> metaTask = project.getTasks().register("generateProviderImpls");
project.getExtensions().create("embeddedProviders", EmbeddedProviderExtension.class, project, metaTask);
}
}
|
EmbeddedProviderPlugin
|
java
|
apache__camel
|
components/camel-twitter/src/main/java/org/apache/camel/component/twitter/timeline/HomeConsumerHandler.java
|
{
"start": 1085,
"end": 1711
}
|
class ____ extends AbstractStatusConsumerHandler {
public HomeConsumerHandler(TwitterEndpoint endpoint) {
super(endpoint);
}
@Override
protected List<Status> doPoll() throws TwitterException {
Paging paging = getLastIdPaging();
log.trace("doPoll.getHomeTimeline(sinceId={})", paging.sinceId);
return getTwitter().v1().timelines().getHomeTimeline(paging);
}
@Override
protected List<Status> doDirect() throws TwitterException {
log.trace("doDirect.getHomeTimeline()");
return getTwitter().v1().timelines().getHomeTimeline();
}
}
|
HomeConsumerHandler
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/ClassFieldTest.java
|
{
"start": 848,
"end": 1067
}
|
class ____ {
private Class<?> value;
public Class<?> getValue() {
return value;
}
public void setValue(Class<?> value) {
this.value = value;
}
}
}
|
User
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-jsonb-common/runtime/src/main/java/io/quarkus/resteasy/reactive/jsonb/common/runtime/serialisers/VertxJson.java
|
{
"start": 3997,
"end": 4273
}
|
class ____ implements JsonbSerializer<JsonObject> {
@Override
public void serialize(JsonObject json, JsonGenerator generator, SerializationContext ctxt) {
ctxt.serialize(json.getMap(), generator);
}
}
public static
|
JsonObjectSerializer
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/converters/table/SqlTruncateTableConverter.java
|
{
"start": 1601,
"end": 2432
}
|
class ____ implements SqlNodeConverter<SqlTruncateTable> {
@Override
public Operation convertSqlNode(SqlTruncateTable sqlTruncateTable, ConvertContext context) {
UnresolvedIdentifier unresolvedIdentifier =
UnresolvedIdentifier.of(sqlTruncateTable.fullTableName());
CatalogManager catalogManager = context.getCatalogManager();
ObjectIdentifier tableIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
ContextResolvedTable contextResolvedTable = catalogManager.getTableOrError(tableIdentifier);
CatalogBaseTable catalogBaseTable = contextResolvedTable.getTable();
ValidationUtils.validateTableKind(catalogBaseTable, TableKind.TABLE, "truncate table");
return new TruncateTableOperation(tableIdentifier);
}
}
|
SqlTruncateTableConverter
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/onetoone/primarykey/NullablePrimaryKeyTest.java
|
{
"start": 890,
"end": 1916
}
|
class ____ {
@Test
public void testGeneratedSql() {
Map settings = new HashMap();
settings.putAll( Environment.getProperties() );
settings.put( AvailableSettings.DIALECT, SQLServerDialect.class.getName() );
ServiceRegistry serviceRegistry = ServiceRegistryBuilder.buildServiceRegistry( settings );
try {
MetadataSources ms = new MetadataSources( serviceRegistry );
ms.addAnnotatedClass( Address.class );
ms.addAnnotatedClass( Person.class );
final Metadata metadata = ms.buildMetadata();
final List<String> commands = new SchemaCreatorImpl( serviceRegistry ).generateCreationCommands(
metadata,
false
);
String expectedMappingTableSql = "create table personAddress (address_id bigint, " +
"person_id bigint not null, primary key (person_id))";
assertEquals( expectedMappingTableSql, commands.get( 2 ), "Wrong SQL" );
}
catch (Exception e) {
fail( e.getMessage() );
}
finally {
ServiceRegistryBuilder.destroy( serviceRegistry );
}
}
}
|
NullablePrimaryKeyTest
|
java
|
grpc__grpc-java
|
core/src/main/java/io/grpc/internal/RetriableStream.java
|
{
"start": 48366,
"end": 48770
}
|
class ____ {
ClientStream stream;
// GuardedBy RetriableStream.lock
boolean closed;
// setting to true must be GuardedBy RetriableStream.lock
boolean bufferLimitExceeded;
final int previousAttemptCount;
Substream(int previousAttemptCount) {
this.previousAttemptCount = previousAttemptCount;
}
}
/**
* Traces the buffer used by a substream.
*/
|
Substream
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/LinuxResourceCalculatorPlugin.java
|
{
"start": 1142,
"end": 1295
}
|
class ____ extends ResourceCalculatorPlugin {
public LinuxResourceCalculatorPlugin() {
super(new SysInfoLinux());
}
}
|
LinuxResourceCalculatorPlugin
|
java
|
elastic__elasticsearch
|
modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistMethod.java
|
{
"start": 1454,
"end": 1507
}
|
class ____ not need to be
* whitelisted.
*/
public
|
does
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/TestBlockBuilder.java
|
{
"start": 4761,
"end": 6350
}
|
class ____ extends TestBlockBuilder {
private final LongBlock.Builder builder;
TestLongBlockBuilder(BlockFactory blockFactory, int estimatedSize) {
builder = blockFactory.newLongBlockBuilder(estimatedSize);
}
@Override
public TestBlockBuilder appendObject(Object object) {
builder.appendLong(((Number) object).longValue());
return this;
}
@Override
public TestBlockBuilder appendNull() {
builder.appendNull();
return this;
}
@Override
public TestBlockBuilder beginPositionEntry() {
builder.beginPositionEntry();
return this;
}
@Override
public TestBlockBuilder endPositionEntry() {
builder.endPositionEntry();
return this;
}
@Override
public TestBlockBuilder copyFrom(Block block, int beginInclusive, int endExclusive) {
builder.copyFrom(block, beginInclusive, endExclusive);
return this;
}
@Override
public TestBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) {
builder.mvOrdering(mvOrdering);
return this;
}
@Override
public long estimatedBytes() {
return builder.estimatedBytes();
}
@Override
public LongBlock build() {
return builder.build();
}
@Override
public void close() {
builder.close();
}
}
private static
|
TestLongBlockBuilder
|
java
|
apache__logging-log4j2
|
log4j-core-test/src/test/java/org/apache/logging/log4j/core/appender/rolling/RollingFileAppenderBuilderTest.java
|
{
"start": 1023,
"end": 1272
}
|
class ____ {
/**
* Tests https://issues.apache.org/jira/browse/LOG4J2-1620
*/
@Test
void testDefaultImmediateFlush() {
assertTrue(RollingFileAppender.newBuilder().isImmediateFlush());
}
}
|
RollingFileAppenderBuilderTest
|
java
|
quarkusio__quarkus
|
extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/inject/NamedBeanValidationSuccessTest.java
|
{
"start": 1171,
"end": 1526
}
|
class ____ {
public List<String> getList(String param) {
return Collections.singletonList(param);
}
public List<String> getList(boolean param) {
return Collections.emptyList();
}
public List<String> getList() {
return Collections.singletonList("one");
}
}
}
|
NamedFoo
|
java
|
spring-projects__spring-framework
|
spring-expression/src/test/java/org/springframework/expression/spel/standard/PropertiesConversionSpelTests.java
|
{
"start": 2979,
"end": 3205
}
|
class ____ {
@SuppressWarnings("unused")
public String foo(Properties props) {
return props.getProperty("x") + props.getProperty("y") + props.getProperty("z");
}
}
@SuppressWarnings("serial")
private static
|
TestBean
|
java
|
google__dagger
|
hilt-compiler/main/java/dagger/hilt/processor/internal/root/ComponentTreeDepsProcessingStep.java
|
{
"start": 2750,
"end": 9761
}
|
class ____ extends BaseProcessingStep {
private final Set<ClassName> componentTreeDepNames = new HashSet<>();
private final Set<ClassName> processed = new HashSet<>();
public ComponentTreeDepsProcessingStep(XProcessingEnv env) {
super(env);
}
@Override
protected ImmutableSet<ClassName> annotationClassNames() {
return ImmutableSet.of(ClassNames.COMPONENT_TREE_DEPS);
}
@Override
protected void processEach(ClassName annotation, XElement element) {
componentTreeDepNames.add(XElements.asTypeElement(element).getClassName());
}
@Override
public void postProcess(XProcessingEnv env, XRoundEnv roundEnv) throws Exception {
ImmutableSet<ComponentTreeDepsMetadata> componentTreeDepsToProcess =
componentTreeDepNames.stream()
.filter(className -> !processed.contains(className))
.map(className -> processingEnv().requireTypeElement(className))
.map(element -> ComponentTreeDepsMetadata.from(element, processingEnv()))
.collect(toImmutableSet());
DefineComponents defineComponents = DefineComponents.create();
for (ComponentTreeDepsMetadata metadata : componentTreeDepsToProcess) {
processComponentTreeDeps(metadata, defineComponents);
}
}
private void processComponentTreeDeps(
ComponentTreeDepsMetadata metadata, DefineComponents defineComponents) throws IOException {
XTypeElement metadataElement = processingEnv().requireTypeElement(metadata.name());
try {
// We choose a name for the generated components/wrapper based off of the originating element
// annotated with @ComponentTreeDeps. This is close to but isn't necessarily a "real" name of
// a root, since with shared test components, even for single roots, the component tree deps
// will be moved to a shared package with a deduped name.
ClassName renamedRoot = Processors.removeNameSuffix(metadataElement, "_ComponentTreeDeps");
ComponentNames componentNames = ComponentNames.withRenaming(rootName -> renamedRoot);
boolean isDefaultRoot = ClassNames.DEFAULT_ROOT.equals(renamedRoot);
ImmutableSet<Root> roots =
AggregatedRootMetadata.from(metadata.aggregatedRootDeps(), processingEnv()).stream()
.map(AggregatedRootMetadata::rootElement)
.map(rootElement -> Root.create(rootElement, processingEnv()))
.collect(toImmutableSet());
// TODO(bcorso): For legacy reasons, a lot of the generating code requires a "root" as input
// since we used to assume 1 root per component tree. Now that each ComponentTreeDeps may
// represent multiple roots, we should refactor this logic.
Root root =
isDefaultRoot
? Root.createDefaultRoot(processingEnv())
// Non-default roots should only ever be associated with one root element
: getOnlyElement(roots);
ImmutableSet<ComponentDescriptor> componentDescriptors =
defineComponents.getComponentDescriptors(
DefineComponentClassesMetadata.from(metadata.defineComponentDeps()));
ComponentDescriptor rootComponentDescriptor =
componentDescriptors.stream()
.filter(descriptor -> descriptor.component().equals(root.rootComponentName()))
.collect(toOptional())
.orElseThrow(() -> new AssertionError("Missing root: " + root.rootComponentName()));
ComponentTree tree = ComponentTree.from(componentDescriptors, rootComponentDescriptor);
ComponentDependencies deps =
ComponentDependencies.from(
componentDescriptors,
AggregatedDepsMetadata.from(metadata.aggregatedDeps()),
AggregatedUninstallModulesMetadata.from(metadata.aggregatedUninstallModulesDeps()),
AggregatedEarlyEntryPointMetadata.from(metadata.aggregatedEarlyEntryPointDeps()),
processingEnv());
AliasOfs aliasOfs =
AliasOfs.create(
AliasOfPropagatedDataMetadata.from(metadata.aliasOfDeps()), componentDescriptors);
RootMetadata rootMetadata = RootMetadata.create(root, tree, deps, aliasOfs, processingEnv());
generateComponents(metadata, rootMetadata, componentNames);
// Generate a creator for the early entry point if there is a default component available
// and there are early entry points.
if (isDefaultRoot && !metadata.aggregatedEarlyEntryPointDeps().isEmpty()) {
EarlySingletonComponentCreatorGenerator.generate(processingEnv());
}
if (root.isTestRoot()) {
// Generate test related classes for each test root that uses this component.
ImmutableList<RootMetadata> rootMetadatas =
roots.stream()
.map(test -> RootMetadata.create(test, tree, deps, aliasOfs, processingEnv()))
.collect(toImmutableList());
generateTestComponentData(metadataElement, rootMetadatas, componentNames);
} else {
generateApplication(root.element());
}
setProcessingState(metadata, root);
} catch (Exception e) {
processed.add(metadata.name());
throw e;
}
}
private void setProcessingState(ComponentTreeDepsMetadata metadata, Root root) {
processed.add(metadata.name());
}
private void generateComponents(
ComponentTreeDepsMetadata metadata, RootMetadata rootMetadata, ComponentNames componentNames)
throws IOException {
RootGenerator.generate(metadata, rootMetadata, componentNames, processingEnv());
}
private void generateTestComponentData(
XTypeElement metadataElement,
ImmutableList<RootMetadata> rootMetadatas,
ComponentNames componentNames)
throws IOException {
for (RootMetadata rootMetadata : rootMetadatas) {
// TODO(bcorso): Consider moving this check earlier into processEach.
XTypeElement testElement = rootMetadata.testRootMetadata().testElement();
ProcessorErrors.checkState(
testElement.isPublic(),
testElement,
"Hilt tests must be public, but found: %s",
XElements.toStableString(testElement));
new TestComponentDataGenerator(processingEnv(), metadataElement, rootMetadata, componentNames)
.generate();
}
}
private void generateApplication(XTypeElement rootElement) throws IOException {
// The generated application references the generated component so they must be generated
// in the same build unit. Thus, we only generate the application here if we're using the
// Hilt Gradle plugin's aggregating task. If we're using the aggregating processor, we need
// to generate the application within AndroidEntryPointProcessor instead.
if (!useAggregatingRootProcessor(processingEnv())) {
AndroidEntryPointMetadata metadata = AndroidEntryPointMetadata.of(rootElement);
new ApplicationGenerator(processingEnv(), metadata).generate();
}
}
}
|
ComponentTreeDepsProcessingStep
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/util/StringValueUtils.java
|
{
"start": 1391,
"end": 3218
}
|
class ____ {
/**
* Converts the given <code>StringValue</code> into a lower case variant.
*
* @param string The string to convert to lower case.
*/
public static void toLowerCase(StringValue string) {
final char[] chars = string.getCharArray();
final int len = string.length();
for (int i = 0; i < len; i++) {
chars[i] = Character.toLowerCase(chars[i]);
}
}
/**
* Replaces all non-word characters in a string by a given character. The only characters not
* replaced are the characters that qualify as word characters or digit characters with respect
* to {@link Character#isLetter(char)} or {@link Character#isDigit(char)}, as well as the
* underscore character.
*
* <p>This operation is intended to simplify strings for counting distinct words.
*
* @param string The string value to have the non-word characters replaced.
* @param replacement The character to use as the replacement.
*/
public static void replaceNonWordChars(StringValue string, char replacement) {
final char[] chars = string.getCharArray();
final int len = string.length();
for (int i = 0; i < len; i++) {
final char c = chars[i];
if (!(Character.isLetter(c) || Character.isDigit(c) || c == '_')) {
chars[i] = replacement;
}
}
}
// ============================================================================================
/**
* A tokenizer for string values that uses whitespace characters as token delimiters. The
* tokenizer is designed to have a resettable state and operate on mutable objects, sparing
* object allocation and garbage collection overhead.
*/
public static final
|
StringValueUtils
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java
|
{
"start": 42026,
"end": 43266
}
|
class ____ {
public final char[] passwordHash;
public final boolean enabled;
private final Hasher hasher;
ReservedUserInfo(char[] passwordHash, boolean enabled) {
this.passwordHash = passwordHash;
this.enabled = enabled;
this.hasher = Hasher.resolveFromHash(this.passwordHash);
}
boolean hasEmptyPassword() {
return passwordHash.length == 0;
}
boolean verifyPassword(SecureString data) {
return hasher.verify(data, this.passwordHash);
}
static ReservedUserInfo defaultEnabledUserInfo() {
return new ReservedUserInfo(new char[0], true);
}
static ReservedUserInfo defaultDisabledUserInfo() {
return new ReservedUserInfo(new char[0], false);
}
}
/**
* Result record for every document matching a user
*/
public record QueryUserResult(User user, Object[] sortValues) {}
/**
* Total result for a Query User query
*/
public record QueryUserResults(List<QueryUserResult> userQueryResult, long total) {
public static final QueryUserResults EMPTY = new QueryUserResults(List.of(), 0);
}
}
|
ReservedUserInfo
|
java
|
apache__camel
|
components/camel-metrics/src/test/java/org/apache/camel/component/metrics/HistogramProducerTest.java
|
{
"start": 1609,
"end": 4378
}
|
class ____ {
private static final String METRICS_NAME = "metrics.name";
private static final Long VALUE = System.currentTimeMillis();
@Mock
private MetricsEndpoint endpoint;
@Mock
private MetricRegistry registry;
@Mock
private Histogram histogram;
@Mock
private Exchange exchange;
@Mock
private Message in;
private HistogramProducer producer;
private InOrder inOrder;
@BeforeEach
public void setUp() {
producer = new HistogramProducer(endpoint);
inOrder = Mockito.inOrder(endpoint, registry, histogram, exchange, in);
lenient().when(registry.histogram(METRICS_NAME)).thenReturn(histogram);
lenient().when(exchange.getIn()).thenReturn(in);
}
@Test
public void testHistogramProducer() {
assertThat(producer.getEndpoint().equals(endpoint), is(true));
}
@Test
public void testProcessValueSet() throws Exception {
when(endpoint.getValue()).thenReturn(VALUE);
when(in.getHeader(HEADER_HISTOGRAM_VALUE, VALUE, Long.class)).thenReturn(VALUE);
producer.doProcess(exchange, endpoint, registry, METRICS_NAME);
inOrder.verify(exchange, times(1)).getIn();
inOrder.verify(registry, times(1)).histogram(METRICS_NAME);
inOrder.verify(endpoint, times(1)).getValue();
inOrder.verify(in, times(1)).getHeader(HEADER_HISTOGRAM_VALUE, VALUE, Long.class);
inOrder.verify(histogram, times(1)).update(VALUE);
inOrder.verifyNoMoreInteractions();
}
@Test
public void testProcessValueNotSet() throws Exception {
Object action = null;
when(endpoint.getValue()).thenReturn(null);
producer.doProcess(exchange, endpoint, registry, METRICS_NAME);
inOrder.verify(exchange, times(1)).getIn();
inOrder.verify(registry, times(1)).histogram(METRICS_NAME);
inOrder.verify(endpoint, times(1)).getValue();
inOrder.verify(in, times(1)).getHeader(HEADER_HISTOGRAM_VALUE, action, Long.class);
inOrder.verifyNoMoreInteractions();
}
@Test
public void testProcessOverrideValue() throws Exception {
when(endpoint.getValue()).thenReturn(VALUE);
when(in.getHeader(HEADER_HISTOGRAM_VALUE, VALUE, Long.class)).thenReturn(VALUE + 3);
producer.doProcess(exchange, endpoint, registry, METRICS_NAME);
inOrder.verify(exchange, times(1)).getIn();
inOrder.verify(registry, times(1)).histogram(METRICS_NAME);
inOrder.verify(endpoint, times(1)).getValue();
inOrder.verify(in, times(1)).getHeader(HEADER_HISTOGRAM_VALUE, VALUE, Long.class);
inOrder.verify(histogram, times(1)).update(VALUE + 3);
inOrder.verifyNoMoreInteractions();
}
}
|
HistogramProducerTest
|
java
|
spring-projects__spring-framework
|
spring-tx/src/test/java/org/springframework/transaction/event/TransactionalApplicationListenerMethodAdapterTests.java
|
{
"start": 12684,
"end": 13317
}
|
class ____ {
@TransactionalEventListener
public void defaultPhase(String data) {
}
@TransactionalEventListener
@Transactional(propagation = Propagation.REQUIRES_NEW)
public void withTransactionalRequiresNewAnnotation(String data) {
}
@TransactionalEventListener
@Transactional(propagation = Propagation.NOT_SUPPORTED)
public void withTransactionalNotSupportedAnnotation(String data) {
}
@TransactionalEventListener
@Async @Transactional(propagation = Propagation.REQUIRES_NEW)
public void withAsyncTransactionalAnnotation(String data) {
}
}
}
}
|
SampleEventsWithTransactionalAnnotation
|
java
|
apache__kafka
|
clients/src/test/java/org/apache/kafka/clients/producer/RecordMetadataTest.java
|
{
"start": 1056,
"end": 2537
}
|
class ____ {
@Test
public void testConstructionWithMissingBatchIndex() {
TopicPartition tp = new TopicPartition("foo", 0);
long timestamp = 2340234L;
int keySize = 3;
int valueSize = 5;
RecordMetadata metadata = new RecordMetadata(tp, -1L, -1, timestamp, keySize, valueSize);
assertEquals(tp.topic(), metadata.topic());
assertEquals(tp.partition(), metadata.partition());
assertEquals(timestamp, metadata.timestamp());
assertFalse(metadata.hasOffset());
assertEquals(-1L, metadata.offset());
assertEquals(keySize, metadata.serializedKeySize());
assertEquals(valueSize, metadata.serializedValueSize());
}
@Test
public void testConstructionWithBatchIndexOffset() {
TopicPartition tp = new TopicPartition("foo", 0);
long timestamp = 2340234L;
int keySize = 3;
int valueSize = 5;
long baseOffset = 15L;
int batchIndex = 3;
RecordMetadata metadata = new RecordMetadata(tp, baseOffset, batchIndex, timestamp, keySize, valueSize);
assertEquals(tp.topic(), metadata.topic());
assertEquals(tp.partition(), metadata.partition());
assertEquals(timestamp, metadata.timestamp());
assertEquals(baseOffset + batchIndex, metadata.offset());
assertEquals(keySize, metadata.serializedKeySize());
assertEquals(valueSize, metadata.serializedValueSize());
}
}
|
RecordMetadataTest
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.